diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index a021cdf560..58c94272ee 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -23,6 +23,39 @@ "tunnel": "0.0.6" } }, + "node_modules/@actions/cache": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz", + "integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==", + "dependencies": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.0.1", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.8.0", + "semver": "^6.1.0", + "uuid": "^3.3.3" + } + }, + "node_modules/@actions/cache/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@actions/cache/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, "node_modules/@actions/core": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.4.0.tgz", @@ -54,10 +87,22 @@ "tunnel": "0.0.6" } }, + "node_modules/@actions/glob": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.1.2.tgz", + "integrity": "sha512-SclLR7Ia5sEqjkJTPs7Sd86maMDw43p769YxBOxvPvEWuPEhpAnBsQfENOpXjFYMmhCqd127bmf+YdvJqVqR4A==", + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } + }, "node_modules/@actions/http-client": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.0.tgz", - "integrity": "sha512-fm1+OPPey5ypgStT9K8zbBhICj4J4UV/TJIHDhuWlkb8KyJaAtjcZK184dTqul0dV0nPKX97FNtDXX20BTLXSA==" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", + "dependencies": { + "tunnel": "^0.0.6" + } }, "node_modules/@actions/io": { "version": "1.1.1", @@ -117,6 +162,201 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@azure/abort-controller": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@azure/abort-controller/-/abort-controller-1.1.0.tgz", + "integrity": "sha512-TrRLIoSQVzfAJX9H1JeFjzAoDGcoK1IYX1UImfceTZpsyYfWr09Ss1aHW1y5TrrR3iq6RZLBwJ3E24uwPhwahw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/abort-controller/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-auth": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@azure/core-auth/-/core-auth-1.3.2.tgz", + "integrity": 
"sha512-7CU6DmCHIZp5ZPiZ9r3J17lTKMmYsm/zGvNkjArQwPkrLlZ1TZ+EUYfGgh2X31OLMVAQCTJZW4cXHJi02EbJnA==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-auth/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-http": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.5.tgz", + "integrity": "sha512-kctMqSQ6zfnlFpuYzfUKadeTyOQYbIQ+3Rj7dzVC3Dk1dOnHroTwR9hLYKX8/n85iJpkyaksaXpuh5L7GJRYuQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tough-cookie": "^4.0.0", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.4.19" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-http/node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@azure/core-http/node_modules/tough-cookie": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", + "integrity": "sha512-tHdtEpQCMrc1YLrMaqXXcj6AxhYi/xgit6mZu1+EDWUn+qhUf8wMQoFIy9NXuq23zAwtcB0t/MjACGR18pcRbg==", + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@azure/core-http/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-lro": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@azure/core-lro/-/core-lro-2.2.4.tgz", + "integrity": "sha512-e1I2v2CZM0mQo8+RSix0x091Av493e4bnT22ds2fcQGslTHzM2oTbswkB65nP4iEpCxBrFxOSDPKExmTmjCVtQ==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-lro/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-paging": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@azure/core-paging/-/core-paging-1.3.0.tgz", + "integrity": "sha512-H6Tg9eBm0brHqLy0OSAGzxIh1t4UL8eZVrSUMJ60Ra9cwq2pOskFqVpz2pYoHDsBY1jZ4V/P8LRGb5D5pmC6rg==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-paging/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": 
"sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/core-tracing": { + "version": "1.0.0-preview.13", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.0-preview.13.tgz", + "integrity": "sha512-KxDlhXyMlh2Jhj2ykX6vNEU0Vou4nHr025KoSEiz7cS3BNiHNaZcdECk/DmLkEB0as5T7b/TpRcehJ5yV6NeXQ==", + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/core-tracing/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/logger": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@azure/logger/-/logger-1.0.3.tgz", + "integrity": "sha512-aK4s3Xxjrx3daZr3VylxejK3vG5ExXck5WOHDJ8in/k9AqlfIyFMMT1uG7u8mNjX+QRILTIn0/Xgschfh/dQ9g==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/logger/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, + "node_modules/@azure/ms-rest-js": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/@azure/ms-rest-js/-/ms-rest-js-2.6.2.tgz", + "integrity": "sha512-0/8rOxAoR9M3qKUdbGOIYtHtQkm4m5jdoDNdxTU0DkOr84KwyAdJuW/RfjJinGyig4h73DNF0rdCl6XowgCYcg==", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + } + }, + "node_modules/@azure/storage-blob": { + "version": "12.11.0", + "resolved": "https://registry.npmjs.org/@azure/storage-blob/-/storage-blob-12.11.0.tgz", + "integrity": "sha512-na+FisoARuaOWaHWpmdtk3FeuTWf2VWamdJ9/TJJzj5ZdXPLC3juoDgFs6XVuJIoK30yuBpyFBEDXVRK4pB7Tg==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/storage-blob/node_modules/tslib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", + "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==" + }, "node_modules/@babel/code-frame": { "version": "7.12.11", "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", @@ -429,6 +669,14 @@ "@octokit/openapi-types": "^9.1.1" } }, + "node_modules/@opentelemetry/api": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.1.0.tgz", + "integrity": "sha512-hf+3bwuBwtXsugA2ULBc95qxrOqP2pOekLz34BJhcAKawt94vfeNyUKpYc0lZQ/3sCP6LqRa7UAdHA7i5UODzQ==", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/@sinonjs/commons": { "version": "1.8.3", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", @@ -497,6 +745,28 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-16.11.22.tgz", "integrity": 
"sha512-DYNtJWauMQ9RNpesl4aVothr97/tIJM8HbyOXJ0AYT1Z2bEjLHyfjOBPAQQVMLf8h3kSShYfNk8Wnto8B2zHUA==" }, + "node_modules/@types/node-fetch": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.2.tgz", + "integrity": "sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==", + "dependencies": { + "@types/node": "*", + "form-data": "^3.0.0" + } + }, + "node_modules/@types/node-fetch/node_modules/form-data": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", + "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/@types/semver": { "version": "7.3.8", "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.3.8.tgz", @@ -512,6 +782,14 @@ "@sinonjs/fake-timers": "^7.1.0" } }, + "node_modules/@types/tunnel": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/@types/tunnel/-/tunnel-0.0.3.tgz", + "integrity": "sha512-sOUTGn6h1SfQ+gbgqC364jLFBw2lnFqkgF3q0WovEHRLMrVD1sd5aufqi/aJObLekJO+Aq5z646U4Oxy6shXMA==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "4.28.5", "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.28.5.tgz", @@ -769,6 +1047,17 @@ "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/abort-controller": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", + "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "engines": { + "node": ">=6.5" + } + }, "node_modules/acorn": { "version": "7.4.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", @@ -955,6 +1244,11 @@ "node": ">=8" } }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, "node_modules/ava": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/ava/-/ava-4.0.1.tgz", @@ -1478,6 +1772,17 @@ "version": "1.1.3", "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/commander": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-8.1.0.tgz", @@ -1633,6 +1938,14 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/deprecation": { "version": "2.3.1", "integrity": 
"sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" @@ -2551,6 +2864,22 @@ "node": ">=0.10.0" } }, + "node_modules/event-target-shim": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", + "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "engines": { + "node": ">=0.8.x" + } + }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -2727,6 +3056,19 @@ "integrity": "sha512-OMQjaErSFHmHqZe+PSidH5n8j3O0F2DdnVh8JB4j4eUQ2k6KvB0qGfrKIhapvez5JerBbmWkaLYUYWISaESoXg==", "dev": true }, + "node_modules/form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, "node_modules/fs": { "version": "0.0.1-security", "integrity": "sha1-invTcYa23d84E/I4WLV+yq9eQdQ=" @@ -3074,6 +3416,14 @@ "version": "2.0.3", "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" }, + "node_modules/ip-regex": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-2.1.0.tgz", + "integrity": "sha512-58yWmlHpp7VYfcdTwMTvwMmqx/Elfxjd9RXTDyMsbL7lLWmhMylLEqiYVLKuLzOZqVgiWXD9MfR62Vv89VRxkw==", + "engines": { + "node": ">=4" + } + }, "node_modules/irregular-plurals": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/irregular-plurals/-/irregular-plurals-3.3.0.tgz", @@ -3644,6 +3994,25 @@ "node": ">=8.6" } }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/mimic-fn": { "version": "2.1.0", "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", @@ -4247,11 +4616,15 @@ "node": ">= 8" } }, + "node_modules/psl": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", + "integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==" + }, "node_modules/punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", - "dev": true, "engines": { "node": ">=6" } @@ -4542,6 +4915,11 @@ "version": "1.1.9", "integrity": "sha512-DEqnSRTDw/Tc3FXf49zedI638Z9onwUotBMiUFKmrO2sdFKIbXamXGQ3Axd4qgphxKB4kw/qP1w5kTxnfU1B9Q==" }, + "node_modules/sax": { + 
"version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, "node_modules/semver": { "version": "7.3.5", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", @@ -5047,6 +5425,19 @@ "node": ">=8.0" } }, + "node_modules/tough-cookie": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-3.0.1.tgz", + "integrity": "sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg==", + "dependencies": { + "ip-regex": "^2.1.0", + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=6" + } + }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", @@ -5066,8 +5457,7 @@ "node_modules/tslib": { "version": "1.14.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", - "dev": true + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" }, "node_modules/tsutils": { "version": "3.21.0", @@ -5337,6 +5727,26 @@ "typedarray-to-buffer": "^3.1.5" } }, + "node_modules/xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/node_modules/@actions/cache/LICENSE.md b/node_modules/@actions/cache/LICENSE.md new file mode 100644 index 0000000000..dbae2edb2c --- /dev/null +++ b/node_modules/@actions/cache/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/@actions/cache/README.md b/node_modules/@actions/cache/README.md new file mode 100644 index 0000000000..541b115564 --- /dev/null +++ b/node_modules/@actions/cache/README.md @@ -0,0 +1,44 @@ +# `@actions/cache` + +> Functions necessary for caching dependencies and build outputs to improve workflow execution time. + +See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows) for how caching works. + +Note that GitHub will remove any cache entries that have not been accessed in over 7 days. There is no limit on the number of caches you can store, but the total size of all caches in a repository is limited to 10 GB. If you exceed this limit, GitHub will save your cache but will begin evicting caches until the total size is less than 10 GB. + +## Usage + +This package is used by the v2+ versions of our first party cache action. You can find an example implementation in the cache repo [here](https://github.com/actions/cache). + +#### Save Cache + +Saves a cache containing the files in `paths` using the `key` provided. The files would be compressed using zstandard compression algorithm if zstd is installed, otherwise gzip is used. Function returns the cache id if the cache was saved succesfully and throws an error if cache upload fails. + +```js +const cache = require('@actions/cache'); +const paths = [ + 'node_modules', + 'packages/*/node_modules/' +] +const key = 'npm-foobar-d5ea0750' +const cacheId = await cache.saveCache(paths, key) +``` + +#### Restore Cache + +Restores a cache based on `key` and `restoreKeys` to the `paths` provided. Function returns the cache key for cache hit and returns undefined if cache not found. 
+ +```js +const cache = require('@actions/cache'); +const paths = [ + 'node_modules', + 'packages/*/node_modules/' +] +const key = 'npm-foobar-d5ea0750' +const restoreKeys = [ + 'npm-foobar-', + 'npm-' +] +const cacheKey = await cache.restoreCache(paths, key, restoreKeys) +``` + diff --git a/node_modules/@actions/cache/lib/cache.d.ts b/node_modules/@actions/cache/lib/cache.d.ts new file mode 100644 index 0000000000..16b20f7574 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.d.ts @@ -0,0 +1,32 @@ +import { DownloadOptions, UploadOptions } from './options'; +export declare class ValidationError extends Error { + constructor(message: string); +} +export declare class ReserveCacheError extends Error { + constructor(message: string); +} +/** + * isFeatureAvailable to check the presence of Actions cache service + * + * @returns boolean return true if Actions cache service feature is available, otherwise false + */ +export declare function isFeatureAvailable(): boolean; +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +export declare function restoreCache(paths: string[], primaryKey: string, restoreKeys?: string[], options?: DownloadOptions): Promise; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +export declare function saveCache(paths: string[], key: string, options?: UploadOptions): Promise; diff --git a/node_modules/@actions/cache/lib/cache.js b/node_modules/@actions/cache/lib/cache.js new file mode 100644 index 0000000000..4dc5e88a50 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.js @@ -0,0 +1,210 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const path = __importStar(require("path")); +const utils = __importStar(require("./internal/cacheUtils")); +const cacheHttpClient = __importStar(require("./internal/cacheHttpClient")); +const tar_1 = require("./internal/tar"); +class ValidationError extends Error { + constructor(message) { + super(message); + this.name = 'ValidationError'; + Object.setPrototypeOf(this, ValidationError.prototype); + } +} +exports.ValidationError = ValidationError; +class ReserveCacheError extends Error { + constructor(message) { + super(message); + this.name = 'ReserveCacheError'; + Object.setPrototypeOf(this, ReserveCacheError.prototype); + } +} +exports.ReserveCacheError = ReserveCacheError; +function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); + } +} +function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + } + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + } +} +/** + * isFeatureAvailable to check the presence of Actions cache service + * + * @returns boolean return true if Actions cache service feature is available, otherwise false + */ +function isFeatureAvailable() { + return !!process.env['ACTIONS_CACHE_URL']; +} +exports.isFeatureAvailable = isFeatureAvailable; +/** + * Restores cache from keys + * + * @param paths a list of file paths to restore from the cache + * @param primaryKey an explicit key for restoring the cache + * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key + * @param downloadOptions cache download options + * @returns string returns the key for the cache hit, otherwise returns undefined + */ +function restoreCache(paths, primaryKey, restoreKeys, options) { + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core.debug('Resolved Keys:'); + core.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + let archivePath = ''; + try { + // path are needed to compute version + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.archiveLocation)) { + // Cache not found + return undefined; + } + archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + // Download the cache from the cache entry + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + if (core.isDebug()) { + yield tar_1.listTar(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield tar_1.extractTar(archivePath, compressionMethod); + core.info('Cache restored successfully'); + return cacheEntry.cacheKey; + } + catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } + else { + // Supress all non-validation cache related errors because caching should be optional + core.warning(`Failed to restore: ${error.message}`); + } + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return undefined; + }); +} +exports.restoreCache = restoreCache; +/** + * Saves a list of files with the specified key + * + * @param paths a list of file paths to be cached + * @param key an explicit key for restoring the cache + * @param options cache upload options + * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails + */ +function saveCache(paths, key, options) { + var _a, _b, _c, _d, _e; + return __awaiter(this, void 0, void 0, function* () { + checkPaths(paths); + checkKey(key); + const compressionMethod = yield utils.getCompressionMethod(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core.debug('Cache Paths:'); + core.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core.debug(`Archive Path: ${archivePath}`); + try { + yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod); + if (core.isDebug()) { + yield tar_1.listTar(archivePath, compressionMethod); + } + const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core.debug(`File Size: ${archiveFileSize}`); + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + core.debug('Reserving Cache'); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? 
void 0 : _b.cacheId; + } + else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } + else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } + core.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, options); + } + catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } + else if (typedError.name === ReserveCacheError.name) { + core.info(`Failed to save: ${typedError.message}`); + } + else { + core.warning(`Failed to save: ${typedError.message}`); + } + } + finally { + // Try to delete the archive to save space + try { + yield utils.unlinkFile(archivePath); + } + catch (error) { + core.debug(`Failed to delete archive: ${error}`); + } + } + return cacheId; + }); +} +exports.saveCache = saveCache; +//# sourceMappingURL=cache.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/cache.js.map b/node_modules/@actions/cache/lib/cache.js.map new file mode 100644 index 0000000000..fb11947e03 --- /dev/null +++ b/node_modules/@actions/cache/lib/cache.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"cache.js","sourceRoot":"","sources":["../src/cache.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,2CAA4B;AAC5B,6DAA8C;AAC9C,4EAA6D;AAC7D,wCAA6D;AAG7D,MAAa,eAAgB,SAAQ,KAAK;IACxC,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAA;QAC7B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CACF;AAND,0CAMC;AAED,MAAa,iBAAkB,SAAQ,KAAK;IAC1C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAA;QACd,IAAI,CAAC,IAAI,GAAG,mBAAmB,CAAA;QAC/B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,iBAAiB,CAAC,SAAS,CAAC,CAAA;IAC1D,CAAC;CACF;AAND,8CAMC;AAED,SAAS,UAAU,CAAC,KAAe;IACjC,IAAI,CAAC,KAAK,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QAChC,MAAM,IAAI,eAAe,CACvB,wEAAwE,CACzE,CAAA;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,GAAW;IAC3B,IAAI,GAAG,CAAC,MAAM,GAAG,GAAG,EAAE;QACpB,MAAM,IAAI,eAAe,CACvB,yBAAyB,GAAG,wCAAwC,CACrE,CAAA;KACF;IACD,MAAM,KAAK,GAAG,SAAS,CAAA;IACvB,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACpB,MAAM,IAAI,eAAe,CACvB,yBAAyB,GAAG,yBAAyB,CACtD,CAAA;KACF;AACH,CAAC;AAED;;;;GAIG;AAEH,SAAgB,kBAAkB;IAChC,OAAO,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAA;AAC3C,CAAC;AAFD,gDAEC;AAED;;;;;;;;GAQG;AACH,SAAsB,YAAY,CAChC,KAAe,EACf,UAAkB,EAClB,WAAsB,EACtB,OAAyB;;QAEzB,UAAU,CAAC,KAAK,CAAC,CAAA;QAEjB,WAAW,GAAG,WAAW,IAAI,EAAE,CAAA;QAC/B,MAAM,IAAI,GAAG,CAAC,UAAU,EAAE,GAAG,WAAW,CAAC,CAAA;QAEzC,IAAI,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAA;QAC5B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;QAEhC,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;YACpB,MAAM,IAAI,eAAe,CACvB,4DAA4D,CAC7D,CAAA;SACF;QACD,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;YACtB,QAAQ,CAAC,GAAG,CAAC,CAAA;SACd;QAED,MAAM,iBAAiB,GAAG,MAAM,KAAK,CAAC,oBAAoB,EAAE,CAAA;QAC5D,IAAI,WAAW,GAAG,EAAE,CAAA;QACpB,IAAI;YACF,qCAAqC;YACrC,MAAM,UAAU,GAAG,MAAM,eAAe,CAAC,aAAa,CAAC,IAAI,EAAE,KAAK,EAAE;gBAClE,iBAAiB;aAClB,CAAC,CAAA;YAEF,IAAI,EAAC,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,eAAe,CAAA,EAAE;gBAChC,kBAAkB;gBAClB,OAAO,SAAS,CAAA;aACjB;YAED,WAAW,GAAG,IAAI,CAAC,IAAI,CACrB,MAAM,KAAK,CAAC,mBAAmB,EAAE,EACjC,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAC1C,CAAA;YACD,IAAI,CAAC,KAAK,CAAC,iBAAiB,WAAW,EAAE,CAAC,CAAA;YAE1C,0CAA0C;YAC1C,MAAM,eAAe,CAAC,aAAa,CACjC,UAAU,CAAC,eAAe,EAC1B,WAAW,EACX,OAAO,CACR,CAAA;YAED,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;gBAClB,MAAM,aAAO,CAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;aAC9C;YAED,MAAM,eAAe,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YACpE,IAAI,CAAC,IAAI,CACP,gBAAgB,IAAI,CAAC,KAAK,CACxB,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,KAAK,CAC9B,CAAA;YAED,MAAM,gBAAU,CAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;YAChD,IAAI,CAAC,IAAI,CAAC,6BAA6B,CAAC,CAAA;YAExC,OAAO,UAAU,CAAC,QAAQ,CAAA;SAC3B;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,UAAU,GAAG,KAAc,CAAA;YACjC,IAAI,UAAU,CAAC,IAAI,KAAK,eAAe,CAAC,IAAI,EAAE;gBAC5C,MAAM,KAAK,CAAA;aACZ;iBAAM;gBACL,qFAAqF;gBACrF,IAAI,CAAC,OAAO,CAAC,sBAAuB,KAAe,CAAC,OAAO,EAAE,CAAC,CAAA;aAC/D;SACF;gBAAS;YACR,0CAA0C;YAC1C,IAAI;gBACF,MAAM,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAA;aACpC;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,KAAK,CAAC,6BAA6B,KAAK,EAAE,CAAC,CAAA;aACjD;SACF;QAED,OAAO,SAAS,CAAA;IAClB,CAAC;CAAA;AAlFD,oCAkFC;AAED;;;;;;;GAOG;AACH,SAAsB,SAAS,CAC7B,KAAe,EACf,GAAW,EACX,OAAuB;;;QAEvB,UAAU,CAAC,KAAK,CAAC,CAAA;QACjB,QAAQ,CAAC,GAAG,CAAC,CAAA;QAEb,MAAM,iBAAiB,GAAG,MAAM,KAAK,CAAC,oBAAoB,EAAE,CAAA;QAC5D,IAAI,OAAO,GAAG,CAAC,CAAC,CAAA;QAEhB,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,YAAY,CAAC,KAAK,CAAC,CAAA;QAClD,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;QAC1B,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,EAAE,CAAC,CAAA;QAE3C,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAC3B,MAAM,IAAI,KAAK,CACb,qHAAqH,CACtH,CAAA;SACF;QAED,MAAM,aAAa,GAAG,MAAM,KAAK,CAAC,mBAAmB,EAAE,CAAA;QACvD,MAAM,WAAW,GAAG,IAAI,CAAC,
IAAI,CAC3B,aAAa,EACb,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAC1C,CAAA;QAED,IAAI,CAAC,KAAK,CAAC,iBAAiB,WAAW,EAAE,CAAC,CAAA;QAE1C,IAAI;YACF,MAAM,eAAS,CAAC,aAAa,EAAE,UAAU,EAAE,iBAAiB,CAAC,CAAA;YAC7D,IAAI,IAAI,CAAC,OAAO,EAAE,EAAE;gBAClB,MAAM,aAAO,CAAC,WAAW,EAAE,iBAAiB,CAAC,CAAA;aAC9C;YACD,MAAM,aAAa,GAAG,EAAE,GAAG,IAAI,GAAG,IAAI,GAAG,IAAI,CAAA,CAAC,sBAAsB;YACpE,MAAM,eAAe,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YACpE,IAAI,CAAC,KAAK,CAAC,cAAc,eAAe,EAAE,CAAC,CAAA;YAE3C,2FAA2F;YAC3F,IAAI,eAAe,GAAG,aAAa,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,EAAE;gBACtD,MAAM,IAAI,KAAK,CACb,kBAAkB,IAAI,CAAC,KAAK,CAC1B,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,+CAA+C,CACxE,CAAA;aACF;YAED,IAAI,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAA;YAC7B,MAAM,oBAAoB,GAAG,MAAM,eAAe,CAAC,YAAY,CAC7D,GAAG,EACH,KAAK,EACL;gBACE,iBAAiB;gBACjB,SAAS,EAAE,eAAe;aAC3B,CACF,CAAA;YAED,UAAI,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,MAAM,0CAAE,OAAO,EAAE;gBACzC,OAAO,SAAG,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,MAAM,0CAAE,OAAO,CAAA;aAChD;iBAAM,IAAI,CAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,UAAU,MAAK,GAAG,EAAE;gBACnD,MAAM,IAAI,KAAK,aACb,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,KAAK,0CAAE,OAAO,mCAClC,kBAAkB,IAAI,CAAC,KAAK,CAC1B,eAAe,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAChC,QAAQ,eAAe,mDAAmD,CAC9E,CAAA;aACF;iBAAM;gBACL,MAAM,IAAI,iBAAiB,CACzB,oCAAoC,GAAG,2DAA2D,MAAA,oBAAoB,aAApB,oBAAoB,uBAApB,oBAAoB,CAAE,KAAK,0CAAE,OAAO,EAAE,CACzI,CAAA;aACF;YAED,IAAI,CAAC,KAAK,CAAC,qBAAqB,OAAO,GAAG,CAAC,CAAA;YAC3C,MAAM,eAAe,CAAC,SAAS,CAAC,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAA;SAC/D;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,UAAU,GAAG,KAAc,CAAA;YACjC,IAAI,UAAU,CAAC,IAAI,KAAK,eAAe,CAAC,IAAI,EAAE;gBAC5C,MAAM,KAAK,CAAA;aACZ;iBAAM,IAAI,UAAU,CAAC,IAAI,KAAK,iBAAiB,CAAC,IAAI,EAAE;gBACrD,IAAI,CAAC,IAAI,CAAC,mBAAmB,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;aACnD;iBAAM;gBACL,IAAI,CAAC,OAAO,CAAC,mBAAmB,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;aACtD;SACF;gBAAS;YACR,0CAA0C;YAC1C,IAAI;gBACF,MAAM,KAAK,CAAC,UAAU,CAAC,WAAW,CAAC,CAAA;aACpC;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,CAAC,KAAK,CAAC,6BAA6B,KAAK,EAAE,CAAC,CAAA;aACjD;SACF;QAED,OAAO,OAAO,CAAA;;CACf;AA7FD,8BA6FC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts b/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts new file mode 100644 index 0000000000..0497cabdeb --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.d.ts @@ -0,0 +1,8 @@ +import { CompressionMethod } from './constants'; +import { ArtifactCacheEntry, InternalCacheOptions, ReserveCacheResponse, ITypedResponseWithError } from './contracts'; +import { DownloadOptions, UploadOptions } from '../options'; +export declare function getCacheVersion(paths: string[], compressionMethod?: CompressionMethod): string; +export declare function getCacheEntry(keys: string[], paths: string[], options?: InternalCacheOptions): Promise; +export declare function downloadCache(archiveLocation: string, archivePath: string, options?: DownloadOptions): Promise; +export declare function reserveCache(key: string, paths: string[], options?: InternalCacheOptions): Promise>; +export declare function saveCache(cacheId: number, archivePath: string, options?: UploadOptions): Promise; diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.js b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js new file mode 100644 index 0000000000..cc331468f5 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js @@ -0,0 +1,213 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function 
adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const auth_1 = require("@actions/http-client/lib/auth"); +const crypto = __importStar(require("crypto")); +const fs = __importStar(require("fs")); +const url_1 = require("url"); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +const downloadUtils_1 = require("./downloadUtils"); +const options_1 = require("../options"); +const requestUtils_1 = require("./requestUtils"); +const versionSalt = '1.0'; +function getCacheApiUrl(resource) { + const baseUrl = process.env['ACTIONS_CACHE_URL'] || ''; + if (!baseUrl) { + throw new Error('Cache Service Url not found, unable to restore cache.'); + } + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core.debug(`Resource Url: ${url}`); + return url; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader('application/json', '6.0-preview.1') + } + }; + return requestOptions; +} +function createHttpClient() { + const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions()); +} +function getCacheVersion(paths, compressionMethod) { + const components = paths.concat(!compressionMethod || compressionMethod === constants_1.CompressionMethod.Gzip + ? [] + : [compressionMethod]); + // Add salt to cache version to support breaking changes in cache entry + components.push(versionSalt); + return crypto + .createHash('sha256') + .update(components.join('|')) + .digest('hex'); +} +exports.getCacheVersion = getCacheVersion; +function getCacheEntry(keys, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? 
void 0 : options.compressionMethod); + const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; + const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); + if (response.statusCode === 204) { + return null; + } + if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error('Cache not found.'); + } + core.setSecret(cacheDownloadUrl); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function downloadCache(archiveLocation, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = options_1.getDownloadOptions(options); + if (downloadOptions.useAzureSdk && + archiveUrl.hostname.endsWith('.blob.core.windows.net')) { + // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. + yield downloadUtils_1.downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions); + } + else { + // Otherwise, download using the Actions http-client. + yield downloadUtils_1.downloadCacheHttpClient(archiveLocation, archivePath); + } + }); +} +exports.downloadCache = downloadCache; +// Reserve Cache +function reserveCache(key, paths, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); + const reserveCacheRequest = { + key, + version, + cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize + }; + const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); + })); + return response; + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + 'Content-Type': 'application/octet-stream', + 'Content-Range': getContentRange(start, end) + }; + const uploadChunkResponse = yield requestUtils_1.retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { + return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); + })); + if (!requestUtils_1.isSuccessStatusCode(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + } + }); +} +function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs.openSync(archivePath, 'r'); + const uploadOptions = options_1.getUploadOptions(options); + const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core.debug('Awaiting all uploads'); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on('error', error => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(httpClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield requestUtils_1.retryTypedResponse('commitCache', () => __awaiter(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); +} +function saveCache(cacheId, archivePath, options) { + return __awaiter(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + core.debug('Upload cache'); + yield uploadFile(httpClient, cacheId, archivePath, options); + // Commit Cache + core.debug('Commiting cache'); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core.info(`Cache Size: ~${Math.round(cacheSize / 
(1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core.info('Cache saved successfully'); + }); +} +exports.saveCache = saveCache; +//# sourceMappingURL=cacheHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map new file mode 100644 index 0000000000..de9436a91c --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cacheHttpClient.js","sourceRoot":"","sources":["../../src/internal/cacheHttpClient.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAA+C;AAC/C,wDAAqE;AAKrE,+CAAgC;AAChC,uCAAwB;AACxB,6BAAuB;AAEvB,oDAAqC;AACrC,2CAA6C;AAS7C,mDAAgF;AAChF,wCAKmB;AACnB,iDAIuB;AAEvB,MAAM,WAAW,GAAG,KAAK,CAAA;AAEzB,SAAS,cAAc,CAAC,QAAgB;IACtC,MAAM,OAAO,GAAW,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,EAAE,CAAA;IAC9D,IAAI,CAAC,OAAO,EAAE;QACZ,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAA;KACzE;IAED,MAAM,GAAG,GAAG,GAAG,OAAO,uBAAuB,QAAQ,EAAE,CAAA;IACvD,IAAI,CAAC,KAAK,CAAC,iBAAiB,GAAG,EAAE,CAAC,CAAA;IAClC,OAAO,GAAG,CAAA;AACZ,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAY,EAAE,UAAkB;IAC1D,OAAO,GAAG,IAAI,gBAAgB,UAAU,EAAE,CAAA;AAC5C,CAAC;AAED,SAAS,iBAAiB;IACxB,MAAM,cAAc,GAAmB;QACrC,OAAO,EAAE;YACP,MAAM,EAAE,kBAAkB,CAAC,kBAAkB,EAAE,eAAe,CAAC;SAChE;KACF,CAAA;IAED,OAAO,cAAc,CAAA;AACvB,CAAC;AAED,SAAS,gBAAgB;IACvB,MAAM,KAAK,GAAG,OAAO,CAAC,GAAG,CAAC,uBAAuB,CAAC,IAAI,EAAE,CAAA;IACxD,MAAM,uBAAuB,GAAG,IAAI,8BAAuB,CAAC,KAAK,CAAC,CAAA;IAElE,OAAO,IAAI,wBAAU,CACnB,eAAe,EACf,CAAC,uBAAuB,CAAC,EACzB,iBAAiB,EAAE,CACpB,CAAA;AACH,CAAC;AAED,SAAgB,eAAe,CAC7B,KAAe,EACf,iBAAqC;IAErC,MAAM,UAAU,GAAG,KAAK,CAAC,MAAM,CAC7B,CAAC,iBAAiB,IAAI,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QAChE,CAAC,CAAC,EAAE;QACJ,CAAC,CAAC,CAAC,iBAAiB,CAAC,CACxB,CAAA;IAED,uEAAuE;IACvE,UAAU,CAAC,IAAI,CAAC,WAAW,CAAC,CAAA;IAE5B,OAAO,MAAM;SACV,UAAU,CAAC,QAAQ,CAAC;SACpB,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAC5B,MAAM,CAAC,KAAK,CAAC,CAAA;AAClB,CAAC;AAjBD,0CAiBC;AAED,SAAsB,aAAa,CACjC,IAAc,EACd,KAAe,EACf,OAA8B;;QAE9B,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,CAAC,CAAA;QAClE,MAAM,QAAQ,GAAG,cAAc,kBAAkB,CAC/C,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CACf,YAAY,OAAO,EAAE,CAAA;QAEtB,MAAM,QAAQ,GAAG,MAAM,iCAAkB,CAAC,eAAe,EAAE,GAAS,EAAE,gDACpE,OAAA,UAAU,CAAC,OAAO,CAAqB,cAAc,CAAC,QAAQ,CAAC,CAAC,CAAA,GAAA,CACjE,CAAA;QACD,IAAI,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;YAC/B,OAAO,IAAI,CAAA;SACZ;QACD,IAAI,CAAC,kCAAmB,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YAC7C,MAAM,IAAI,KAAK,CAAC,gCAAgC,QAAQ,CAAC,UAAU,EAAE,CAAC,CAAA;SACvE;QAED,MAAM,WAAW,GAAG,QAAQ,CAAC,MAAM,CAAA;QACnC,MAAM,gBAAgB,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,eAAe,CAAA;QACrD,IAAI,CAAC,gBAAgB,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,kBAAkB,CAAC,CAAA;SACpC;QACD,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,CAAA;QAChC,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAA;QAC3B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC,CAAA;QAEvC,OAAO,WAAW,CAAA;IACpB,CAAC;CAAA;AA/BD,sCA+BC;AAED,SAAsB,aAAa,CACjC,eAAuB,EACvB,WAAmB,EACnB,OAAyB;;QAEzB,MAAM,UAAU,GAAG,IAAI,SAAG,CAAC,eAAe,CAAC,CAAA;QAC3C,MAAM,eAAe,GAAG,4BAAkB,CAAC,OAAO,CAAC,CAAA;QAEnD,IACE,eAAe,CAAC,WAAW;YAC3B,UAAU,CAAC,QAAQ,CAAC,QAAQ,CAAC,wBAAwB,CAAC,EACtD;YACA,6FAA6F;YAC7F,MAAM,uCAAuB,CAAC,eAAe,EAAE,WAAW,EAAE,eAAe,CAAC,CAAA;SAC7E;aAAM;YACL,qDAAqD;YACrD,
MAAM,uCAAuB,CAAC,eAAe,EAAE,WAAW,CAAC,CAAA;SAC5D;IACH,CAAC;CAAA;AAlBD,sCAkBC;AAED,gBAAgB;AAChB,SAAsB,YAAY,CAChC,GAAW,EACX,KAAe,EACf,OAA8B;;QAE9B,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,eAAe,CAAC,KAAK,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,CAAC,CAAA;QAElE,MAAM,mBAAmB,GAAwB;YAC/C,GAAG;YACH,OAAO;YACP,SAAS,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,SAAS;SAC9B,CAAA;QACD,MAAM,QAAQ,GAAG,MAAM,iCAAkB,CAAC,cAAc,EAAE,GAAS,EAAE;YACnE,OAAA,UAAU,CAAC,QAAQ,CACjB,cAAc,CAAC,QAAQ,CAAC,EACxB,mBAAmB,CACpB,CAAA;UAAA,CACF,CAAA;QACD,OAAO,QAAQ,CAAA;IACjB,CAAC;CAAA;AApBD,oCAoBC;AAED,SAAS,eAAe,CAAC,KAAa,EAAE,GAAW;IACjD,oCAAoC;IACpC,8BAA8B;IAC9B,oBAAoB;IACpB,2CAA2C;IAC3C,+BAA+B;IAC/B,OAAO,SAAS,KAAK,IAAI,GAAG,IAAI,CAAA;AAClC,CAAC;AAED,SAAe,WAAW,CACxB,UAAsB,EACtB,WAAmB,EACnB,UAAuC,EACvC,KAAa,EACb,GAAW;;QAEX,IAAI,CAAC,KAAK,CACR,2BAA2B,GAAG;YAC5B,KAAK;YACL,CAAC,oBAAoB,KAAK,wBAAwB,eAAe,CACjE,KAAK,EACL,GAAG,CACJ,EAAE,CACJ,CAAA;QACD,MAAM,iBAAiB,GAAG;YACxB,cAAc,EAAE,0BAA0B;YAC1C,eAAe,EAAE,eAAe,CAAC,KAAK,EAAE,GAAG,CAAC;SAC7C,CAAA;QAED,MAAM,mBAAmB,GAAG,MAAM,sCAAuB,CACvD,uBAAuB,KAAK,UAAU,GAAG,GAAG,EAC5C,GAAS,EAAE;YACT,OAAA,UAAU,CAAC,UAAU,CACnB,OAAO,EACP,WAAW,EACX,UAAU,EAAE,EACZ,iBAAiB,CAClB,CAAA;UAAA,CACJ,CAAA;QAED,IAAI,CAAC,kCAAmB,CAAC,mBAAmB,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE;YAChE,MAAM,IAAI,KAAK,CACb,gCAAgC,mBAAmB,CAAC,OAAO,CAAC,UAAU,uBAAuB,CAC9F,CAAA;SACF;IACH,CAAC;CAAA;AAED,SAAe,UAAU,CACvB,UAAsB,EACtB,OAAe,EACf,WAAmB,EACnB,OAAuB;;QAEvB,gBAAgB;QAChB,MAAM,QAAQ,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;QAC7D,MAAM,WAAW,GAAG,cAAc,CAAC,UAAU,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAA;QAClE,MAAM,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,GAAG,CAAC,CAAA;QACxC,MAAM,aAAa,GAAG,0BAAgB,CAAC,OAAO,CAAC,CAAA;QAE/C,MAAM,WAAW,GAAG,KAAK,CAAC,aAAa,CACrC,mBAAmB,EACnB,aAAa,CAAC,iBAAiB,CAChC,CAAA;QACD,MAAM,YAAY,GAAG,KAAK,CAAC,aAAa,CACtC,iBAAiB,EACjB,aAAa,CAAC,eAAe,CAC9B,CAAA;QAED,MAAM,eAAe,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;QAC1D,IAAI,CAAC,KAAK,CAAC,sBAAsB,CAAC,CAAA;QAClC,IAAI,MAAM,GAAG,CAAC,CAAA;QAEd,IAAI;YACF,MAAM,OAAO,CAAC,GAAG,CACf,eAAe,CAAC,GAAG,CAAC,GAAS,EAAE;gBAC7B,OAAO,MAAM,GAAG,QAAQ,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,GAAG,MAAM,EAAE,YAAY,CAAC,CAAA;oBAC3D,MAAM,KAAK,GAAG,MAAM,CAAA;oBACpB,MAAM,GAAG,GAAG,MAAM,GAAG,SAAS,GAAG,CAAC,CAAA;oBAClC,MAAM,IAAI,YAAY,CAAA;oBAEtB,MAAM,WAAW,CACf,UAAU,EACV,WAAW,EACX,GAAG,EAAE,CACH,EAAE;yBACC,gBAAgB,CAAC,WAAW,EAAE;wBAC7B,EAAE;wBACF,KAAK;wBACL,GAAG;wBACH,SAAS,EAAE,KAAK;qBACjB,CAAC;yBACD,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;wBACnB,MAAM,IAAI,KAAK,CACb,qDAAqD,KAAK,CAAC,OAAO,EAAE,CACrE,CAAA;oBACH,CAAC,CAAC,EACN,KAAK,EACL,GAAG,CACJ,CAAA;iBACF;YACH,CAAC,CAAA,CAAC,CACH,CAAA;SACF;gBAAS;YACR,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,CAAA;SACjB;QACD,OAAM;IACR,CAAC;CAAA;AAED,SAAe,WAAW,CACxB,UAAsB,EACtB,OAAe,EACf,QAAgB;;QAEhB,MAAM,kBAAkB,GAAuB,EAAC,IAAI,EAAE,QAAQ,EAAC,CAAA;QAC/D,OAAO,MAAM,iCAAkB,CAAC,aAAa,EAAE,GAAS,EAAE;YACxD,OAAA,UAAU,CAAC,QAAQ,CACjB,cAAc,CAAC,UAAU,OAAO,CAAC,QAAQ,EAAE,EAAE,CAAC,EAC9C,kBAAkB,CACnB,CAAA;UAAA,CACF,CAAA;IACH,CAAC;CAAA;AAED,SAAsB,SAAS,CAC7B,OAAe,EACf,WAAmB,EACnB,OAAuB;;QAEvB,MAAM,UAAU,GAAG,gBAAgB,EAAE,CAAA;QAErC,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,CAAA;QAC1B,MAAM,UAAU,CAAC,UAAU,EAAE,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAA;QAE3D,eAAe;QACf,IAAI,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAA;QAC7B,MAAM,SAAS,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;QAC9D,IAAI,CAAC,IAAI,CACP,gBAAgB,IAAI,CAAC,KAAK,CAAC,SAAS,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,QAAQ,SAAS,KAAK,CAC5E,CAAA;QAED,MAAM,mBAAmB,GAAG,MAAM,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,SAAS,CAAC,CAAA;QAC7E,IAAI,CAAC,kCAAmB,CAAC,mBAAmB,CAAC,UAAU,CAAC,EAAE;YACx
D,MAAM,IAAI,KAAK,CACb,gCAAgC,mBAAmB,CAAC,UAAU,uBAAuB,CACtF,CAAA;SACF;QAED,IAAI,CAAC,IAAI,CAAC,0BAA0B,CAAC,CAAA;IACvC,CAAC;CAAA;AAzBD,8BAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts b/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts new file mode 100644 index 0000000000..ab2e930edf --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.d.ts @@ -0,0 +1,12 @@ +/// +import * as fs from 'fs'; +import { CompressionMethod } from './constants'; +export declare function createTempDirectory(): Promise; +export declare function getArchiveFileSizeInBytes(filePath: string): number; +export declare function resolvePaths(patterns: string[]): Promise; +export declare function unlinkFile(filePath: fs.PathLike): Promise; +export declare function getCompressionMethod(): Promise; +export declare function getCacheFileName(compressionMethod: CompressionMethod): string; +export declare function isGnuTarInstalled(): Promise; +export declare function assertDefined(name: string, value?: T): T; +export declare function isGhes(): boolean; diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.js b/node_modules/@actions/cache/lib/internal/cacheUtils.js new file mode 100644 index 0000000000..9ab8a0b7a8 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.js @@ -0,0 +1,175 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const exec = __importStar(require("@actions/exec")); +const glob = __importStar(require("@actions/glob")); +const io = __importStar(require("@actions/io")); +const fs = __importStar(require("fs")); +const path = __importStar(require("path")); +const semver = __importStar(require("semver")); +const util = __importStar(require("util")); +const uuid_1 = require("uuid"); +const constants_1 = require("./constants"); +// From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === 'win32'; + let tempDirectory = process.env['RUNNER_TEMP'] || ''; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env['USERPROFILE'] || 'C:\\'; + } + else { + if (process.platform === 'darwin') { + baseLocation = '/Users'; + } + else { + baseLocation = '/home'; + } + } + tempDirectory = path.join(baseLocation, 'actions', 'temp'); + } + const dest = path.join(tempDirectory, uuid_1.v4()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function getArchiveFileSizeInBytes(filePath) { + return fs.statSync(filePath).size; +} +exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; +function resolvePaths(patterns) { + var e_1, _a; + var _b; + return __awaiter(this, void 0, void 0, function* () { + const paths = []; + const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); + const globber = yield glob.create(patterns.join('\n'), { + implicitDescendants: false + }); + try { + for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { + const file = _d.value; + const relativeFile = path + .relative(workspace, file) + .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); + core.debug(`Matched: ${relativeFile}`); + // Paths are made relative so the tar entries are all relative to the root of the workspace. 
+ paths.push(`${relativeFile}`); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); + } + finally { if (e_1) throw e_1.error; } + } + return paths; + }); +} +exports.resolvePaths = resolvePaths; +function unlinkFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + return util.promisify(fs.unlink)(filePath); + }); +} +exports.unlinkFile = unlinkFile; +function getVersion(app) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Checking ${app} --version`); + let versionOutput = ''; + try { + yield exec.exec(`${app} --version`, [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + } + catch (err) { + core.debug(err.message); + } + versionOutput = versionOutput.trim(); + core.debug(versionOutput); + return versionOutput; + }); +} +// Use zstandard if possible to maximize cache performance +function getCompressionMethod() { + return __awaiter(this, void 0, void 0, function* () { + if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { + // Disable zstd due to bug https://github.com/actions/cache/issues/301 + return constants_1.CompressionMethod.Gzip; + } + const versionOutput = yield getVersion('zstd'); + const version = semver.clean(versionOutput); + if (!versionOutput.toLowerCase().includes('zstd command line interface')) { + // zstd is not installed + return constants_1.CompressionMethod.Gzip; + } + else if (!version || semver.lt(version, 'v1.3.2')) { + // zstd is installed but using a version earlier than v1.3.2 + // v1.3.2 is required to use the `--long` options in zstd + return constants_1.CompressionMethod.ZstdWithoutLong; + } + else { + return constants_1.CompressionMethod.Zstd; + } + }); +} +exports.getCompressionMethod = getCompressionMethod; +function getCacheFileName(compressionMethod) { + return compressionMethod === constants_1.CompressionMethod.Gzip + ? 
constants_1.CacheFilename.Gzip + : constants_1.CacheFilename.Zstd; +} +exports.getCacheFileName = getCacheFileName; +function isGnuTarInstalled() { + return __awaiter(this, void 0, void 0, function* () { + const versionOutput = yield getVersion('tar'); + return versionOutput.toLowerCase().includes('gnu tar'); + }); +} +exports.isGnuTarInstalled = isGnuTarInstalled; +function assertDefined(name, value) { + if (value === undefined) { + throw Error(`Expected ${name} but value was undefiend`); + } + return value; +} +exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; +//# sourceMappingURL=cacheUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/cacheUtils.js.map b/node_modules/@actions/cache/lib/internal/cacheUtils.js.map new file mode 100644 index 0000000000..6d7360b456 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/cacheUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cacheUtils.js","sourceRoot":"","sources":["../../src/internal/cacheUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,oDAAqC;AACrC,oDAAqC;AACrC,gDAAiC;AACjC,uCAAwB;AACxB,2CAA4B;AAC5B,+CAAgC;AAChC,2CAA4B;AAC5B,+BAAiC;AACjC,2CAA4D;AAE5D,8FAA8F;AAC9F,SAAsB,mBAAmB;;QACvC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;QAE/C,IAAI,aAAa,GAAW,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,EAAE,CAAA;QAE5D,IAAI,CAAC,aAAa,EAAE;YAClB,IAAI,YAAoB,CAAA;YACxB,IAAI,UAAU,EAAE;gBACd,8CAA8C;gBAC9C,YAAY,GAAG,OAAO,CAAC,GAAG,CAAC,aAAa,CAAC,IAAI,MAAM,CAAA;aACpD;iBAAM;gBACL,IAAI,OAAO,CAAC,QAAQ,KAAK,QAAQ,EAAE;oBACjC,YAAY,GAAG,QAAQ,CAAA;iBACxB;qBAAM;oBACL,YAAY,GAAG,OAAO,CAAA;iBACvB;aACF;YACD,aAAa,GAAG,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,SAAS,EAAE,MAAM,CAAC,CAAA;SAC3D;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,SAAM,EAAE,CAAC,CAAA;QAC/C,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QACrB,OAAO,IAAI,CAAA;IACb,CAAC;CAAA;AAvBD,kDAuBC;AAED,SAAgB,yBAAyB,CAAC,QAAgB;IACxD,OAAO,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAA;AACnC,CAAC;AAFD,8DAEC;AAED,SAAsB,YAAY,CAAC,QAAkB;;;;QACnD,MAAM,KAAK,GAAa,EAAE,CAAA;QAC1B,MAAM,SAAS,SAAG,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;QAClE,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;YACrD,mBAAmB,EAAE,KAAK;SAC3B,CAAC,CAAA;;YAEF,KAAyB,IAAA,KAAA,cAAA,OAAO,CAAC,aAAa,EAAE,CAAA,IAAA;gBAArC,MAAM,IAAI,WAAA,CAAA;gBACnB,MAAM,YAAY,GAAG,IAAI;qBACtB,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC;qBACzB,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,CAAA;gBACjD,IAAI,CAAC,KAAK,CAAC,YAAY,YAAY,EAAE,CAAC,CAAA;gBACtC,4FAA4F;gBAC5F,KAAK,CAAC,IAAI,CAAC,GAAG,YAAY,EAAE,CAAC,CAAA;aAC9B;;;;;;;;;QAED,OAAO,KAAK,CAAA;;CACb;AAjBD,oCAiBC;AAED,SAAsB,UAAU,CAAC,QAAqB;;QACpD,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAA;IAC5C,CAAC;CAAA;AAFD,gCAEC;AAED,SAAe,UAAU,CAAC,GAAW;;QACnC,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC,CAAA;QACvC,IAAI,aAAa,GAAG,EAAE,CAAA;QACtB,IAAI;YACF,MAAM,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,YAAY,EAAE,EAAE,EAAE;gBACtC,gBAAgB,EAAE,IAAI;gBACtB,MAAM,EAAE,IAAI;gBACZ,SAAS,EAAE;oBACT,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;oBACpE,MAAM,EAAE,CAAC,IAAY,EAAU,EAAE,CAAC,CAAC,aAAa,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;iBACrE;aACF,CAAC,CAAA;SACH;QAAC,OAAO,GAAG,EAAE;YACZ,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,OAAO,CAAC,CAAA;SACxB;QAED,aAAa,GAAG,aAAa,CAAC,IAAI,EAAE,CAAA;QACpC,IAAI,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QACzB,OAAO,aAAa,CAAA;IAC
tB,CAAC;CAAA;AAED,0DAA0D;AAC1D,SAAsB,oBAAoB;;QACxC,IAAI,OAAO,CAAC,QAAQ,KAAK,OAAO,IAAI,CAAC,CAAC,MAAM,iBAAiB,EAAE,CAAC,EAAE;YAChE,sEAAsE;YACtE,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;QAED,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,MAAM,CAAC,CAAA;QAC9C,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,CAAA;QAE3C,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,6BAA6B,CAAC,EAAE;YACxE,wBAAwB;YACxB,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;aAAM,IAAI,CAAC,OAAO,IAAI,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,QAAQ,CAAC,EAAE;YACnD,4DAA4D;YAC5D,yDAAyD;YACzD,OAAO,6BAAiB,CAAC,eAAe,CAAA;SACzC;aAAM;YACL,OAAO,6BAAiB,CAAC,IAAI,CAAA;SAC9B;IACH,CAAC;CAAA;AAnBD,oDAmBC;AAED,SAAgB,gBAAgB,CAAC,iBAAoC;IACnE,OAAO,iBAAiB,KAAK,6BAAiB,CAAC,IAAI;QACjD,CAAC,CAAC,yBAAa,CAAC,IAAI;QACpB,CAAC,CAAC,yBAAa,CAAC,IAAI,CAAA;AACxB,CAAC;AAJD,4CAIC;AAED,SAAsB,iBAAiB;;QACrC,MAAM,aAAa,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QAC7C,OAAO,aAAa,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAA;IACxD,CAAC;CAAA;AAHD,8CAGC;AAED,SAAgB,aAAa,CAAI,IAAY,EAAE,KAAS;IACtD,IAAI,KAAK,KAAK,SAAS,EAAE;QACvB,MAAM,KAAK,CAAC,YAAY,IAAI,0BAA0B,CAAC,CAAA;KACxD;IAED,OAAO,KAAK,CAAA;AACd,CAAC;AAND,sCAMC;AAED,SAAgB,MAAM;IACpB,MAAM,KAAK,GAAG,IAAI,GAAG,CACnB,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,IAAI,oBAAoB,CACzD,CAAA;IACD,OAAO,KAAK,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,YAAY,CAAA;AACtD,CAAC;AALD,wBAKC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/constants.d.ts b/node_modules/@actions/cache/lib/internal/constants.d.ts new file mode 100644 index 0000000000..36ac06e339 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.d.ts @@ -0,0 +1,12 @@ +export declare enum CacheFilename { + Gzip = "cache.tgz", + Zstd = "cache.tzst" +} +export declare enum CompressionMethod { + Gzip = "gzip", + ZstdWithoutLong = "zstd-without-long", + Zstd = "zstd" +} +export declare const DefaultRetryAttempts = 2; +export declare const DefaultRetryDelay = 5000; +export declare const SocketTimeout = 5000; diff --git a/node_modules/@actions/cache/lib/internal/constants.js b/node_modules/@actions/cache/lib/internal/constants.js new file mode 100644 index 0000000000..301068ffa1 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var CacheFilename; +(function (CacheFilename) { + CacheFilename["Gzip"] = "cache.tgz"; + CacheFilename["Zstd"] = "cache.tzst"; +})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); +var CompressionMethod; +(function (CompressionMethod) { + CompressionMethod["Gzip"] = "gzip"; + // Long range mode was added to zstd in v1.3.2. + // This enum is for earlier version of zstd that does not have --long support + CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; + CompressionMethod["Zstd"] = "zstd"; +})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); +// The default number of retry attempts. +exports.DefaultRetryAttempts = 2; +// The default delay in milliseconds between retry attempts. +exports.DefaultRetryDelay = 5000; +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. 
+exports.SocketTimeout = 5000; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/constants.js.map b/node_modules/@actions/cache/lib/internal/constants.js.map new file mode 100644 index 0000000000..ccdec94feb --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../src/internal/constants.ts"],"names":[],"mappings":";;AAAA,IAAY,aAGX;AAHD,WAAY,aAAa;IACvB,mCAAkB,CAAA;IAClB,oCAAmB,CAAA;AACrB,CAAC,EAHW,aAAa,GAAb,qBAAa,KAAb,qBAAa,QAGxB;AAED,IAAY,iBAMX;AAND,WAAY,iBAAiB;IAC3B,kCAAa,CAAA;IACb,+CAA+C;IAC/C,6EAA6E;IAC7E,0DAAqC,CAAA;IACrC,kCAAa,CAAA;AACf,CAAC,EANW,iBAAiB,GAAjB,yBAAiB,KAAjB,yBAAiB,QAM5B;AAED,wCAAwC;AAC3B,QAAA,oBAAoB,GAAG,CAAC,CAAA;AAErC,4DAA4D;AAC/C,QAAA,iBAAiB,GAAG,IAAI,CAAA;AAErC,6EAA6E;AAC7E,+EAA+E;AAC/E,cAAc;AACD,QAAA,aAAa,GAAG,IAAI,CAAA"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts b/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts new file mode 100644 index 0000000000..23a38b7ebf --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.d.ts @@ -0,0 +1,74 @@ +import { TransferProgressEvent } from '@azure/ms-rest-js'; +import { DownloadOptions } from '../options'; +/** + * Class for tracking the download state and displaying stats. + */ +export declare class DownloadProgress { + contentLength: number; + segmentIndex: number; + segmentSize: number; + segmentOffset: number; + receivedBytes: number; + startTime: number; + displayedComplete: boolean; + timeoutHandle?: ReturnType; + constructor(contentLength: number); + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize: number): void; + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes: number): void; + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes(): number; + /** + * Returns true if the download is complete. + */ + isDone(): boolean; + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display(): void; + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress(): (progress: TransferProgressEvent) => void; + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs?: number): void; + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer(): void; +} +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +export declare function downloadCacheHttpClient(archiveLocation: string, archivePath: string): Promise; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. 
+ * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +export declare function downloadCacheStorageSDK(archiveLocation: string, archivePath: string, options: DownloadOptions): Promise; diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.js b/node_modules/@actions/cache/lib/internal/downloadUtils.js new file mode 100644 index 0000000000..7c88504e42 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.js @@ -0,0 +1,231 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const storage_blob_1 = require("@azure/storage-blob"); +const buffer = __importStar(require("buffer")); +const fs = __importStar(require("fs")); +const stream = __importStar(require("stream")); +const util = __importStar(require("util")); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +const requestUtils_1 = require("./requestUtils"); +/** + * Pipes the body of a HTTP response to a stream + * + * @param response the HTTP response + * @param output the writable stream + */ +function pipeResponseToStream(response, output) { + return __awaiter(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); +} +/** + * Class for tracking the download state and displaying stats. + */ +class DownloadProgress { + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); + } + /** + * Progress to the next segment. Only call this method when the previous segment + * is complete. + * + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + } + /** + * Sets the number of bytes received for the current segment. + * + * @param receivedBytes the number of bytes received + */ + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; + } + /** + * Returns the total number of bytes transferred. 
+ */ + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; + } + /** + * Returns true if the download is complete. + */ + isDone() { + return this.getTransferredBytes() === this.contentLength; + } + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / + (1024 * 1024) / + (elapsedTime / 1000)).toFixed(1); + core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } + } + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. + * + * @param delayInMs the delay between each write + */ + startDisplayTimer(delayInMs = 1000) { + const displayCallback = () => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + }; + this.timeoutHandle = setTimeout(displayCallback, delayInMs); + } + /** + * Stops the timer that displays the stats. As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. + */ + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = undefined; + } + this.display(); + } +} +exports.DownloadProgress = DownloadProgress; +/** + * Download the cache using the Actions toolkit http-client + * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + */ +function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const writeStream = fs.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient('actions/cache'); + const downloadResponse = yield requestUtils_1.retryHttpClientResponse('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers['content-length']; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug('Unable to validate download, no Content-Length header'); + } + }); +} +exports.downloadCacheHttpClient = downloadCacheHttpClient; +/** + * Download the cache using the Azure Storage SDK. Only call this method if the + * URL points to an Azure Storage endpoint. 
+ * + * @param archiveLocation the URL for the cache + * @param archivePath the local path where the cache is saved + * @param options the download options with the defaults set + */ +function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; + if (contentLength < 0) { + // We should never hit this condition, but just in case fall back to downloading the + // file as one large stream + core.debug('Unable to determine content length, downloading file with http-client...'); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } + else { + // Use downloadToBuffer for faster downloads, since internally it splits the + // file into 4 MB chunks which can then be parallelized and retried independently + // + // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB + // on 64-bit systems), split the download into multiple segments + // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly. + const maxSegmentSize = Math.min(2147483647, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs.openSync(archivePath, 'w'); + try { + downloadProgress.startDisplayTimer(); + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield client.downloadToBuffer(segmentStart, segmentSize, { + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + }); + fs.writeFileSync(fd, result); + } + } + finally { + downloadProgress.stopDisplayTimer(); + fs.closeSync(fd); + } + } + }); +} +exports.downloadCacheStorageSDK = downloadCacheStorageSDK; +//# sourceMappingURL=downloadUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/downloadUtils.js.map b/node_modules/@actions/cache/lib/internal/downloadUtils.js.map new file mode 100644 index 0000000000..2a308f2bee --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/downloadUtils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"downloadUtils.js","sourceRoot":"","sources":["../../src/internal/downloadUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAAmE;AACnE,sDAAmD;AAEnD,+CAAgC;AAChC,uCAAwB;AACxB,+CAAgC;AAChC,2CAA4B;AAE5B,oDAAqC;AACrC,2CAAyC;AAEzC,iDAAsD;AAEtD;;;;;GAKG;AACH,SAAe,oBAAoB,CACjC,QAA4B,EAC5B,MAA6B;;QAE7B,MAAM,QAAQ,GAAG,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAA;QAChD,MAAM,QAAQ,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;IAC1C,CAAC;CAAA;AAED;;GAEG;AACH,MAAa,gBAAgB;IAU3B,YAAY,aAAqB;QAC/B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;QAClC,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;QACrB,IAAI,CAAC,WAAW,GAAG,CAAC,CAAA;QACpB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QACtB,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QACtB,IAAI,CAAC,iBAAiB,GAAG,KAAK,CAAA;QAC9B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;IAC7B,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAAmB;QAC7B,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,WAAW,CAAA;QAC1D,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;QACzC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAA;QAC9B,IAAI,CAAC,aAAa,GAAG,CAAC,CAAA;QAEtB,IAAI,CAAC,KAAK,CACR,iCAAiC,IAAI,CAAC,aAAa,gBAAgB,IAAI,CAAC,WAAW,KAAK,CACzF,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,gBAAgB,CAAC,aAAqB;QACpC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAA;IACpC,CAAC;IAED;;OAEG;IACH,mBAAmB;QACjB,OAAO,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAA;IAChD,CAAC;IAED;;OAEG;IACH,MAAM;QACJ,OAAO,IAAI,CAAC,mBAAmB,EAAE,KAAK,IAAI,CAAC,aAAa,CAAA;IAC1D,CAAC;IAED;;;OAGG;IACH,OAAO;QACL,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,OAAM;SACP;QAED,MAAM,gBAAgB,GAAG,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAA;QAChE,MAAM,UAAU,GAAG,CAAC,GAAG,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,OAAO,CACxE,CAAC,CACF,CAAA;QACD,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,SAAS,CAAA;QAC/C,MAAM,aAAa,GAAG,CACpB,gBAAgB;YAChB,CAAC,IAAI,GAAG,IAAI,CAAC;YACb,CAAC,WAAW,GAAG,IAAI,CAAC,CACrB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;QAEZ,IAAI,CAAC,IAAI,CACP,YAAY,gBAAgB,OAAO,IAAI,CAAC,aAAa,KAAK,UAAU,OAAO,aAAa,UAAU,CACnG,CAAA;QAED,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAA;SAC9B;IACH,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,QAA+B,EAAE,EAAE;YACzC,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAA;QAC7C,CAAC,CAAA;IACH,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CAAC,SAAS,GAAG,IAAI;QAChC,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAC,OAAO,EAAE,CAAA;YAEd,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;aAC5D;QACH,CAAC,CAAA;QAED,IAAI,CAAC,aAAa,GAAG,UAAU,CAAC,eAAe,EAAE,SAAS,CAAC,CAAA;IAC7D,CAAC;IAED;;;;OAIG;IACH,gBAAgB;QACd,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,YAAY,CAAC,IAAI,CAAC,aAAa,CAAC,CAAA;YAChC,IAAI,CAAC,aAAa,GAAG,SAAS,CAAA;SAC/B;QAED,IAAI,CAAC,OAAO,EAAE,CAAA;IAChB,CAAC;CACF;AAhID,4CAgIC;AAED;;;;;GAKG;AACH,SAAsB,uBAAuB,CAC3C,eAAuB,EACvB,WAAmB;;QAEnB,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAA;QACrD,MAAM,UAAU,GAAG,IAAI,wBAAU,CAAC,eAAe,CAAC,CAAA;QAClD,MAAM,gBAAgB,GAAG,MAAM,sCAAuB,CACpD,eAAe,EACf,GAAS,EAAE,gDAAC,OAAA,UAAU,CAAC,GAAG,CAAC,eAAe,CAAC,CAAA,GAAA,CAC5C,CAAA;QAED,yDAAyD;QACzD,gBAAgB,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,CAAC,yBAAa,EAAE,GAAG,EAAE;YAC7D,gBAAgB,CAAC,OAAO,CAAC,OAAO,EAAE,CAAA;YAClC,IAAI,CAAC,KAAK,CAAC,6CAA6C,yBAAa,KAAK,CAAC,CAAA;QAC7E,CAAC,CAAC,CAAA;QAEF,MAAM,oBAAoB,CAAC,gBAAgB,EAAE,WAAW,CAAC,CAAA;QAEzD,0BAA0B;QAC1B,MAAM,mBAAmB,GAAG,gBAAgB,CAAC,OAAO,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;QAE9E,IAAI,mBAAmB,EAAE;YACvB,MAAM,cAAc,GAAG,QAAQ,CAAC,mBAAmB,CAAC,CAAA;YACpD,MAAM,YAAY,GAAG,KAAK,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAA;YAEjE,IAAI,YAAY,KAAK,cAAc,EAAE;gBACnC,MAAM,IAAI,KAAK,CACb,4CAA4C,cAAc,uBAAuB,YAAY,EAAE,CAChG,CAAA;aACF;SACF;aAAM;YACL,IAAI,CAAC,KAAK,CAAC,uDAAuD,CAAC,CAAA;SACpE;IACH,CAAC;CAAA;A
AlCD,0DAkCC;AAED;;;;;;;GAOG;AACH,SAAsB,uBAAuB,CAC3C,eAAuB,EACvB,WAAmB,EACnB,OAAwB;;;QAExB,MAAM,MAAM,GAAG,IAAI,8BAAe,CAAC,eAAe,EAAE,SAAS,EAAE;YAC7D,YAAY,EAAE;gBACZ,6DAA6D;gBAC7D,mDAAmD;gBACnD,cAAc,EAAE,OAAO,CAAC,WAAW;aACpC;SACF,CAAC,CAAA;QAEF,MAAM,UAAU,GAAG,MAAM,MAAM,CAAC,aAAa,EAAE,CAAA;QAC/C,MAAM,aAAa,SAAG,UAAU,CAAC,aAAa,mCAAI,CAAC,CAAC,CAAA;QAEpD,IAAI,aAAa,GAAG,CAAC,EAAE;YACrB,oFAAoF;YACpF,2BAA2B;YAC3B,IAAI,CAAC,KAAK,CACR,0EAA0E,CAC3E,CAAA;YAED,MAAM,uBAAuB,CAAC,eAAe,EAAE,WAAW,CAAC,CAAA;SAC5D;aAAM;YACL,4EAA4E;YAC5E,iFAAiF;YACjF,EAAE;YACF,mFAAmF;YACnF,gEAAgE;YAChE,oGAAoG;YACpG,MAAM,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,MAAM,CAAC,SAAS,CAAC,UAAU,CAAC,CAAA;YACxE,MAAM,gBAAgB,GAAG,IAAI,gBAAgB,CAAC,aAAa,CAAC,CAAA;YAE5D,MAAM,EAAE,GAAG,EAAE,CAAC,QAAQ,CAAC,WAAW,EAAE,GAAG,CAAC,CAAA;YAExC,IAAI;gBACF,gBAAgB,CAAC,iBAAiB,EAAE,CAAA;gBAEpC,OAAO,CAAC,gBAAgB,CAAC,MAAM,EAAE,EAAE;oBACjC,MAAM,YAAY,GAChB,gBAAgB,CAAC,aAAa,GAAG,gBAAgB,CAAC,WAAW,CAAA;oBAE/D,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAC1B,cAAc,EACd,aAAa,GAAG,YAAY,CAC7B,CAAA;oBAED,gBAAgB,CAAC,WAAW,CAAC,WAAW,CAAC,CAAA;oBAEzC,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,gBAAgB,CAC1C,YAAY,EACZ,WAAW,EACX;wBACE,WAAW,EAAE,OAAO,CAAC,mBAAmB;wBACxC,UAAU,EAAE,gBAAgB,CAAC,UAAU,EAAE;qBAC1C,CACF,CAAA;oBAED,EAAE,CAAC,aAAa,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;iBAC7B;aACF;oBAAS;gBACR,gBAAgB,CAAC,gBAAgB,EAAE,CAAA;gBACnC,EAAE,CAAC,SAAS,CAAC,EAAE,CAAC,CAAA;aACjB;SACF;;CACF;AAlED,0DAkEC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.d.ts b/node_modules/@actions/cache/lib/internal/requestUtils.d.ts new file mode 100644 index 0000000000..5ca50849a5 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.d.ts @@ -0,0 +1,8 @@ +import { HttpClientResponse } from '@actions/http-client'; +import { ITypedResponseWithError } from './contracts'; +export declare function isSuccessStatusCode(statusCode?: number): boolean; +export declare function isServerErrorStatusCode(statusCode?: number): boolean; +export declare function isRetryableStatusCode(statusCode?: number): boolean; +export declare function retry(name: string, method: () => Promise, getStatusCode: (arg0: T) => number | undefined, maxAttempts?: number, delay?: number, onError?: ((arg0: Error) => T | undefined) | undefined): Promise; +export declare function retryTypedResponse(name: string, method: () => Promise>, maxAttempts?: number, delay?: number): Promise>; +export declare function retryHttpClientResponse(name: string, method: () => Promise, maxAttempts?: number, delay?: number): Promise; diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.js b/node_modules/@actions/cache/lib/internal/requestUtils.js new file mode 100644 index 0000000000..42c2aba703 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.js @@ -0,0 +1,120 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const http_client_1 = require("@actions/http-client"); +const constants_1 = require("./constants"); +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} +exports.isServerErrorStatusCode = isServerErrorStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); + }); +} +function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { + return __awaiter(this, void 0, void 0, function* () { + let errorMessage = ''; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + try { + response = yield method(); + } + catch (error) { + if (onError) { + response = onError(error); + } + isRetryable = true; + errorMessage = error.message; + } + if (response) { + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay); + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, + // If the error object contains the statusCode property, extract it and return + // an TypedResponse so it can be processed by the retry logic. 
+ (error) => { + if (error instanceof http_client_1.HttpClientError) { + return { + statusCode: error.statusCode, + result: null, + headers: {}, + error + }; + } + else { + return undefined; + } + }); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; +//# sourceMappingURL=requestUtils.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/requestUtils.js.map b/node_modules/@actions/cache/lib/internal/requestUtils.js.map new file mode 100644 index 0000000000..67cfcaa3f5 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/requestUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../src/internal/requestUtils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,sDAI6B;AAC7B,2CAAmE;AAGnE,SAAgB,mBAAmB,CAAC,UAAmB;IACrD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,OAAO,UAAU,IAAI,GAAG,IAAI,UAAU,GAAG,GAAG,CAAA;AAC9C,CAAC;AALD,kDAKC;AAED,SAAgB,uBAAuB,CAAC,UAAmB;IACzD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,IAAI,CAAA;KACZ;IACD,OAAO,UAAU,IAAI,GAAG,CAAA;AAC1B,CAAC;AALD,0DAKC;AAED,SAAgB,qBAAqB,CAAC,UAAmB;IACvD,IAAI,CAAC,UAAU,EAAE;QACf,OAAO,KAAK,CAAA;KACb;IACD,MAAM,oBAAoB,GAAG;QAC3B,uBAAS,CAAC,UAAU;QACpB,uBAAS,CAAC,kBAAkB;QAC5B,uBAAS,CAAC,cAAc;KACzB,CAAA;IACD,OAAO,oBAAoB,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAA;AAClD,CAAC;AAVD,sDAUC;AAED,SAAe,KAAK,CAAC,YAAoB;;QACvC,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAA;IAClE,CAAC;CAAA;AAED,SAAsB,KAAK,CACzB,IAAY,EACZ,MAAwB,EACxB,aAA8C,EAC9C,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB,EACzB,UAAwD,SAAS;;QAEjE,IAAI,YAAY,GAAG,EAAE,CAAA;QACrB,IAAI,OAAO,GAAG,CAAC,CAAA;QAEf,OAAO,OAAO,IAAI,WAAW,EAAE;YAC7B,IAAI,QAAQ,GAAkB,SAAS,CAAA;YACvC,IAAI,UAAU,GAAuB,SAAS,CAAA;YAC9C,IAAI,WAAW,GAAG,KAAK,CAAA;YAEvB,IAAI;gBACF,QAAQ,GAAG,MAAM,MAAM,EAAE,CAAA;aAC1B;YAAC,OAAO,KAAK,EAAE;gBACd,IAAI,OAAO,EAAE;oBACX,QAAQ,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;iBAC1B;gBAED,WAAW,GAAG,IAAI,CAAA;gBAClB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAA;aAC7B;YAED,IAAI,QAAQ,EAAE;gBACZ,UAAU,GAAG,aAAa,CAAC,QAAQ,CAAC,CAAA;gBAEpC,IAAI,CAAC,uBAAuB,CAAC,UAAU,CAAC,EAAE;oBACxC,OAAO,QAAQ,CAAA;iBAChB;aACF;YAED,IAAI,UAAU,EAAE;gBACd,WAAW,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAA;gBAC/C,YAAY,GAAG,gCAAgC,UAAU,EAAE,CAAA;aAC5D;YAED,IAAI,CAAC,KAAK,CACR,GAAG,IAAI,cAAc,OAAO,OAAO,WAAW,uBAAuB,YAAY,EAAE,CACpF,CAAA;YAED,IAAI,CAAC,WAAW,EAAE;gBAChB,IAAI,CAAC,KAAK,CAAC,GAAG,IAAI,2BAA2B,CAAC,CAAA;gBAC9C,MAAK;aACN;YAED,MAAM,KAAK,CAAC,KAAK,CAAC,CAAA;YAClB,OAAO,EAAE,CAAA;SACV;QAED,MAAM,KAAK,CAAC,GAAG,IAAI,YAAY,YAAY,EAAE,CAAC,CAAA;IAChD,CAAC;CAAA;AAtDD,sBAsDC;AAED,SAAsB,kBAAkB,CACtC,IAAY,EACZ,MAAiD,EACjD,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAAoC,EAAE,EAAE,CAAC,QAAQ,CAAC,UAAU,EAC7D,WAAW,EACX,KAAK;QACL,8EAA8E;QAC9E,iEAAiE;QACjE,CAAC,KAAY,EAAE,EAAE;YACf,IAAI,KAAK,YAAY,6BAAe,EAAE;gBACpC,OAAO;oBACL,UAAU,EAAE,KAAK,CAAC,UAAU;oBAC5B,MAAM,EAAE,IAAI;oBACZ,OAAO,EAAE,EAAE;oBACX,KAAK;iBACN,CAAA;aACF;iBAAM;gBACL,OAAO,SAAS,CAAA;aACjB;QACH,CAAC,CACF,CAAA;IACH,CAAC;CAAA;AA3BD,gDA2BC;AAED,SAAsB,uBAAuB,CAC3C,IAAY,EACZ,MAAyC,EACzC,WAAW,GAAG,gCAAoB,EAClC,KAAK,GAAG,6BAAiB;;QAEzB,OAAO,MAAM,KAAK,CAChB,IAAI,EACJ,MAAM,EACN,CAAC,QAA4B,EAAE,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,
UAAU,EAC7D,WAAW,EACX,KAAK,CACN,CAAA;IACH,CAAC;CAAA;AAbD,0DAaC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/tar.d.ts b/node_modules/@actions/cache/lib/internal/tar.d.ts new file mode 100644 index 0000000000..027b2e17af --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.d.ts @@ -0,0 +1,4 @@ +import { CompressionMethod } from './constants'; +export declare function extractTar(archivePath: string, compressionMethod: CompressionMethod): Promise; +export declare function createTar(archiveFolder: string, sourceDirectories: string[], compressionMethod: CompressionMethod): Promise; +export declare function listTar(archivePath: string, compressionMethod: CompressionMethod): Promise; diff --git a/node_modules/@actions/cache/lib/internal/tar.js b/node_modules/@actions/cache/lib/internal/tar.js new file mode 100644 index 0000000000..202aeb5a5e --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.js @@ -0,0 +1,166 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const exec_1 = require("@actions/exec"); +const io = __importStar(require("@actions/io")); +const fs_1 = require("fs"); +const path = __importStar(require("path")); +const utils = __importStar(require("./cacheUtils")); +const constants_1 = require("./constants"); +function getTarPath(args, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + switch (process.platform) { + case 'win32': { + const systemTar = `${process.env['windir']}\\System32\\tar.exe`; + if (compressionMethod !== constants_1.CompressionMethod.Gzip) { + // We only use zstandard compression on windows when gnu tar is installed due to + // a bug with compressing large files with bsdtar + zstd + args.push('--force-local'); + } + else if (fs_1.existsSync(systemTar)) { + return systemTar; + } + else if (yield utils.isGnuTarInstalled()) { + args.push('--force-local'); + } + break; + } + case 'darwin': { + const gnuTar = yield io.which('gtar', false); + if (gnuTar) { + // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 + args.push('--delay-directory-restore'); + return gnuTar; + } + break; + } + default: + break; + } + return yield io.which('tar', true); + }); +} +function execTar(args, compressionMethod, cwd) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); + } + catch (error) { + throw new Error(`Tar failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); + } + }); +} +function getWorkingDirectory() { + var _a; + return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); +} +function extractTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-xf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') + ]; + yield execTar(args, compressionMethod); + }); +} +exports.extractTar = extractTar; +function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = 'manifest.txt'; + const cacheFileName = utils.getCacheFileName(compressionMethod); + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); + const workingDirectory = getWorkingDirectory(); + // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. + // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. + // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. + function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -T0 --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -T0']; + default: + return ['-z']; + } + } + const args = [ + '--posix', + ...getCompressionProgram(), + '-cf', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--exclude', + cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P', + '-C', + workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '--files-from', + manifestFilename + ]; + yield execTar(args, compressionMethod, archiveFolder); + }); +} +exports.createTar = createTar; +function listTar(archivePath, compressionMethod) { + return __awaiter(this, void 0, void 0, function* () { + // --d: Decompress. + // --long=#: Enables long distance matching with # bits. + // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. + // Using 30 here because we also support 32-bit self-hosted runners. 
+ function getCompressionProgram() { + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return ['--use-compress-program', 'zstd -d --long=30']; + case constants_1.CompressionMethod.ZstdWithoutLong: + return ['--use-compress-program', 'zstd -d']; + default: + return ['-z']; + } + } + const args = [ + ...getCompressionProgram(), + '-tf', + archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), + '-P' + ]; + yield execTar(args, compressionMethod); + }); +} +exports.listTar = listTar; +//# sourceMappingURL=tar.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/internal/tar.js.map b/node_modules/@actions/cache/lib/internal/tar.js.map new file mode 100644 index 0000000000..a7181ce8e2 --- /dev/null +++ b/node_modules/@actions/cache/lib/internal/tar.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tar.js","sourceRoot":"","sources":["../../src/internal/tar.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;AAAA,wCAAkC;AAClC,gDAAiC;AACjC,2BAA4C;AAC5C,2CAA4B;AAC5B,oDAAqC;AACrC,2CAA6C;AAE7C,SAAe,UAAU,CACvB,IAAc,EACd,iBAAoC;;QAEpC,QAAQ,OAAO,CAAC,QAAQ,EAAE;YACxB,KAAK,OAAO,CAAC,CAAC;gBACZ,MAAM,SAAS,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,qBAAqB,CAAA;gBAC/D,IAAI,iBAAiB,KAAK,6BAAiB,CAAC,IAAI,EAAE;oBAChD,gFAAgF;oBAChF,wDAAwD;oBACxD,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;iBAC3B;qBAAM,IAAI,eAAU,CAAC,SAAS,CAAC,EAAE;oBAChC,OAAO,SAAS,CAAA;iBACjB;qBAAM,IAAI,MAAM,KAAK,CAAC,iBAAiB,EAAE,EAAE;oBAC1C,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAA;iBAC3B;gBACD,MAAK;aACN;YACD,KAAK,QAAQ,CAAC,CAAC;gBACb,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;gBAC5C,IAAI,MAAM,EAAE;oBACV,0HAA0H;oBAC1H,IAAI,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAA;oBACtC,OAAO,MAAM,CAAA;iBACd;gBACD,MAAK;aACN;YACD;gBACE,MAAK;SACR;QACD,OAAO,MAAM,EAAE,CAAC,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;IACpC,CAAC;CAAA;AAED,SAAe,OAAO,CACpB,IAAc,EACd,iBAAoC,EACpC,GAAY;;QAEZ,IAAI;YACF,MAAM,WAAI,CAAC,IAAI,MAAM,UAAU,CAAC,IAAI,EAAE,iBAAiB,CAAC,GAAG,EAAE,IAAI,EAAE,EAAC,GAAG,EAAC,CAAC,CAAA;SAC1E;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,0BAA0B,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,EAAE,CAAC,CAAA;SAC5D;IACH,CAAC;CAAA;AAED,SAAS,mBAAmB;;IAC1B,aAAO,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,mCAAI,OAAO,CAAC,GAAG,EAAE,CAAA;AACzD,CAAC;AAED,SAAsB,UAAU,CAC9B,WAAmB,EACnB,iBAAoC;;QAEpC,uCAAuC;QACvC,MAAM,gBAAgB,GAAG,mBAAmB,EAAE,CAAA;QAC9C,MAAM,EAAE,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAA;QACjC,mBAAmB;QACnB,iHAAiH;QACjH,oEAAoE;QACpE,SAAS,qBAAqB;YAC5B,QAAQ,iBAAiB,EAAE;gBACzB,KAAK,6BAAiB,CAAC,IAAI;oBACzB,OAAO,CAAC,wBAAwB,EAAE,mBAAmB,CAAC,CAAA;gBACxD,KAAK,6BAAiB,CAAC,eAAe;oBACpC,OAAO,CAAC,wBAAwB,EAAE,SAAS,CAAC,CAAA;gBAC9C;oBACE,OAAO,CAAC,IAAI,CAAC,CAAA;aAChB;QACH,CAAC;QACD,MAAM,IAAI,GAAG;YACX,GAAG,qBAAqB,EAAE;YAC1B,KAAK;YACL,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC1D,IAAI;YACJ,IAAI;YACJ,gBAAgB,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;SAChE,CAAA;QACD,MAAM,OAAO,CAAC,IAAI,EAAE,iBAAiB,CAAC,CAAA;IACxC,CAAC;CAAA;AA7BD,gCA6BC;AAED,SAAsB,SAAS,CAC7B,aAAqB,EACrB,iBAA2B,EAC3B,iBAAoC;;QAEpC,0EAA0E;QAC1E,MAAM,gBAAgB,GAAG,cAAc,CAAA;QACvC,MAAM,aAAa,GAAG,KAAK,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,CAAA;QAC/D,kBAAa,CACX,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE,gBAAgB,CAAC,EAC1C,iBAAiB,CAAC,IAAI,CAAC,IAAI,CAAC,CAC7B,CAAA;QACD,MAAM,gBAAgB,GAAG,mBAAmB,EAAE,CAAA;QAE9C,+GAA+G;QAC/G,iHAAiH;QACjH,oEAAoE;QACpE,0GAA0G;QAC1G,SAAS,qBAAqB;YAC5B,QAAQ,iBAAiB,EAAE;gBACzB,KAAK,6BAAiB,CAAC,IAAI;oBACzB,OAAO,CAAC,wBAAwB,EAAE,oBAAoB,CAAC,CAAA;gBACzD,KAAK,6BAAiB,CAAC,eAAe;oBACpC,OAAO,CAAC,wBAAwB,EAAE,UAAU,CAAC,CAAA;gBAC/C;oBACE,OA
AO,CAAC,IAAI,CAAC,CAAA;aAChB;QACH,CAAC;QACD,MAAM,IAAI,GAAG;YACX,SAAS;YACT,GAAG,qBAAqB,EAAE;YAC1B,KAAK;YACL,aAAa,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC5D,WAAW;YACX,aAAa,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC5D,IAAI;YACJ,IAAI;YACJ,gBAAgB,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC/D,cAAc;YACd,gBAAgB;SACjB,CAAA;QACD,MAAM,OAAO,CAAC,IAAI,EAAE,iBAAiB,EAAE,aAAa,CAAC,CAAA;IACvD,CAAC;CAAA;AA1CD,8BA0CC;AAED,SAAsB,OAAO,CAC3B,WAAmB,EACnB,iBAAoC;;QAEpC,mBAAmB;QACnB,wDAAwD;QACxD,2DAA2D;QAC3D,oEAAoE;QACpE,SAAS,qBAAqB;YAC5B,QAAQ,iBAAiB,EAAE;gBACzB,KAAK,6BAAiB,CAAC,IAAI;oBACzB,OAAO,CAAC,wBAAwB,EAAE,mBAAmB,CAAC,CAAA;gBACxD,KAAK,6BAAiB,CAAC,eAAe;oBACpC,OAAO,CAAC,wBAAwB,EAAE,SAAS,CAAC,CAAA;gBAC9C;oBACE,OAAO,CAAC,IAAI,CAAC,CAAA;aAChB;QACH,CAAC;QACD,MAAM,IAAI,GAAG;YACX,GAAG,qBAAqB,EAAE;YAC1B,KAAK;YACL,WAAW,CAAC,OAAO,CAAC,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC;YAC1D,IAAI;SACL,CAAA;QACD,MAAM,OAAO,CAAC,IAAI,EAAE,iBAAiB,CAAC,CAAA;IACxC,CAAC;CAAA;AAzBD,0BAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/options.d.ts b/node_modules/@actions/cache/lib/options.d.ts new file mode 100644 index 0000000000..0cb0d6e12f --- /dev/null +++ b/node_modules/@actions/cache/lib/options.d.ts @@ -0,0 +1,56 @@ +/** + * Options to control cache upload + */ +export interface UploadOptions { + /** + * Number of parallel cache upload + * + * @default 4 + */ + uploadConcurrency?: number; + /** + * Maximum chunk size in bytes for cache upload + * + * @default 32MB + */ + uploadChunkSize?: number; +} +/** + * Options to control cache download + */ +export interface DownloadOptions { + /** + * Indicates whether to use the Azure Blob SDK to download caches + * that are stored on Azure Blob Storage to improve reliability and + * performance + * + * @default true + */ + useAzureSdk?: boolean; + /** + * Number of parallel downloads (this option only applies when using + * the Azure SDK) + * + * @default 8 + */ + downloadConcurrency?: number; + /** + * Maximum time for each download request, in milliseconds (this + * option only applies when using the Azure SDK) + * + * @default 30000 + */ + timeoutInMs?: number; +} +/** + * Returns a copy of the upload options with defaults filled in. + * + * @param copy the original upload options + */ +export declare function getUploadOptions(copy?: UploadOptions): UploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +export declare function getDownloadOptions(copy?: DownloadOptions): DownloadOptions; diff --git a/node_modules/@actions/cache/lib/options.js b/node_modules/@actions/cache/lib/options.js new file mode 100644 index 0000000000..1a7877a60e --- /dev/null +++ b/node_modules/@actions/cache/lib/options.js @@ -0,0 +1,62 @@ +"use strict"; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +/** + * Returns a copy of the upload options with defaults filled in. 
+ * + * @param copy the original upload options + */ +function getUploadOptions(copy) { + const result = { + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.uploadConcurrency === 'number') { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === 'number') { + result.uploadChunkSize = copy.uploadChunkSize; + } + } + core.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; +} +exports.getUploadOptions = getUploadOptions; +/** + * Returns a copy of the download options with defaults filled in. + * + * @param copy the original download options + */ +function getDownloadOptions(copy) { + const result = { + useAzureSdk: true, + downloadConcurrency: 8, + timeoutInMs: 30000 + }; + if (copy) { + if (typeof copy.useAzureSdk === 'boolean') { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.downloadConcurrency === 'number') { + result.downloadConcurrency = copy.downloadConcurrency; + } + if (typeof copy.timeoutInMs === 'number') { + result.timeoutInMs = copy.timeoutInMs; + } + } + core.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core.debug(`Download concurrency: ${result.downloadConcurrency}`); + core.debug(`Request timeout (ms): ${result.timeoutInMs}`); + return result; +} +exports.getDownloadOptions = getDownloadOptions; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/@actions/cache/lib/options.js.map b/node_modules/@actions/cache/lib/options.js.map new file mode 100644 index 0000000000..2cda122ed8 --- /dev/null +++ b/node_modules/@actions/cache/lib/options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"options.js","sourceRoot":"","sources":["../src/options.ts"],"names":[],"mappings":";;;;;;;;;AAAA,oDAAqC;AAkDrC;;;;GAIG;AACH,SAAgB,gBAAgB,CAAC,IAAoB;IACnD,MAAM,MAAM,GAAkB;QAC5B,iBAAiB,EAAE,CAAC;QACpB,eAAe,EAAE,EAAE,GAAG,IAAI,GAAG,IAAI;KAClC,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,iBAAiB,KAAK,QAAQ,EAAE;YAC9C,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAA;SAClD;QAED,IAAI,OAAO,IAAI,CAAC,eAAe,KAAK,QAAQ,EAAE;YAC5C,MAAM,CAAC,eAAe,GAAG,IAAI,CAAC,eAAe,CAAA;SAC9C;KACF;IAED,IAAI,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,iBAAiB,EAAE,CAAC,CAAA;IAC7D,IAAI,CAAC,KAAK,CAAC,sBAAsB,MAAM,CAAC,eAAe,EAAE,CAAC,CAAA;IAE1D,OAAO,MAAM,CAAA;AACf,CAAC;AApBD,4CAoBC;AAED;;;;GAIG;AACH,SAAgB,kBAAkB,CAAC,IAAsB;IACvD,MAAM,MAAM,GAAoB;QAC9B,WAAW,EAAE,IAAI;QACjB,mBAAmB,EAAE,CAAC;QACtB,WAAW,EAAE,KAAK;KACnB,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,SAAS,EAAE;YACzC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;YAChD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;SACtD;QAED,IAAI,OAAO,IAAI,CAAC,WAAW,KAAK,QAAQ,EAAE;YACxC,MAAM,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,CAAA;SACtC;KACF;IAED,IAAI,CAAC,KAAK,CAAC,kBAAkB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAClD,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,mBAAmB,EAAE,CAAC,CAAA;IACjE,IAAI,CAAC,KAAK,CAAC,yBAAyB,MAAM,CAAC,WAAW,EAAE,CAAC,CAAA;IAEzD,OAAO,MAAM,CAAA;AACf,CAAC;AA1BD,gDA0BC"} \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/.bin/semver b/node_modules/@actions/cache/node_modules/.bin/semver new file mode 120000 index 0000000000..5aaadf42c4 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver.js \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/.bin/uuid b/node_modules/@actions/cache/node_modules/.bin/uuid new file 
mode 120000 index 0000000000..b3e45bc531 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/.bin/uuid @@ -0,0 +1 @@ +../uuid/bin/uuid \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/semver/CHANGELOG.md b/node_modules/@actions/cache/node_modules/semver/CHANGELOG.md new file mode 100644 index 0000000000..f567dd3fe7 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/semver/CHANGELOG.md @@ -0,0 +1,70 @@ +# changes log + +## 6.2.0 + +* Coerce numbers to strings when passed to semver.coerce() +* Add `rtl` option to coerce from right to left + +## 6.1.3 + +* Handle X-ranges properly in includePrerelease mode + +## 6.1.2 + +* Do not throw when testing invalid version strings + +## 6.1.1 + +* Add options support for semver.coerce() +* Handle undefined version passed to Range.test + +## 6.1.0 + +* Add semver.compareBuild function +* Support `*` in semver.intersects + +## 6.0 + +* Fix `intersects` logic. + + This is technically a bug fix, but since it is also a change to behavior + that may require users updating their code, it is marked as a major + version increment. + +## 5.7 + +* Add `minVersion` method + +## 5.6 + +* Move boolean `loose` param to an options object, with + backwards-compatibility protection. +* Add ability to opt out of special prerelease version handling with + the `includePrerelease` option flag. + +## 5.5 + +* Add version coercion capabilities + +## 5.4 + +* Add intersection checking + +## 5.3 + +* Add `minSatisfying` method + +## 5.2 + +* Add `prerelease(v)` that returns prerelease components + +## 5.1 + +* Add Backus-Naur for ranges +* Remove excessively cute inspection methods + +## 5.0 + +* Remove AMD/Browserified build artifacts +* Fix ltr and gtr when using the `*` range +* Fix for range `*` with a prerelease identifier diff --git a/node_modules/@actions/cache/node_modules/semver/LICENSE b/node_modules/@actions/cache/node_modules/semver/LICENSE new file mode 100644 index 0000000000..19129e315f --- /dev/null +++ b/node_modules/@actions/cache/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
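
For orientation, a minimal sketch of how the vendored `@actions/cache` package added above is typically consumed, assuming its documented `restoreCache`/`saveCache` entry points; the cached paths and keys below are purely hypothetical:

```js
const cache = require('@actions/cache')

async function run() {
  const paths = ['node_modules']                    // hypothetical paths to cache
  const key = `npm-${process.platform}-lockhash`    // hypothetical primary key
  const restoreKeys = [`npm-${process.platform}-`]  // hypothetical fallback prefixes

  // restoreCache resolves to the matched key, or undefined on a cache miss.
  const hitKey = await cache.restoreCache(paths, key, restoreKeys)
  if (!hitKey) {
    // ...install dependencies here, then save the result for the next run.
    await cache.saveCache(paths, key)
  }
}

run().catch(err => console.error(err))
```
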
diff --git a/node_modules/@actions/cache/node_modules/semver/README.md b/node_modules/@actions/cache/node_modules/semver/README.md new file mode 100644 index 0000000000..2293a14fdc --- /dev/null +++ b/node_modules/@actions/cache/node_modules/semver/README.md @@ -0,0 +1,443 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] <version> [<version> [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range <range> + Print versions that match the specified range. + +-i --increment [<level>] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid <identifier> + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +<https://semver.org/>. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`.
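As a short sketch of the comparator-set rule just described, the same checks can be run through `semver.satisfies` (the version and range literals are taken from the example above):

```js
const semver = require('semver')

// Every comparator in the set `>=1.2.7 <1.3.0` must hold for a version to match.
semver.satisfies('1.2.8', '>=1.2.7 <1.3.0') // true  (meets both >=1.2.7 and <1.3.0)
semver.satisfies('1.2.6', '>=1.2.7 <1.3.0') // false (fails >=1.2.7)
semver.satisfies('1.3.0', '>=1.2.7 <1.3.0') // false (fails <1.3.0)
```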
+ +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for the purpose of range +matching) by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. + +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any version satisfies) +* `1.x` := `>=1.0.0 <2.0.0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. 
+ +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0` +* `^0.2.3` := `>=0.2.3 <0.3.0` +* `^0.0.3` := `>=0.0.3 <0.0.4` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0` +* `^0.0.x` := `>=0.0.0 <0.1.0` +* `^0.0` := `>=0.0.0 <0.1.0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0` +* `^0.x` := `>=0.0.0 <1.0.0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' 
part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose` Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease` Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. 
+* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can possibly match + the given range. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the ranges comparators intersect + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string, and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Integer.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. + +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. If the provided version is not valid a null will be returned. This does not work for ranges. + +ex. 
+* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `2.1.5` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` diff --git a/node_modules/@actions/cache/node_modules/semver/bin/semver.js b/node_modules/@actions/cache/node_modules/semver/bin/semver.js new file mode 100755 index 0000000000..666034a75d --- /dev/null +++ b/node_modules/@actions/cache/node_modules/semver/bin/semver.js @@ -0,0 +1,174 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. + +var argv = process.argv.slice(2) + +var versions = [] + +var range = [] + +var inc = null + +var version = require('../package.json').version + +var loose = false + +var includePrerelease = false + +var coerce = false + +var rtl = false + +var identifier + +var semver = require('../semver') + +var reverse = false + +var options = {} + +main() + +function main () { + if (!argv.length) return help() + while (argv.length) { + var a = argv.shift() + var indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + a = a.slice(0, indexOfEqualSign) + argv.unshift(a.slice(indexOfEqualSign + 1)) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + var options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl } + + versions = versions.map(function (v) { + return coerce ? (semver.coerce(v, options) || { version: v }).version : v + }).filter(function (v) { + return semver.valid(v) + }) + if (!versions.length) return fail() + if (inc && (versions.length !== 1 || range.length)) { return failInc() } + + for (var i = 0, l = range.length; i < l; i++) { + versions = versions.filter(function (v) { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) return fail() + } + return success(versions) +} + +function failInc () { + console.error('--inc can only be used on a single version with no range') + fail() +} + +function fail () { process.exit(1) } + +function success () { + var compare = reverse ? 'rcompare' : 'compare' + versions.sort(function (a, b) { + return semver[compare](a, b, options) + }).map(function (v) { + return semver.clean(v, options) + }).map(function (v) { + return inc ? 
semver.inc(v, inc, options, identifier) : v + }).forEach(function (v, i, _) { console.log(v) }) +} + +function help () { + console.log(['SemVer ' + version, + '', + 'A JavaScript implementation of the https://semver.org/ specification', + 'Copyright Isaac Z. Schlueter', + '', + 'Usage: semver [options] <version> [<version> [...]]', + 'Prints valid versions sorted by SemVer precedence', + '', + 'Options:', + '-r --range <range>', + ' Print versions that match the specified range.', + '', + '-i --increment [<level>]', + ' Increment a version by the specified level. Level can', + ' be one of: major, minor, patch, premajor, preminor,', + " prepatch, or prerelease. Default level is 'patch'.", + ' Only one version may be specified.', + '', + '--preid <identifier>', + ' Identifier to be used to prefix premajor, preminor,', + ' prepatch or prerelease version increments.', + '', + '-l --loose', + ' Interpret versions and ranges loosely', + '', + '-p --include-prerelease', + ' Always include prerelease versions in range matching', + '', + '-c --coerce', + ' Coerce a string into SemVer if possible', + ' (does not imply --loose)', + '', + '--rtl', + ' Coerce version strings right to left', + '', + '--ltr', + ' Coerce version strings left to right (default)', + '', + 'Program exits successfully if any valid version satisfies', + 'all supplied ranges, and prints all satisfying versions.', + '', + 'If no satisfying versions are found, then exits failure.', + '', + 'Versions are printed in ascending order, so supplying', + 'multiple versions to the utility will just sort them.' + ].join('\n')) +} diff --git a/node_modules/@actions/cache/node_modules/semver/package.json b/node_modules/@actions/cache/node_modules/semver/package.json new file mode 100644 index 0000000000..bdd442f500 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/semver/package.json @@ -0,0 +1,28 @@ +{ + "name": "semver", + "version": "6.3.0", + "description": "The semantic version parser used by npm.", + "main": "semver.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "devDependencies": { + "tap": "^14.3.1" + }, + "license": "ISC", + "repository": "https://github.com/npm/node-semver", + "bin": { + "semver": "./bin/semver.js" + }, + "files": [ + "bin", + "range.bnf", + "semver.js" + ], + "tap": { + "check-coverage": true + } +} diff --git a/node_modules/@actions/cache/node_modules/semver/range.bnf b/node_modules/@actions/cache/node_modules/semver/range.bnf new file mode 100644 index 0000000000..d4c6ae0d76 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.'
part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/@actions/cache/node_modules/semver/semver.js b/node_modules/@actions/cache/node_modules/semver/semver.js new file mode 100644 index 0000000000..636fa4365a --- /dev/null +++ b/node_modules/@actions/cache/node_modules/semver/semver.js @@ -0,0 +1,1596 @@ +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +var MAX_SAFE_COMPONENT_LENGTH = 16 + +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' + +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' + +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' 
+ src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' + +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' + +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? re[t.LOOSE] : re[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? 
re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. 
+SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + 
+Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[t.COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[t.STAR], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. 
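+// e.g. (illustrative, not in the upstream source): gtr('2.0.0', '~1.2.3') === true,
+// while gtr('1.2.9', '~1.2.3') === false because 1.2.9 still satisfies ~1.2.3.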
+exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = re[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + re[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(match[2] + + '.' + (match[3] || '0') + + '.' 
+ (match[4] || '0'), options) +} diff --git a/node_modules/@actions/cache/node_modules/uuid/AUTHORS b/node_modules/@actions/cache/node_modules/uuid/AUTHORS new file mode 100644 index 0000000000..5a10523062 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/AUTHORS @@ -0,0 +1,5 @@ +Robert Kieffer +Christoph Tavan +AJ ONeal +Vincent Voyer +Roman Shtylman diff --git a/node_modules/@actions/cache/node_modules/uuid/CHANGELOG.md b/node_modules/@actions/cache/node_modules/uuid/CHANGELOG.md new file mode 100644 index 0000000000..f811b8a0cb --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/CHANGELOG.md @@ -0,0 +1,119 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +## [3.4.0](https://github.com/uuidjs/uuid/compare/v3.3.3...v3.4.0) (2020-01-16) + + +### Features + +* rename repository to github:uuidjs/uuid ([#351](https://github.com/uuidjs/uuid/issues/351)) ([e2d7314](https://github.com/uuidjs/uuid/commit/e2d7314)), closes [#338](https://github.com/uuidjs/uuid/issues/338) + +### [3.3.3](https://github.com/uuidjs/uuid/compare/v3.3.2...v3.3.3) (2019-08-19) + + +## [3.3.2](https://github.com/uuidjs/uuid/compare/v3.3.1...v3.3.2) (2018-06-28) + + +### Bug Fixes + +* typo ([305d877](https://github.com/uuidjs/uuid/commit/305d877)) + + + + +## [3.3.1](https://github.com/uuidjs/uuid/compare/v3.3.0...v3.3.1) (2018-06-28) + + +### Bug Fixes + +* fix [#284](https://github.com/uuidjs/uuid/issues/284) by setting function name in try-catch ([f2a60f2](https://github.com/uuidjs/uuid/commit/f2a60f2)) + + + + +# [3.3.0](https://github.com/uuidjs/uuid/compare/v3.2.1...v3.3.0) (2018-06-22) + + +### Bug Fixes + +* assignment to readonly property to allow running in strict mode ([#270](https://github.com/uuidjs/uuid/issues/270)) ([d062fdc](https://github.com/uuidjs/uuid/commit/d062fdc)) +* fix [#229](https://github.com/uuidjs/uuid/issues/229) ([c9684d4](https://github.com/uuidjs/uuid/commit/c9684d4)) +* Get correct version of IE11 crypto ([#274](https://github.com/uuidjs/uuid/issues/274)) ([153d331](https://github.com/uuidjs/uuid/commit/153d331)) +* mem issue when generating uuid ([#267](https://github.com/uuidjs/uuid/issues/267)) ([c47702c](https://github.com/uuidjs/uuid/commit/c47702c)) + +### Features + +* enforce Conventional Commit style commit messages ([#282](https://github.com/uuidjs/uuid/issues/282)) ([cc9a182](https://github.com/uuidjs/uuid/commit/cc9a182)) + + + +## [3.2.1](https://github.com/uuidjs/uuid/compare/v3.2.0...v3.2.1) (2018-01-16) + + +### Bug Fixes + +* use msCrypto if available. Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + + + + +# [3.2.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.2.0) (2018-01-16) + + +### Bug Fixes + +* remove mistakenly added typescript dependency, rollback version (standard-version will auto-increment) ([09fa824](https://github.com/uuidjs/uuid/commit/09fa824)) +* use msCrypto if available. 
Fixes [#241](https://github.com/uuidjs/uuid/issues/241) ([#247](https://github.com/uuidjs/uuid/issues/247)) ([1fef18b](https://github.com/uuidjs/uuid/commit/1fef18b)) + + +### Features + +* Add v3 Support ([#217](https://github.com/uuidjs/uuid/issues/217)) ([d94f726](https://github.com/uuidjs/uuid/commit/d94f726)) + + +# [3.1.0](https://github.com/uuidjs/uuid/compare/v3.1.0...v3.0.1) (2017-06-17) + +### Bug Fixes + +* (fix) Add .npmignore file to exclude test/ and other non-essential files from packing. (#183) +* Fix typo (#178) +* Simple typo fix (#165) + +### Features +* v5 support in CLI (#197) +* V5 support (#188) + + +# 3.0.1 (2016-11-28) + +* split uuid versions into separate files + + +# 3.0.0 (2016-11-17) + +* remove .parse and .unparse + + +# 2.0.0 + +* Removed uuid.BufferClass + + +# 1.4.0 + +* Improved module context detection +* Removed public RNG functions + + +# 1.3.2 + +* Improve tests and handling of v1() options (Issue #24) +* Expose RNG option to allow for perf testing with different generators + + +# 1.3.0 + +* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +* Support for node.js crypto API +* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/node_modules/@actions/cache/node_modules/uuid/LICENSE.md b/node_modules/@actions/cache/node_modules/uuid/LICENSE.md new file mode 100644 index 0000000000..8c84e39866 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010-2016 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@actions/cache/node_modules/uuid/README.md b/node_modules/@actions/cache/node_modules/uuid/README.md new file mode 100644 index 0000000000..1752e4751f --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/README.md @@ -0,0 +1,276 @@ + + +# uuid [![Build Status](https://secure.travis-ci.org/kelektiv/node-uuid.svg?branch=master)](http://travis-ci.org/kelektiv/node-uuid) # + +Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. + +Features: + +* Support for version 1, 3, 4 and 5 UUIDs +* Cross-platform +* Uses cryptographically-strong random number APIs (when available) +* Zero-dependency, small footprint (... but not [this small](https://gist.github.com/982883)) + +[**Deprecation warning**: The use of `require('uuid')` is deprecated and will not be +supported after version 3.x of this module. 
Instead, use `require('uuid/[v1|v3|v4|v5]')` as shown in the examples below.] + +## Quickstart - CommonJS (Recommended) + +```shell +npm install uuid +``` + +Then generate your uuid version of choice ... + +Version 1 (timestamp): + +```javascript +const uuidv1 = require('uuid/v1'); +uuidv1(); // ⇨ '2c5ea4c0-4067-11e9-8bad-9b1deb4d3b7d' + +``` + +Version 3 (namespace): + +```javascript +const uuidv3 = require('uuid/v3'); + +// ... using predefined DNS namespace (for domain names) +uuidv3('hello.example.com', uuidv3.DNS); // ⇨ '9125a8dc-52ee-365b-a5aa-81b0b3681cf6' + +// ... using predefined URL namespace (for, well, URLs) +uuidv3('http://example.com/hello', uuidv3.URL); // ⇨ 'c6235813-3ba4-3801-ae84-e0a6ebb7d138' + +// ... using a custom namespace +// +// Note: Custom namespaces should be a UUID string specific to your application! +// E.g. the one here was generated using this modules `uuid` CLI. +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; +uuidv3('Hello, World!', MY_NAMESPACE); // ⇨ 'e8b5a51d-11c8-3310-a6ab-367563f20686' + +``` + +Version 4 (random): + +```javascript +const uuidv4 = require('uuid/v4'); +uuidv4(); // ⇨ '1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed' + +``` + +Version 5 (namespace): + +```javascript +const uuidv5 = require('uuid/v5'); + +// ... using predefined DNS namespace (for domain names) +uuidv5('hello.example.com', uuidv5.DNS); // ⇨ 'fdda765f-fc57-5604-a269-52a7df8164ec' + +// ... using predefined URL namespace (for, well, URLs) +uuidv5('http://example.com/hello', uuidv5.URL); // ⇨ '3bbcee75-cecc-5b56-8031-b6641c1ed1f1' + +// ... using a custom namespace +// +// Note: Custom namespaces should be a UUID string specific to your application! +// E.g. the one here was generated using this modules `uuid` CLI. +const MY_NAMESPACE = '1b671a64-40d5-491e-99b0-da01ff1f3341'; +uuidv5('Hello, World!', MY_NAMESPACE); // ⇨ '630eb68f-e0fa-5ecc-887a-7c7a62614681' + +``` + +## API + +### Version 1 + +```javascript +const uuidv1 = require('uuid/v1'); + +// Incantations +uuidv1(); +uuidv1(options); +uuidv1(options, buffer, offset); +``` + +Generate and return a RFC4122 v1 (timestamp-based) UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. + * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used. + * `msecs` - (Number) Time in milliseconds since unix Epoch. Default: The current time is used. + * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Note: The default [node id](https://tools.ietf.org/html/rfc4122#section-4.1.6) (the last 12 digits in the UUID) is generated once, randomly, on process startup, and then remains unchanged for the duration of the process. 
+ +Example: Generate string UUID with fully-specified options + +```javascript +const v1options = { + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678 +}; +uuidv1(v1options); // ⇨ '710b962e-041c-11e1-9234-0123456789ab' + +``` + +Example: In-place generation of two binary IDs + +```javascript +// Generate two ids in an array +const arr = new Array(); +uuidv1(null, arr, 0); // ⇨ + // [ + // 44, 94, 164, 192, 64, 103, + // 17, 233, 146, 52, 155, 29, + // 235, 77, 59, 125 + // ] +uuidv1(null, arr, 16); // ⇨ + // [ + // 44, 94, 164, 192, 64, 103, 17, 233, + // 146, 52, 155, 29, 235, 77, 59, 125, + // 44, 94, 164, 193, 64, 103, 17, 233, + // 146, 52, 155, 29, 235, 77, 59, 125 + // ] + +``` + +### Version 3 + +```javascript +const uuidv3 = require('uuid/v3'); + +// Incantations +uuidv3(name, namespace); +uuidv3(name, namespace, buffer); +uuidv3(name, namespace, buffer, offset); +``` + +Generate and return a RFC4122 v3 UUID. + +* `name` - (String | Array[]) "name" to create UUID with +* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. Default = 0 + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: + +```javascript +uuidv3('hello world', MY_NAMESPACE); // ⇨ '042ffd34-d989-321c-ad06-f60826172424' + +``` + +### Version 4 + +```javascript +const uuidv4 = require('uuid/v4') + +// Incantations +uuidv4(); +uuidv4(options); +uuidv4(options, buffer, offset); +``` + +Generate and return a RFC4122 v4 UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values + * `rng` - (Function) Random # generator function that returns an Array[16] of byte values (0-255) +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: Generate string UUID with predefined `random` values + +```javascript +const v4options = { + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, + 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 + ] +}; +uuidv4(v4options); // ⇨ '109156be-c4fb-41ea-b1b4-efe1671c5836' + +``` + +Example: Generate two IDs in a single buffer + +```javascript +const buffer = new Array(); +uuidv4(null, buffer, 0); // ⇨ + // [ + // 155, 29, 235, 77, 59, + // 125, 75, 173, 155, 221, + // 43, 13, 123, 61, 203, + // 109 + // ] +uuidv4(null, buffer, 16); // ⇨ + // [ + // 155, 29, 235, 77, 59, 125, 75, 173, + // 155, 221, 43, 13, 123, 61, 203, 109, + // 27, 157, 107, 205, 187, 253, 75, 45, + // 155, 93, 171, 141, 251, 189, 75, 237 + // ] + +``` + +### Version 5 + +```javascript +const uuidv5 = require('uuid/v5'); + +// Incantations +uuidv5(name, namespace); +uuidv5(name, namespace, buffer); +uuidv5(name, namespace, buffer, offset); +``` + +Generate and return a RFC4122 v5 UUID. + +* `name` - (String | Array[]) "name" to create UUID with +* `namespace` - (String | Array[]) "namespace" UUID either as a String or Array[16] of byte values +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. 
Default = 0 + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: + +```javascript +uuidv5('hello world', MY_NAMESPACE); // ⇨ '9f282611-e0fd-5650-8953-89c8e342da0b' + +``` + +## Command Line + +UUIDs can be generated from the command line with the `uuid` command. + +```shell +$ uuid +ddeb27fb-d9a0-4624-be4d-4615062daed4 + +$ uuid v1 +02d37060-d446-11e7-a9fa-7bdae751ebe1 +``` + +Type `uuid --help` for usage details + +## Testing + +```shell +npm test +``` + +---- +Markdown generated from [README_js.md](README_js.md) by [![RunMD Logo](http://i.imgur.com/h0FVyzU.png)](https://github.com/broofa/runmd) \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/uuid/bin/uuid b/node_modules/@actions/cache/node_modules/uuid/bin/uuid new file mode 100755 index 0000000000..502626e60f --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/bin/uuid @@ -0,0 +1,65 @@ +#!/usr/bin/env node +var assert = require('assert'); + +function usage() { + console.log('Usage:'); + console.log(' uuid'); + console.log(' uuid v1'); + console.log(' uuid v3 '); + console.log(' uuid v4'); + console.log(' uuid v5 '); + console.log(' uuid --help'); + console.log('\nNote: may be "URL" or "DNS" to use the corresponding UUIDs defined by RFC4122'); +} + +var args = process.argv.slice(2); + +if (args.indexOf('--help') >= 0) { + usage(); + process.exit(0); +} +var version = args.shift() || 'v4'; + +switch (version) { + case 'v1': + var uuidV1 = require('../v1'); + console.log(uuidV1()); + break; + + case 'v3': + var uuidV3 = require('../v3'); + + var name = args.shift(); + var namespace = args.shift(); + assert(name != null, 'v3 name not specified'); + assert(namespace != null, 'v3 namespace not specified'); + + if (namespace == 'URL') namespace = uuidV3.URL; + if (namespace == 'DNS') namespace = uuidV3.DNS; + + console.log(uuidV3(name, namespace)); + break; + + case 'v4': + var uuidV4 = require('../v4'); + console.log(uuidV4()); + break; + + case 'v5': + var uuidV5 = require('../v5'); + + var name = args.shift(); + var namespace = args.shift(); + assert(name != null, 'v5 name not specified'); + assert(namespace != null, 'v5 namespace not specified'); + + if (namespace == 'URL') namespace = uuidV5.URL; + if (namespace == 'DNS') namespace = uuidV5.DNS; + + console.log(uuidV5(name, namespace)); + break; + + default: + usage(); + process.exit(1); +} diff --git a/node_modules/@actions/cache/node_modules/uuid/index.js b/node_modules/@actions/cache/node_modules/uuid/index.js new file mode 100644 index 0000000000..e96791ab4b --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/index.js @@ -0,0 +1,8 @@ +var v1 = require('./v1'); +var v4 = require('./v4'); + +var uuid = v4; +uuid.v1 = v1; +uuid.v4 = v4; + +module.exports = uuid; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/bytesToUuid.js b/node_modules/@actions/cache/node_modules/uuid/lib/bytesToUuid.js new file mode 100644 index 0000000000..24b60412a2 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/bytesToUuid.js @@ -0,0 +1,26 @@ +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([ + 
bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]] + ]).join(''); +} + +module.exports = bytesToUuid; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/md5-browser.js b/node_modules/@actions/cache/node_modules/uuid/lib/md5-browser.js new file mode 100644 index 0000000000..9b3b6c7eff --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/md5-browser.js @@ -0,0 +1,216 @@ +/* + * Browser-compatible JavaScript MD5 + * + * Modification of JavaScript MD5 + * https://github.com/blueimp/JavaScript-MD5 + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * https://opensource.org/licenses/MIT + * + * Based on + * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message + * Digest Algorithm, as defined in RFC 1321. + * Version 2.2 Copyright (C) Paul Johnston 1999 - 2009 + * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet + * Distributed under the BSD License + * See http://pajhome.org.uk/crypt/md5 for more info. + */ + +'use strict'; + +function md5(bytes) { + if (typeof(bytes) == 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + bytes = new Array(msg.length); + for (var i = 0; i < msg.length; i++) bytes[i] = msg.charCodeAt(i); + } + + return md5ToHexEncodedArray( + wordsToMd5( + bytesToWords(bytes) + , bytes.length * 8) + ); +} + + +/* +* Convert an array of little-endian words to an array of bytes +*/ +function md5ToHexEncodedArray(input) { + var i; + var x; + var output = []; + var length32 = input.length * 32; + var hexTab = '0123456789abcdef'; + var hex; + + for (i = 0; i < length32; i += 8) { + x = (input[i >> 5] >>> (i % 32)) & 0xFF; + + hex = parseInt(hexTab.charAt((x >>> 4) & 0x0F) + hexTab.charAt(x & 0x0F), 16); + + output.push(hex); + } + return output; +} + +/* +* Calculate the MD5 of an array of little-endian words, and a bit length. 
+*/ +function wordsToMd5(x, len) { + /* append padding */ + x[len >> 5] |= 0x80 << (len % 32); + x[(((len + 64) >>> 9) << 4) + 14] = len; + + var i; + var olda; + var oldb; + var oldc; + var oldd; + var a = 1732584193; + var b = -271733879; + var c = -1732584194; + + var d = 271733878; + + for (i = 0; i < x.length; i += 16) { + olda = a; + oldb = b; + oldc = c; + oldd = d; + + a = md5ff(a, b, c, d, x[i], 7, -680876936); + d = md5ff(d, a, b, c, x[i + 1], 12, -389564586); + c = md5ff(c, d, a, b, x[i + 2], 17, 606105819); + b = md5ff(b, c, d, a, x[i + 3], 22, -1044525330); + a = md5ff(a, b, c, d, x[i + 4], 7, -176418897); + d = md5ff(d, a, b, c, x[i + 5], 12, 1200080426); + c = md5ff(c, d, a, b, x[i + 6], 17, -1473231341); + b = md5ff(b, c, d, a, x[i + 7], 22, -45705983); + a = md5ff(a, b, c, d, x[i + 8], 7, 1770035416); + d = md5ff(d, a, b, c, x[i + 9], 12, -1958414417); + c = md5ff(c, d, a, b, x[i + 10], 17, -42063); + b = md5ff(b, c, d, a, x[i + 11], 22, -1990404162); + a = md5ff(a, b, c, d, x[i + 12], 7, 1804603682); + d = md5ff(d, a, b, c, x[i + 13], 12, -40341101); + c = md5ff(c, d, a, b, x[i + 14], 17, -1502002290); + b = md5ff(b, c, d, a, x[i + 15], 22, 1236535329); + + a = md5gg(a, b, c, d, x[i + 1], 5, -165796510); + d = md5gg(d, a, b, c, x[i + 6], 9, -1069501632); + c = md5gg(c, d, a, b, x[i + 11], 14, 643717713); + b = md5gg(b, c, d, a, x[i], 20, -373897302); + a = md5gg(a, b, c, d, x[i + 5], 5, -701558691); + d = md5gg(d, a, b, c, x[i + 10], 9, 38016083); + c = md5gg(c, d, a, b, x[i + 15], 14, -660478335); + b = md5gg(b, c, d, a, x[i + 4], 20, -405537848); + a = md5gg(a, b, c, d, x[i + 9], 5, 568446438); + d = md5gg(d, a, b, c, x[i + 14], 9, -1019803690); + c = md5gg(c, d, a, b, x[i + 3], 14, -187363961); + b = md5gg(b, c, d, a, x[i + 8], 20, 1163531501); + a = md5gg(a, b, c, d, x[i + 13], 5, -1444681467); + d = md5gg(d, a, b, c, x[i + 2], 9, -51403784); + c = md5gg(c, d, a, b, x[i + 7], 14, 1735328473); + b = md5gg(b, c, d, a, x[i + 12], 20, -1926607734); + + a = md5hh(a, b, c, d, x[i + 5], 4, -378558); + d = md5hh(d, a, b, c, x[i + 8], 11, -2022574463); + c = md5hh(c, d, a, b, x[i + 11], 16, 1839030562); + b = md5hh(b, c, d, a, x[i + 14], 23, -35309556); + a = md5hh(a, b, c, d, x[i + 1], 4, -1530992060); + d = md5hh(d, a, b, c, x[i + 4], 11, 1272893353); + c = md5hh(c, d, a, b, x[i + 7], 16, -155497632); + b = md5hh(b, c, d, a, x[i + 10], 23, -1094730640); + a = md5hh(a, b, c, d, x[i + 13], 4, 681279174); + d = md5hh(d, a, b, c, x[i], 11, -358537222); + c = md5hh(c, d, a, b, x[i + 3], 16, -722521979); + b = md5hh(b, c, d, a, x[i + 6], 23, 76029189); + a = md5hh(a, b, c, d, x[i + 9], 4, -640364487); + d = md5hh(d, a, b, c, x[i + 12], 11, -421815835); + c = md5hh(c, d, a, b, x[i + 15], 16, 530742520); + b = md5hh(b, c, d, a, x[i + 2], 23, -995338651); + + a = md5ii(a, b, c, d, x[i], 6, -198630844); + d = md5ii(d, a, b, c, x[i + 7], 10, 1126891415); + c = md5ii(c, d, a, b, x[i + 14], 15, -1416354905); + b = md5ii(b, c, d, a, x[i + 5], 21, -57434055); + a = md5ii(a, b, c, d, x[i + 12], 6, 1700485571); + d = md5ii(d, a, b, c, x[i + 3], 10, -1894986606); + c = md5ii(c, d, a, b, x[i + 10], 15, -1051523); + b = md5ii(b, c, d, a, x[i + 1], 21, -2054922799); + a = md5ii(a, b, c, d, x[i + 8], 6, 1873313359); + d = md5ii(d, a, b, c, x[i + 15], 10, -30611744); + c = md5ii(c, d, a, b, x[i + 6], 15, -1560198380); + b = md5ii(b, c, d, a, x[i + 13], 21, 1309151649); + a = md5ii(a, b, c, d, x[i + 4], 6, -145523070); + d = md5ii(d, a, b, c, x[i + 11], 10, -1120210379); + c = md5ii(c, d, a, b, x[i 
+ 2], 15, 718787259); + b = md5ii(b, c, d, a, x[i + 9], 21, -343485551); + + a = safeAdd(a, olda); + b = safeAdd(b, oldb); + c = safeAdd(c, oldc); + d = safeAdd(d, oldd); + } + return [a, b, c, d]; +} + +/* +* Convert an array bytes to an array of little-endian words +* Characters >255 have their high-byte silently ignored. +*/ +function bytesToWords(input) { + var i; + var output = []; + output[(input.length >> 2) - 1] = undefined; + for (i = 0; i < output.length; i += 1) { + output[i] = 0; + } + var length8 = input.length * 8; + for (i = 0; i < length8; i += 8) { + output[i >> 5] |= (input[(i / 8)] & 0xFF) << (i % 32); + } + + return output; +} + +/* +* Add integers, wrapping at 2^32. This uses 16-bit operations internally +* to work around bugs in some JS interpreters. +*/ +function safeAdd(x, y) { + var lsw = (x & 0xFFFF) + (y & 0xFFFF); + var msw = (x >> 16) + (y >> 16) + (lsw >> 16); + return (msw << 16) | (lsw & 0xFFFF); +} + +/* +* Bitwise rotate a 32-bit number to the left. +*/ +function bitRotateLeft(num, cnt) { + return (num << cnt) | (num >>> (32 - cnt)); +} + +/* +* These functions implement the four basic operations the algorithm uses. +*/ +function md5cmn(q, a, b, x, s, t) { + return safeAdd(bitRotateLeft(safeAdd(safeAdd(a, q), safeAdd(x, t)), s), b); +} +function md5ff(a, b, c, d, x, s, t) { + return md5cmn((b & c) | ((~b) & d), a, b, x, s, t); +} +function md5gg(a, b, c, d, x, s, t) { + return md5cmn((b & d) | (c & (~d)), a, b, x, s, t); +} +function md5hh(a, b, c, d, x, s, t) { + return md5cmn(b ^ c ^ d, a, b, x, s, t); +} +function md5ii(a, b, c, d, x, s, t) { + return md5cmn(c ^ (b | (~d)), a, b, x, s, t); +} + +module.exports = md5; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/md5.js b/node_modules/@actions/cache/node_modules/uuid/lib/md5.js new file mode 100644 index 0000000000..7044b872f5 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/md5.js @@ -0,0 +1,25 @@ +'use strict'; + +var crypto = require('crypto'); + +function md5(bytes) { + if (typeof Buffer.from === 'function') { + // Modern Buffer API + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + } else { + // Pre-v4 Buffer API + if (Array.isArray(bytes)) { + bytes = new Buffer(bytes); + } else if (typeof bytes === 'string') { + bytes = new Buffer(bytes, 'utf8'); + } + } + + return crypto.createHash('md5').update(bytes).digest(); +} + +module.exports = md5; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/rng-browser.js b/node_modules/@actions/cache/node_modules/uuid/lib/rng-browser.js new file mode 100644 index 0000000000..6361fb8147 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/rng-browser.js @@ -0,0 +1,34 @@ +// Unique ID creation requires a high quality random # generator. In the +// browser this is a little complicated due to unknown quality of Math.random() +// and inconsistent support for the `crypto` API. We do the best we can via +// feature-detection + +// getRandomValues needs to be invoked in a context where "this" is a Crypto +// implementation. Also, find the complete implementation of crypto on IE11. 
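+// (Illustrative comment, not in the upstream source: this requirement is why the
+// feature detection below ends in .bind(crypto) / .bind(msCrypto) -- a detached
+// getRandomValues reference would otherwise lose its required `this` binding.)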
+var getRandomValues = (typeof(crypto) != 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto)) || + (typeof(msCrypto) != 'undefined' && typeof window.msCrypto.getRandomValues == 'function' && msCrypto.getRandomValues.bind(msCrypto)); + +if (getRandomValues) { + // WHATWG crypto RNG - http://wiki.whatwg.org/wiki/Crypto + var rnds8 = new Uint8Array(16); // eslint-disable-line no-undef + + module.exports = function whatwgRNG() { + getRandomValues(rnds8); + return rnds8; + }; +} else { + // Math.random()-based (RNG) + // + // If all else fails, use Math.random(). It's fast, but is of unspecified + // quality. + var rnds = new Array(16); + + module.exports = function mathRNG() { + for (var i = 0, r; i < 16; i++) { + if ((i & 0x03) === 0) r = Math.random() * 0x100000000; + rnds[i] = r >>> ((i & 0x03) << 3) & 0xff; + } + + return rnds; + }; +} diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/rng.js b/node_modules/@actions/cache/node_modules/uuid/lib/rng.js new file mode 100644 index 0000000000..58f0dc9cfb --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/rng.js @@ -0,0 +1,8 @@ +// Unique ID creation requires a high quality random # generator. In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = require('crypto'); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/sha1-browser.js b/node_modules/@actions/cache/node_modules/uuid/lib/sha1-browser.js new file mode 100644 index 0000000000..5758ed75c9 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/sha1-browser.js @@ -0,0 +1,89 @@ +// Adapted from Chris Veness' SHA1 code at +// http://www.movable-type.co.uk/scripts/sha1.html +'use strict'; + +function f(s, x, y, z) { + switch (s) { + case 0: return (x & y) ^ (~x & z); + case 1: return x ^ y ^ z; + case 2: return (x & y) ^ (x & z) ^ (y & z); + case 3: return x ^ y ^ z; + } +} + +function ROTL(x, n) { + return (x << n) | (x>>> (32 - n)); +} + +function sha1(bytes) { + var K = [0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6]; + var H = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, 0xc3d2e1f0]; + + if (typeof(bytes) == 'string') { + var msg = unescape(encodeURIComponent(bytes)); // UTF8 escape + bytes = new Array(msg.length); + for (var i = 0; i < msg.length; i++) bytes[i] = msg.charCodeAt(i); + } + + bytes.push(0x80); + + var l = bytes.length/4 + 2; + var N = Math.ceil(l/16); + var M = new Array(N); + + for (var i=0; i>> 0; + e = d; + d = c; + c = ROTL(b, 30) >>> 0; + b = a; + a = T; + } + + H[0] = (H[0] + a) >>> 0; + H[1] = (H[1] + b) >>> 0; + H[2] = (H[2] + c) >>> 0; + H[3] = (H[3] + d) >>> 0; + H[4] = (H[4] + e) >>> 0; + } + + return [ + H[0] >> 24 & 0xff, H[0] >> 16 & 0xff, H[0] >> 8 & 0xff, H[0] & 0xff, + H[1] >> 24 & 0xff, H[1] >> 16 & 0xff, H[1] >> 8 & 0xff, H[1] & 0xff, + H[2] >> 24 & 0xff, H[2] >> 16 & 0xff, H[2] >> 8 & 0xff, H[2] & 0xff, + H[3] >> 24 & 0xff, H[3] >> 16 & 0xff, H[3] >> 8 & 0xff, H[3] & 0xff, + H[4] >> 24 & 0xff, H[4] >> 16 & 0xff, H[4] >> 8 & 0xff, H[4] & 0xff + ]; +} + +module.exports = sha1; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/sha1.js b/node_modules/@actions/cache/node_modules/uuid/lib/sha1.js new file mode 100644 index 0000000000..0b54b25072 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/sha1.js @@ -0,0 +1,25 @@ +'use strict'; + +var crypto = require('crypto'); + +function sha1(bytes) { + if (typeof Buffer.from === 
'function') { + // Modern Buffer API + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + } else { + // Pre-v4 Buffer API + if (Array.isArray(bytes)) { + bytes = new Buffer(bytes); + } else if (typeof bytes === 'string') { + bytes = new Buffer(bytes, 'utf8'); + } + } + + return crypto.createHash('sha1').update(bytes).digest(); +} + +module.exports = sha1; diff --git a/node_modules/@actions/cache/node_modules/uuid/lib/v35.js b/node_modules/@actions/cache/node_modules/uuid/lib/v35.js new file mode 100644 index 0000000000..8b066cc5e3 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/lib/v35.js @@ -0,0 +1,57 @@ +var bytesToUuid = require('./bytesToUuid'); + +function uuidToBytes(uuid) { + // Note: We assume we're being passed a valid uuid string + var bytes = []; + uuid.replace(/[a-fA-F0-9]{2}/g, function(hex) { + bytes.push(parseInt(hex, 16)); + }); + + return bytes; +} + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + var bytes = new Array(str.length); + for (var i = 0; i < str.length; i++) { + bytes[i] = str.charCodeAt(i); + } + return bytes; +} + +module.exports = function(name, version, hashfunc) { + var generateUUID = function(value, namespace, buf, offset) { + var off = buf && offset || 0; + + if (typeof(value) == 'string') value = stringToBytes(value); + if (typeof(namespace) == 'string') namespace = uuidToBytes(namespace); + + if (!Array.isArray(value)) throw TypeError('value must be an array of bytes'); + if (!Array.isArray(namespace) || namespace.length !== 16) throw TypeError('namespace must be uuid string or an Array of 16 byte values'); + + // Per 4.3 + var bytes = hashfunc(namespace.concat(value)); + bytes[6] = (bytes[6] & 0x0f) | version; + bytes[8] = (bytes[8] & 0x3f) | 0x80; + + if (buf) { + for (var idx = 0; idx < 16; ++idx) { + buf[off+idx] = bytes[idx]; + } + } + + return buf || bytesToUuid(bytes); + }; + + // Function#name is not settable on some platforms (#270) + try { + generateUUID.name = name; + } catch (err) { + } + + // Pre-defined namespaces, per Appendix C + generateUUID.DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; + generateUUID.URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; + + return generateUUID; +}; diff --git a/node_modules/@actions/cache/node_modules/uuid/package.json b/node_modules/@actions/cache/node_modules/uuid/package.json new file mode 100644 index 0000000000..efc07b8f6c --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/package.json @@ -0,0 +1,49 @@ +{ + "name": "uuid", + "version": "3.4.0", + "description": "RFC4122 (v1, v4, and v5) UUIDs", + "commitlint": { + "extends": [ + "@commitlint/config-conventional" + ] + }, + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "license": "MIT", + "bin": { + "uuid": "./bin/uuid" + }, + "devDependencies": { + "@commitlint/cli": "~8.2.0", + "@commitlint/config-conventional": "~8.2.0", + "eslint": "~6.4.0", + "husky": "~3.0.5", + "mocha": "6.2.0", + "runmd": "1.2.1", + "standard-version": "7.0.0" + }, + "scripts": { + "lint": "eslint .", + "test": "npm run lint && mocha test/test.js", + "md": "runmd --watch --output=README.md README_js.md", + "release": "standard-version", + "prepare": "runmd --output=README.md README_js.md" + }, + "browser": { + "./lib/rng.js": "./lib/rng-browser.js", + "./lib/sha1.js": "./lib/sha1-browser.js", + "./lib/md5.js": "./lib/md5-browser.js" + }, + "repository": { + "type": "git", + "url": 
"https://github.com/uuidjs/uuid.git" + }, + "husky": { + "hooks": { + "commit-msg": "commitlint -E HUSKY_GIT_PARAMS" + } + } +} diff --git a/node_modules/@actions/cache/node_modules/uuid/v1.js b/node_modules/@actions/cache/node_modules/uuid/v1.js new file mode 100644 index 0000000000..8c245de43d --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v1.js @@ -0,0 +1,109 @@ +var rng = require('./lib/rng'); +var bytesToUuid = require('./lib/bytesToUuid'); + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html + +var _nodeId; +var _clockseq; + +// Previous uuid creation time +var _lastMSecs = 0; +var _lastNSecs = 0; + +// See https://github.com/uuidjs/uuid for API details +function v1(options, buf, offset) { + var i = buf && offset || 0; + var b = buf || []; + + options = options || {}; + var node = options.node || _nodeId; + var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; + + // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + if (node == null || clockseq == null) { + var seedBytes = rng(); + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [ + seedBytes[0] | 0x01, + seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] + ]; + } + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } + + // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); + + // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; + + // Time since last uuid creation (in msecs) + var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; + + // Per 4.2.1.2, Bump clockseq on clock regression + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } + + // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } + + // Per 4.2.1.2 Throw error if too many uuids are requested + if (nsecs >= 10000) { + throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; + + // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + msecs += 12219292800000; + + // `time_low` + var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; + + // `time_mid` + var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; + + // `time_high_and_version` + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + b[i++] = tmh >>> 16 & 0xff; + + // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + b[i++] = clockseq >>> 8 | 0x80; + + // `clock_seq_low` + b[i++] = clockseq & 0xff; + + // `node` + for (var n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf ? buf : bytesToUuid(b); +} + +module.exports = v1; diff --git a/node_modules/@actions/cache/node_modules/uuid/v3.js b/node_modules/@actions/cache/node_modules/uuid/v3.js new file mode 100644 index 0000000000..ee7e14c0f0 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v3.js @@ -0,0 +1,4 @@ +var v35 = require('./lib/v35.js'); +var md5 = require('./lib/md5'); + +module.exports = v35('v3', 0x30, md5); \ No newline at end of file diff --git a/node_modules/@actions/cache/node_modules/uuid/v4.js b/node_modules/@actions/cache/node_modules/uuid/v4.js new file mode 100644 index 0000000000..1f07be1c53 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v4.js @@ -0,0 +1,29 @@ +var rng = require('./lib/rng'); +var bytesToUuid = require('./lib/bytesToUuid'); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? 
new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; diff --git a/node_modules/@actions/cache/node_modules/uuid/v5.js b/node_modules/@actions/cache/node_modules/uuid/v5.js new file mode 100644 index 0000000000..4945baf385 --- /dev/null +++ b/node_modules/@actions/cache/node_modules/uuid/v5.js @@ -0,0 +1,3 @@ +var v35 = require('./lib/v35.js'); +var sha1 = require('./lib/sha1'); +module.exports = v35('v5', 0x50, sha1); diff --git a/node_modules/@actions/cache/package.json b/node_modules/@actions/cache/package.json new file mode 100644 index 0000000000..3cb06d3041 --- /dev/null +++ b/node_modules/@actions/cache/package.json @@ -0,0 +1,55 @@ +{ + "name": "@actions/cache", + "version": "3.0.0", + "preview": true, + "description": "Actions cache lib", + "keywords": [ + "github", + "actions", + "cache" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/cache", + "license": "MIT", + "main": "lib/cache.js", + "types": "lib/cache.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/cache" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.6", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.0.1", + "@actions/io": "^1.0.1", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.8.0", + "semver": "^6.1.0", + "uuid": "^3.3.3" + }, + "devDependencies": { + "@types/semver": "^6.0.0", + "@types/uuid": "^3.4.5", + "typescript": "^3.8.3" + } +} diff --git a/node_modules/@actions/glob/LICENSE.md b/node_modules/@actions/glob/LICENSE.md new file mode 100644 index 0000000000..dbae2edb2c --- /dev/null +++ b/node_modules/@actions/glob/LICENSE.md @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright 2019 GitHub + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/@actions/glob/README.md b/node_modules/@actions/glob/README.md new file mode 100644 index 0000000000..9575beaa1d --- /dev/null +++ b/node_modules/@actions/glob/README.md @@ -0,0 +1,113 @@ +# `@actions/glob` + +## Usage + +### Basic + +You can use this package to search for files matching glob patterns. + +Relative paths and absolute paths are both allowed. Relative paths are rooted against the current working directory. + +```js +const glob = require('@actions/glob'); + +const patterns = ['**/tar.gz', '**/tar.bz'] +const globber = await glob.create(patterns.join('\n')) +const files = await globber.glob() +``` + +### Opt out of following symbolic links + +```js +const glob = require('@actions/glob'); + +const globber = await glob.create('**', {followSymbolicLinks: false}) +const files = await globber.glob() +``` + +### Iterator + +When dealing with a large amount of results, consider iterating the results as they are returned: + +```js +const glob = require('@actions/glob'); + +const globber = await glob.create('**') +for await (const file of globber.globGenerator()) { + console.log(file) +} +``` + +## Recommended action inputs + +Glob follows symbolic links by default. Following is often appropriate unless deleting files. + +Users may want to opt-out from following symbolic links for other reasons. For example, +excessive amounts of symbolic links can create the appearance of very, very many files +and slow the search. + +When an action allows a user to specify input patterns, it is generally recommended to +allow users to opt-out from following symbolic links. + +Snippet from `action.yml`: + +```yaml +inputs: + files: + description: 'Files to print' + required: true + follow-symbolic-links: + description: 'Indicates whether to follow symbolic links' + default: true +``` + +And corresponding toolkit consumption: + +```js +const core = require('@actions/core') +const glob = require('@actions/glob') + +const globOptions = { + followSymbolicLinks: core.getInput('follow-symbolic-links').toUpper() !== 'FALSE' +} +const globber = glob.create(core.getInput('files'), globOptions) +for await (const file of globber.globGenerator()) { + console.log(file) +} +``` + +## Patterns + +### Glob behavior + +Patterns `*`, `?`, `[...]`, `**` (globstar) are supported. + +With the following behaviors: +- File names that begin with `.` may be included in the results +- Case insensitive on Windows +- Directory separator `/` and `\` both supported on Windows + +### Tilde expansion + +Supports basic tilde expansion, for current user HOME replacement only. + +Example: +- `~` may expand to /Users/johndoe +- `~/foo` may expand to /Users/johndoe/foo + +### Comments + +Patterns that begin with `#` are treated as comments. + +### Exclude patterns + +Leading `!` changes the meaning of an include pattern to exclude. + +Multiple leading `!` flips the meaning. + +### Escaping + +Wrapping special characters in `[]` can be used to escape literal glob characters +in a file name. For example the literal file name `hello[a-z]` can be escaped as `hello[[]a-z]`. + +On Linux/macOS `\` is also treated as an escape character. 
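+
+For example, a minimal sketch (using the `create`/`glob` API shown above, with
+made-up pattern strings) that combines an exclude pattern with an escaped
+literal file name:
+
+```js
+const glob = require('@actions/glob')
+
+// Match every .txt file, skip anything under node_modules, and also match
+// the literal file name "hello[a-z]" by escaping the brackets with [].
+const patterns = [
+  '**/*.txt',
+  '!**/node_modules/**',
+  'hello[[]a-z]'
+]
+const globber = await glob.create(patterns.join('\n'))
+const files = await globber.glob()
+```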
diff --git a/node_modules/@actions/glob/lib/glob.d.ts b/node_modules/@actions/glob/lib/glob.d.ts new file mode 100644 index 0000000000..56424fc3a8 --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.d.ts @@ -0,0 +1,10 @@ +import { Globber } from './internal-globber'; +import { GlobOptions } from './internal-glob-options'; +export { Globber, GlobOptions }; +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +export declare function create(patterns: string, options?: GlobOptions): Promise; diff --git a/node_modules/@actions/glob/lib/glob.js b/node_modules/@actions/glob/lib/glob.js new file mode 100644 index 0000000000..58cb68331e --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.js @@ -0,0 +1,26 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const internal_globber_1 = require("./internal-globber"); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/glob.js.map b/node_modules/@actions/glob/lib/glob.js.map new file mode 100644 index 0000000000..b27e7e4af6 --- /dev/null +++ b/node_modules/@actions/glob/lib/glob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"glob.js","sourceRoot":"","sources":["../src/glob.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,yDAA0D;AAK1D;;;;;GAKG;AACH,SAAsB,MAAM,CAC1B,QAAgB,EAChB,OAAqB;;QAErB,OAAO,MAAM,iCAAc,CAAC,MAAM,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAA;IACvD,CAAC;CAAA;AALD,wBAKC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts b/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts new file mode 100644 index 0000000000..8a789606e3 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.d.ts @@ -0,0 +1,5 @@ +import { GlobOptions } from './internal-glob-options'; +/** + * Returns a copy with defaults filled in. + */ +export declare function getOptions(copy?: GlobOptions): GlobOptions; diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.js b/node_modules/@actions/glob/lib/internal-glob-options-helper.js new file mode 100644 index 0000000000..974dfd038d --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.js @@ -0,0 +1,50 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOptions = void 0; +const core = __importStar(require("@actions/core")); +/** + * Returns a copy with defaults filled in. + */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map b/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map new file mode 100644 index 0000000000..93376ddc0a --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options-helper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-glob-options-helper.js","sourceRoot":"","sources":["../src/internal-glob-options-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AAGrC;;GAEG;AACH,SAAgB,UAAU,CAAC,IAAkB;IAC3C,MAAM,MAAM,GAAgB;QAC1B,mBAAmB,EAAE,IAAI;QACzB,mBAAmB,EAAE,IAAI;QACzB,uBAAuB,EAAE,IAAI;KAC9B,CAAA;IAED,IAAI,IAAI,EAAE;QACR,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,SAAS,EAAE;YACjD,MAAM,CAAC,mBAAmB,GAAG,IAAI,CAAC,mBAAmB,CAAA;YACrD,IAAI,CAAC,KAAK,CAAC,wBAAwB,MAAM,CAAC,mBAAmB,GAAG,CAAC,CAAA;SAClE;QAED,IAAI,OAAO,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAAE;YACrD,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAA;YAC7D,IAAI,CAAC,KAAK,CAAC,4BAA4B,MAAM,CAAC,uBAAuB,GAAG,CAAC,CAAA;SAC1E;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAzBD,gCAyBC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options.d.ts b/node_modules/@actions/glob/lib/internal-glob-options.d.ts new file mode 100644 index 0000000000..c1cac0f1f4 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.d.ts @@ -0,0 +1,29 @@ +/** + * Options to control globbing behavior + */ +export interface GlobOptions { + /** + * Indicates whether to follow symbolic links. Generally should set to false + * when deleting files. 
+ * + * @default true + */ + followSymbolicLinks?: boolean; + /** + * Indicates whether directories that match a glob pattern, should implicitly + * cause all descendant paths to be matched. + * + * For example, given the directory `my-dir`, the following glob patterns + * would produce the same results: `my-dir/**`, `my-dir/`, `my-dir` + * + * @default true + */ + implicitDescendants?: boolean; + /** + * Indicates whether broken symbolic should be ignored and omitted from the + * result set. Otherwise an error will be thrown. + * + * @default true + */ + omitBrokenSymbolicLinks?: boolean; +} diff --git a/node_modules/@actions/glob/lib/internal-glob-options.js b/node_modules/@actions/glob/lib/internal-glob-options.js new file mode 100644 index 0000000000..8960c70efe --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=internal-glob-options.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-glob-options.js.map b/node_modules/@actions/glob/lib/internal-glob-options.js.map new file mode 100644 index 0000000000..dec7953297 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-glob-options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-glob-options.js","sourceRoot":"","sources":["../src/internal-glob-options.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-globber.d.ts b/node_modules/@actions/glob/lib/internal-globber.d.ts new file mode 100644 index 0000000000..6d0036bf75 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.d.ts @@ -0,0 +1,42 @@ +import { GlobOptions } from './internal-glob-options'; +export { GlobOptions }; +/** + * Used to match files and directories + */ +export interface Globber { + /** + * Returns the search path preceding the first glob segment, from each pattern. + * Duplicates and descendants of other paths are filtered out. + * + * Example 1: The patterns `/foo/*` and `/bar/*` returns `/foo` and `/bar`. + * + * Example 2: The patterns `/foo/*` and `/foo/bar/*` returns `/foo`. + */ + getSearchPaths(): string[]; + /** + * Returns files and directories matching the glob patterns. + * + * Order of the results is not guaranteed. + */ + glob(): Promise; + /** + * Returns files and directories matching the glob patterns. + * + * Order of the results is not guaranteed. + */ + globGenerator(): AsyncGenerator; +} +export declare class DefaultGlobber implements Globber { + private readonly options; + private readonly patterns; + private readonly searchPaths; + private constructor(); + getSearchPaths(): string[]; + glob(): Promise; + globGenerator(): AsyncGenerator; + /** + * Constructs a DefaultGlobber + */ + static create(patterns: string, options?: GlobOptions): Promise; + private static stat; +} diff --git a/node_modules/@actions/glob/lib/internal-globber.js b/node_modules/@actions/glob/lib/internal-globber.js new file mode 100644 index 0000000000..a6695b5c21 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.js @@ -0,0 +1,235 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultGlobber = void 0; +const core = __importStar(require("@actions/core")); +const fs = __importStar(require("fs")); +const globOptionsHelper = __importStar(require("./internal-glob-options-helper")); +const path = __importStar(require("path")); +const patternHelper = __importStar(require("./internal-pattern-helper")); +const internal_match_kind_1 = require("./internal-match-kind"); +const internal_pattern_1 = require("./internal-pattern"); +const internal_search_state_1 = require("./internal-search-state"); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? 
+ else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-globber.js.map b/node_modules/@actions/glob/lib/internal-globber.js.map new file mode 100644 index 0000000000..cd1f05d9ba --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-globber.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-globber.js","sourceRoot":"","sources":["../src/internal-globber.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,oDAAqC;AACrC,uCAAwB;AACxB,kFAAmE;AACnE,2CAA4B;AAC5B,yEAA0D;AAE1D,+DAA+C;AAC/C,yDAA0C;AAC1C,mEAAmD;AAEnD,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAiC/C,MAAa,cAAc;IAKzB,YAAoB,OAAqB;QAHxB,aAAQ,GAAc,EAAE,CAAA;QACxB,gBAAW,GAAa,EAAE,CAAA;QAGzC,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA;IACtD,CAAC;IAED,cAAc;QACZ,gBAAgB;QAChB,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,CAAA;IACjC,CAAC;IAEK,IAAI;;;YACR,MAAM,MAAM,GAAa,EAAE,CAAA;;gBAC3B,KAA6B,IAAA,KAAA,cAAA,IAAI,CAAC,aAAa,EAAE,CAAA,IAAA;oBAAtC,MAAM,QAAQ,WAAA,CAAA;oBACvB,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;iBACtB;;;;;;;;;YACD,OAAO,MAAM,CAAA;;KACd;IAEM,aAAa;;YAClB,2BAA2B;YAC3B,MAAM,OAAO,GAAG,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;YAC1D,wBAAwB;YACxB,MAAM,QAAQ,GAAc,EAAE,CAAA;YAC9B,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACnC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;gBACtB,IACE,OAAO,CAAC,mBAAmB;oBAC3B,CAAC,OAAO,CAAC,iBAAiB;wBACxB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,CAAC,EACzD;oBACA,QAAQ,CAAC,IAAI,CACX,IAAI,0BAAO,CAAC,OAAO,CAAC,MAAM,EAAE,IAAI,EAAE,OAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CACjE,CAAA;iBACF;aACF;YAED,wBAAwB;YAExB,MAAM,KAAK,GAAkB,EAAE,CAAA;YAC/B,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,cAAc,CAAC,QAAQ,CAAC,EAAE;gBAC/D,IAAI,CAAC,KAAK,CAAC,gBAAgB,UAAU,GAAG,CAAC,CAAA;gBAEzC,UAAU;gBACV,IAAI;oBACF,0DAA0D;oBAC1D,mDAAmD;oBACnD,cAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA,CAAA;iBACpC;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,SAAQ;qBACT;oBACD,MAAM,GAAG,CAAA;iBACV;gBAED,KAAK,CAAC,OAAO,CAAC,IAAI,mCAAW,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC,CAAA;aAC9C;YAED,SAAS;YACT,MAAM,cAAc,GAAa,EAAE,CAAA,CAAC,wBAAwB;YAC5D,OAAO,KAAK,CAAC,MAAM,EAAE;gBACnB,MAAM;gBACN,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAiB,CAAA;gBAEvC,SAAS;gBACT,MAAM,KAAK,GAAG,aAAa,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBACtD,MAAM,YAAY,GAChB,CAAC,CAAC,KAAK,IAAI,aAAa,CAAC,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,IAAI,CAAC,CAAA;gBAC5D,IAAI,CAAC,KAAK,IAAI,CAAC,YAAY,EAAE;oBAC3B,SAAQ;iBACT;gBAED,OAAO;gBACP,MAAM,KAAK,GAAyB,cAAM,cAAc,CAAC,IAAI,CAC3D,IAAI,EACJ,OAAO,EACP,cAAc,CACf;gBAED,iEAAiE;iBAFhE,CAAA;gBAED,iEAAiE;gBACjE,IAAI,CAAC,KAAK,EAAE;oBACV,SAAQ;iBACT;gBAED,YAAY;gBACZ,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;oBACvB,UAAU;oBACV,IAAI,KAAK,GAAG,+BAAS,CAAC,SAAS,EAAE;wBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;qBAChB;oBACD,WAAW;yBACN,IAAI,CAAC,YAAY,EAAE;wBACtB,SAAQ;qBACT;oBAED,kCAAkC;oBAClC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,GAAG,CAAC,CAAA;oBACjC,MAAM,UAAU,GAAG,CAAC,cAAM,EAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA,CAAC,CAAC,GAAG,CAC3D,CAAC,CAAC,EAAE,CAAC,IAAI,mCAAW,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,UAAU,CAAC,CAC1D,CAAA;oBACD,KAAK,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,OAAO,EAAE,CAAC,CAAA;iBACpC;gBACD,OAAO;qBACF,IAAI,KAAK,GAAG,+BAAS,CAAC,IAAI,EAAE;oBAC/B,oBAAM,IAAI,CAAC,IAAI,CAAA,CAAA;iBAChB;aACF;QACH,CAAC;KAAA;IAED;;OAEG;IACH,MAAM,CAAO,MAAM,CACjB,QAAgB,EAChB,OAAqB;;YAErB,MAAM,MAAM,GAAG,IAAI,cAAc,CAAC,OAAO,CAAC,CAAA;YAE1C,IAAI,UAAU,EAAE;gBACd,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAA;gBAC1C,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;aACzC;YAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;YACrD,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,mBAAmB;gBACnB,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACjC,SAAQ;iBACT;gBACD,UAAU;qBACL;oBACH,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,0BAAO,CAAC,IA
AI,CAAC,CAAC,CAAA;iBACxC;aACF;YAED,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,GAAG,aAAa,CAAC,cAAc,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAA;YAEzE,OAAO,MAAM,CAAA;QACf,CAAC;KAAA;IAEO,MAAM,CAAO,IAAI,CACvB,IAAiB,EACjB,OAAoB,EACpB,cAAwB;;YAExB,QAAQ;YACR,uEAAuE;YACvE,8CAA8C;YAC9C,IAAI,KAAe,CAAA;YACnB,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBAC/B,IAAI;oBACF,kCAAkC;oBAClC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;iBAC1C;gBAAC,OAAO,GAAG,EAAE;oBACZ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;wBACzB,IAAI,OAAO,CAAC,uBAAuB,EAAE;4BACnC,IAAI,CAAC,KAAK,CAAC,mBAAmB,IAAI,CAAC,IAAI,GAAG,CAAC,CAAA;4BAC3C,OAAO,SAAS,CAAA;yBACjB;wBAED,MAAM,IAAI,KAAK,CACb,sCAAsC,IAAI,CAAC,IAAI,8CAA8C,CAC9F,CAAA;qBACF;oBAED,MAAM,GAAG,CAAA;iBACV;aACF;iBAAM;gBACL,uCAAuC;gBACvC,KAAK,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;aAC3C;YAED,+DAA+D;YAC/D,IAAI,KAAK,CAAC,WAAW,EAAE,IAAI,OAAO,CAAC,mBAAmB,EAAE;gBACtD,mBAAmB;gBACnB,MAAM,QAAQ,GAAW,MAAM,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;gBAE9D,oDAAoD;gBACpD,OAAO,cAAc,CAAC,MAAM,IAAI,IAAI,CAAC,KAAK,EAAE;oBAC1C,cAAc,CAAC,GAAG,EAAE,CAAA;iBACrB;gBAED,mBAAmB;gBACnB,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,EAAE;oBACtD,IAAI,CAAC,KAAK,CACR,oCAAoC,IAAI,CAAC,IAAI,mBAAmB,QAAQ,GAAG,CAC5E,CAAA;oBACD,OAAO,SAAS,CAAA;iBACjB;gBAED,6BAA6B;gBAC7B,cAAc,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;aAC9B;YAED,OAAO,KAAK,CAAA;QACd,CAAC;KAAA;CACF;AAvMD,wCAuMC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-match-kind.d.ts b/node_modules/@actions/glob/lib/internal-match-kind.d.ts new file mode 100644 index 0000000000..a7e60aaf7c --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.d.ts @@ -0,0 +1,13 @@ +/** + * Indicates whether a pattern matches a path + */ +export declare enum MatchKind { + /** Not matched */ + None = 0, + /** Matched if the path is a directory */ + Directory = 1, + /** Matched if the path is a regular file */ + File = 2, + /** Matched */ + All = 3 +} diff --git a/node_modules/@actions/glob/lib/internal-match-kind.js b/node_modules/@actions/glob/lib/internal-match-kind.js new file mode 100644 index 0000000000..37146ae7e0 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MatchKind = void 0; +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-match-kind.js.map b/node_modules/@actions/glob/lib/internal-match-kind.js.map new file mode 100644 index 0000000000..6fa3c9f71e --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-match-kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-match-kind.js","sourceRoot":"","sources":["../src/internal-match-kind.ts"],"names":[],"mappings":";;;AAAA;;GAEG;AACH,IAAY,SAYX;AAZD,WAAY,SAAS;IACnB,kBAAkB;IAClB,yCAAQ,CAAA;IAER,yCAAyC;IACzC,mDAAa,CAAA;IAEb,4CAA4C;IAC5C,yCAAQ,CAAA;IAER,cAAc;IACd,uCAAsB,CAAA;AACxB,CAAC,EAZW,SAAS,GAAT,iBAAS,KAAT,iBAAS,QAYpB"} \ No newline at end of file diff --git 
a/node_modules/@actions/glob/lib/internal-path-helper.d.ts b/node_modules/@actions/glob/lib/internal-path-helper.d.ts new file mode 100644 index 0000000000..0375c36c57 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.d.ts @@ -0,0 +1,42 @@ +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. + * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +export declare function dirname(p: string): string; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +export declare function ensureAbsoluteRoot(root: string, itemPath: string): string; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +export declare function hasAbsoluteRoot(itemPath: string): boolean; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +export declare function hasRoot(itemPath: string): boolean; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +export declare function normalizeSeparators(p: string): string; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +export declare function safeTrimTrailingSeparator(p: string): string; diff --git a/node_modules/@actions/glob/lib/internal-path-helper.js b/node_modules/@actions/glob/lib/internal-path-helper.js new file mode 100644 index 0000000000..5057add379 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.js @@ -0,0 +1,198 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; +const path = __importStar(require("path")); +const assert_1 = __importDefault(require("assert")); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. 
+ * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. \\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; + } + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); + } + return result; +} +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +function ensureAbsoluteRoot(root, itemPath) { + assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; + } + return root + itemPath; +} +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +function hasAbsoluteRoot(itemPath) { + assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. 
On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path-helper.js.map b/node_modules/@actions/glob/lib/internal-path-helper.js.map new file mode 100644 index 0000000000..1dc7ef9ad6 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path-helper.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-path-helper.js","sourceRoot":"","sources":["../src/internal-path-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;;;;;;;;;;;;;;GAgBG;AACH,SAAgB,OAAO,CAAC,CAAS;IAC/B,wDAAwD;IACxD,CAAC,GAAG,yBAAyB,CAAC,CAAC,CAAC,CAAA;IAEhC,kDAAkD;IAClD,IAAI,UAAU,IAAI,yBAAyB,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACnD,OAAO,CAAC,CAAA;KACT;IAED,cAAc;IACd,IAAI,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAA;IAE5B,gEAAgE;IAChE,IAAI,UAAU,IAAI,wBAAwB,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QACvD,MAAM,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAA;KAC3C;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAlBD,0BAkBC;AAED;;;GAGG;AACH,SAAgB,kBAAkB,CAAC,IAAY,EAAE,QAAgB;IAC/D,gBAAM,CAAC,IAAI,EAAE,uDAAuD,CAAC,CAAA;IACrE,gBAAM,CAAC,QAAQ,EAAE,2DAA2D,CAAC,CAAA;IAE7E,iBAAiB;IACjB,IAAI,eAAe,CAAC,QAAQ,CAAC,EAAE;QAC7B,OAAO,QAAQ,CAAA;KAChB;IAED,UAAU;IACV,IAAI,UAAU,EAAE;QACd,sCAAsC;QACtC,IAAI,QAAQ,CAAC,KAAK,CAAC,yBAAyB,CAAC,EAAE;YAC7C,IAAI,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACvB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,0CAA0C;YAC1C,IAAI,QAAQ,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,EAAE,EAAE;gBACtD,sBAAsB;gBACtB,IAAI,QAAQ,CAAC,MAAM,KAAK,CAAC,EAAE;oBACzB,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAC3C;gBACD,2BAA2B;qBACtB;oBACH,IAAI,CAAC,GAAG,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;wBACvB,GAAG,IAAI,IAAI,CAAA;qBACZ;oBACD,wDAAwD;oBACxD,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;iBAChE;aACF;YACD,kBAAkB;iBACb;gBACH,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;aAChD;SACF;QACD,oCAAoC;aAC/B,IAAI,mBAAmB,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,eAAe,CAAC,EAAE;YAC7D,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;YACzB,gBAAM,CACJ,GAAG,CAAC,KAAK,CAAC,YAAY,CAAC,EACvB,4EAA4E,GAAG,GAAG,CACnF,CAAA;YAED,OAAO,GAAG,GAAG,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAA;SAC3C;KACF;IAED,gBAAM,CACJ,eAAe,CAAC,IAAI,CAAC,EACrB,gEAAgE,CACjE,CAAA;IAED,8CAA8C;IAC9C,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,EAAE;QAC7D,sBAAsB;KACvB;SAAM;QACL,mBAAmB;QACnB,IAAI,IAAI,IAAI,CAAC,GAAG,CAAA;KACjB;IAED,OAAO,IAAI,GAAG,QAAQ,CAAA;AACxB,CAAC;AAlED,gDAkEC;AAED;;;GAGG;AACH,SAAgB,eAAe,CAAC,QAAgB;IAC9C,gBAAM,CAAC,QAAQ,EAAE,wDAAwD,CAAC,CAAA;IAE1E,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,iCAAiC;QACjC,OAAO,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,IAAI,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAClE;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAdD,0CAcC;AAED;;;GAGG;AACH,SAAgB,OAAO,CAAC,QAAgB;IACtC,gBAAM,CAAC,QAAQ,EAAE,iDAAiD,CAAC,CAAA;IAEnE,uBAAuB;IACvB,QAAQ,GAAG,mBAAmB,CAAC,QAAQ,CAAC,CAAA;IAExC,UAAU;IACV,IAAI,UAAU,EAAE;QACd,8BAA8B;QAC9B,sBAAsB;QACtB,OAAO,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;KAC9D;IAED,cAAc;IACd,OAAO,QAAQ,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,0BAeC;AAED;;GAEG;AACH,SAAgB,mBAAmB,CAAC,CAAS;IAC3C,CAAC,GAAG,CAAC,IAAI,EAAE,CAAA;IAEX,UAAU;IACV,IAAI,UAAU,EAAE;QACd,6BAA6B;QAC7B,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;QAE1B,2BAA2B;QAC3B,MAAM,KAAK,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA,CAAC,eAAe;QACnD,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAA,CAAC,8BAA8B;KACtF;IAED,2BAA2B;IAC3B,OAAO,CAAC,CAAC,OAAO,CAAC,QAAQ,EAAE,GAAG,CAAC,CAAA;AACjC,CAAC;AAfD,kDAeC;AAED;;;GAGG;AACH,SAAgB,yBAAyB,CAAC,CAAS;IACjD,yBAAyB;IA
CzB,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,EAAE,CAAA;KACV;IAED,uBAAuB;IACvB,CAAC,GAAG,mBAAmB,CAAC,CAAC,CAAC,CAAA;IAE1B,oBAAoB;IACpB,IAAI,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QACzB,OAAO,CAAC,CAAA;KACT;IAED,8CAA8C;IAC9C,IAAI,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;QAClB,OAAO,CAAC,CAAA;KACT;IAED,2CAA2C;IAC3C,IAAI,UAAU,IAAI,aAAa,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE;QACvC,OAAO,CAAC,CAAA;KACT;IAED,gCAAgC;IAChC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;AAClC,CAAC;AA1BD,8DA0BC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path.d.ts b/node_modules/@actions/glob/lib/internal-path.d.ts new file mode 100644 index 0000000000..5e0a2641f4 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.d.ts @@ -0,0 +1,15 @@ +/** + * Helper class for parsing paths into segments + */ +export declare class Path { + segments: string[]; + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath: string | string[]); + /** + * Converts the path to it's string representation + */ + toString(): string; +} diff --git a/node_modules/@actions/glob/lib/internal-path.js b/node_modules/@actions/glob/lib/internal-path.js new file mode 100644 index 0000000000..88a8715a9a --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.js @@ -0,0 +1,113 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Path = void 0; +const path = __importStar(require("path")); +const pathHelper = __importStar(require("./internal-path-helper")); +const assert_1 = __importDefault(require("assert")); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-path.js.map b/node_modules/@actions/glob/lib/internal-path.js.map new file mode 100644 index 0000000000..8e8eeb57e7 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-path.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-path.js","sourceRoot":"","sources":["../src/internal-path.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA4B;AAC5B,mEAAoD;AACpD,oDAA2B;AAE3B,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;GAEG;AACH,MAAa,IAAI;IAGf;;;OAGG;IACH,YAAY,QAA2B;QANvC,aAAQ,GAAa,EAAE,CAAA;QAOrB,SAAS;QACT,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;YAChC,gBAAM,CAAC,QAAQ,EAAE,wCAAwC,CAAC,CAAA;YAE1D,wDAAwD;YACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;YAEzD,aAAa;YACb,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;gBACjC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;aACzC;YACD,SAAS;iBACJ;gBACH,0CAA0C;gBAC1C,IAAI,SAAS,GAAG,QAAQ,CAAA;gBACxB,IAAI,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;gBACvC,OAAO,GAAG,KAAK,SAAS,EAAE;oBACxB,kBAAkB;oBAClB,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAA;oBACzC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAA;oBAE/B,4BAA4B;oBAC5B,SAAS,GAAG,GAAG,CAAA;oBACf,GAAG,GAAG,UAAU,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;iBACpC;gBAED,wBAAwB;gBACxB,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,CAAA;aACjC;SACF;QACD,QAAQ;aACH;YACH,oBAAoB;YACpB,gBAAM,CACJ,QAAQ,CAAC,MAAM,GAAG,CAAC,EACnB,iDAAiD,CAClD,CAAA;YAED,eAAe;YACf,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBACxC,IAAI,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAA;gBAEzB,oBAAoB;gBACpB,gBAAM,CACJ,OAAO,EACP,0DAA0D,CAC3D,CAAA;gBAED,oBAAoB;gBACpB,OAAO,GAAG,UAAU,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;gBAErD,eAAe;gBACf,IAAI,CAAC,KAAK,CAAC,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;oBAC1C,OAAO,GAAG,UAAU,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAA;oBACvD,gBAAM,CACJ,OAAO,KAAK,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,EACvC,8EAA8E,CAC/E,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;gBACD,qBAAqB;qBAChB;oBACH,yBAAyB;oBACzB,gBAAM,CACJ,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,EAC3B,0DAA0D,CAC3D,CAAA;oBACD,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;iBAC5B;aACF;SACF;IACH,CAAC;IAED;;OAEG;IACH,QAAQ;QACN,gBAAgB;QAChB,IAAI,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;QAE7B,aAAa;QACb,IAAI,SAAS,GACX,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,UAAU,IAAI,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAA;QACvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,IAAI,SAAS,EAAE;gBACb,SAAS,GAAG,KAAK,CAAA;aAClB;iBAAM;gBACL,MAAM,IAAI,IAAI,CAAC,GAAG,CAAA;aACnB;YAED,MAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;SAC3B;QAED,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAvGD,oBAuGC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts b/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts new file mode 100644 index 0000000000..c5dcc0725f --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.d.ts @@ -0,0 +1,15 @@ +import { MatchKind } from './internal-match-kind'; +import { Pattern } from './internal-pattern'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. 
+ */ +export declare function getSearchPaths(patterns: Pattern[]): string[]; +/** + * Matches the patterns against the path + */ +export declare function match(patterns: Pattern[], itemPath: string): MatchKind; +/** + * Checks whether to descend further into the directory + */ +export declare function partialMatch(patterns: Pattern[], itemPath: string): boolean; diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.js b/node_modules/@actions/glob/lib/internal-pattern-helper.js new file mode 100644 index 0000000000..a7a0440d43 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.js @@ -0,0 +1,94 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.partialMatch = exports.match = exports.getSearchPaths = void 0; +const pathHelper = __importStar(require("./internal-path-helper")); +const internal_match_kind_1 = require("./internal-match-kind"); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? 
pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern-helper.js.map b/node_modules/@actions/glob/lib/internal-pattern-helper.js.map new file mode 100644 index 0000000000..ae372853b9 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern-helper.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-pattern-helper.js","sourceRoot":"","sources":["../src/internal-pattern-helper.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,mEAAoD;AACpD,+DAA+C;AAG/C,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C;;;GAGG;AACH,SAAgB,cAAc,CAAC,QAAmB;IAChD,yBAAyB;IACzB,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAA;IAE1C,mCAAmC;IACnC,MAAM,aAAa,GAA4B,EAAE,CAAA;IACjD,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,aAAa,CAAC,GAAG,CAAC,GAAG,WAAW,CAAA;KACjC;IAED,MAAM,MAAM,GAAa,EAAE,CAAA;IAE3B,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,4BAA4B;QAC5B,MAAM,GAAG,GAAG,UAAU;YACpB,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,WAAW,EAAE;YAClC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAA;QACtB,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,UAAU,EAAE;YACrC,SAAQ;SACT;QAED,oCAAoC;QACpC,IAAI,aAAa,GAAG,KAAK,CAAA;QACzB,IAAI,OAAO,GAAG,GAAG,CAAA;QACjB,IAAI,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;QACxC,OAAO,MAAM,KAAK,OAAO,EAAE;YACzB,IAAI,aAAa,CAAC,MAAM,CAAC,EAAE;gBACzB,aAAa,GAAG,IAAI,CAAA;gBACpB,MAAK;aACN;YAED,OAAO,GAAG,MAAM,CAAA;YAChB,MAAM,GAAG,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;SACrC;QAED,2CAA2C;QAC3C,IAAI,CAAC,aAAa,EAAE;YAClB,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,CAAA;YAC/B,aAAa,CAAC,GAAG,CAAC,GAAG,UAAU,CAAA;SAChC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AA9CD,wCA8CC;AAED;;GAEG;AACH,SAAgB,KAAK,CAAC,QAAmB,EAAE,QAAgB;IACzD,IAAI,MAAM,GAAc,+BAAS,CAAC,IAAI,CAAA;IAEtC,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,MAAM,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SACnC;aAAM;YACL,MAAM,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAA;SAClC;KACF;IAED,OAAO,MAAM,CAAA;AACf,CAAC;AAZD,sBAYC;AAED;;GAEG;AACH,SAAgB,YAAY,CAAC,QAAmB,EAAE,QAAgB;IAChE,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,IAAI,CAAC,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,CAAA;AAClE,CAAC;AAFD,oCAEC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern.d.ts 
b/node_modules/@actions/glob/lib/internal-pattern.d.ts new file mode 100644 index 0000000000..2c2224c84b --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.d.ts @@ -0,0 +1,64 @@ +import { MatchKind } from './internal-match-kind'; +export declare class Pattern { + /** + * Indicates whether matches should be excluded from the result set + */ + readonly negate: boolean; + /** + * The directory to search. The literal path prior to the first glob segment. + */ + readonly searchPath: string; + /** + * The path/pattern segments. Note, only the first segment (the root directory) + * may contain a directory separator character. Use the trailingSeparator field + * to determine whether the pattern ended with a trailing slash. + */ + readonly segments: string[]; + /** + * Indicates the pattern should only match directories, not regular files. + */ + readonly trailingSeparator: boolean; + /** + * The Minimatch object used for matching + */ + private readonly minimatch; + /** + * Used to workaround a limitation with Minimatch when determining a partial + * match and the path is a root directory. For example, when the pattern is + * `/foo/**` or `C:\foo\**` and the path is `/` or `C:\`. + */ + private readonly rootRegExp; + /** + * Indicates that the pattern is implicitly added as opposed to user specified. + */ + private readonly isImplicitPattern; + constructor(pattern: string); + constructor(pattern: string, isImplicitPattern: boolean, segments: undefined, homedir: string); + constructor(negate: boolean, isImplicitPattern: boolean, segments: string[], homedir?: string); + /** + * Matches the pattern against the specified path + */ + match(itemPath: string): MatchKind; + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath: string): boolean; + /** + * Escapes glob patterns within a path + */ + static globEscape(s: string): string; + /** + * Normalizes slashes and ensures absolute root + */ + private static fixupPattern; + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + private static getLiteral; + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + private static regExpEscape; +} diff --git a/node_modules/@actions/glob/lib/internal-pattern.js b/node_modules/@actions/glob/lib/internal-pattern.js new file mode 100644 index 0000000000..a17ffe9709 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.js @@ -0,0 +1,255 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const os = __importStar(require("os")); +const path = __importStar(require("path")); +const pathHelper = __importStar(require("./internal-path-helper")); +const assert_1 = __importDefault(require("assert")); +const minimatch_1 = require("minimatch"); +const internal_match_kind_1 = require("./internal-match-kind"); +const internal_path_1 = require("./internal-path"); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert_1.default(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern, homedir); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + this.isImplicitPattern = isImplicitPattern; + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); + } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an absolute root. 
+ itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; + } + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern, homedir) { + // Empty + assert_1.default(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + homedir = homedir || os.homedir(); + assert_1.default(homedir, 'Unable to determine HOME directory'); + assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. 
pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } + } + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } + } + // Otherwise fall thru + } + // Append + literal += c; + } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-pattern.js.map b/node_modules/@actions/glob/lib/internal-pattern.js.map new file mode 100644 index 0000000000..08e172d7bc --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-pattern.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"internal-pattern.js","sourceRoot":"","sources":["../src/internal-pattern.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAAA,uCAAwB;AACxB,2CAA4B;AAC5B,mEAAoD;AACpD,oDAA2B;AAC3B,yCAA8E;AAC9E,+DAA+C;AAC/C,mDAAoC;AAEpC,MAAM,UAAU,GAAG,OAAO,CAAC,QAAQ,KAAK,OAAO,CAAA;AAE/C,MAAa,OAAO;IAqDlB,YACE,eAAiC,EACjC,iBAAiB,GAAG,KAAK,EACzB,QAAmB,EACnB,OAAgB;QAxDlB;;WAEG;QACM,WAAM,GAAY,KAAK,CAAA;QAuD9B,mBAAmB;QACnB,IAAI,OAAe,CAAA;QACnB,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;YACvC,OAAO,GAAG,eAAe,CAAC,IAAI,EAAE,CAAA;SACjC;QACD,oBAAoB;aACf;YACH,qBAAqB;YACrB,QAAQ,GAAG,QAAQ,IAAI,EAAE,CAAA;YACzB,gBAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,qCAAqC,CAAC,CAAA;YAC9D,MAAM,IAAI,GAAG,OAAO,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;YAC5C,gBAAM,CACJ,IAAI,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,EACxC,wDAAwD,CACzD,CAAA;YACD,OAAO,GAAG,IAAI,oBAAI,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC,IAAI,EAAE,CAAA;YAC9C,IAAI,eAAe,EAAE;gBACnB,OAAO,GAAG,IAAI,OAAO,EAAE,CAAA;aACxB;SACF;QAED,SAAS;QACT,OAAO,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YAC9B,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,MAAM,CAAA;YAC1B,OAAO,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAA;SACnC;QAED,8CAA8C;QAC9C,OAAO,GAAG,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,OAAO,CAAC,CAAA;QAEhD,WAAW;QACX,IAAI,CAAC,QAAQ,GAAG,IAAI,oBAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAA;QAE1C,wFAAwF;QACxF,IAAI,CAAC,iBAAiB,GAAG,UAAU;aAChC,mBAAmB,CAAC,OAAO,CAAC;aAC5B,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACrB,OAAO,GAAG,UAAU,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAA;QAEvD,6DAA6D;QAC7D,IAAI,SAAS,GAAG,KAAK,CAAA;QACrB,MAAM,cAAc,GAAG,IAAI,CAAC,QAAQ;aACjC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;aAC/B,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,SAAS,IAAI,CAAC,CAAC,SAAS,GAAG,CAAC,KAAK,EAAE,CAAC,CAAC,CAAA;QACrD,IAAI,CAAC,UAAU,GAAG,IAAI,oBAAI,CAAC,cAAc,CAAC,CAAC,QAAQ,EAAE,CAAA;QAErD,wDAAwD;QACxD,IAAI,CAAC,UAAU,GAAG,IAAI,MAAM,CAC1B,OAAO,CAAC,YAAY,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EACvC,UAAU,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CACtB,CAAA;QAED,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAA;QAE1C,mBAAmB;QACnB,MAAM,gBAAgB,GAAsB;YAC1C,GAAG,EAAE,IAAI;YACT,OAAO,EAAE,IAAI;YACb,MAAM,EAAE,UAAU;YAClB,SAAS,EAAE,IAAI;YACf,KAAK,EAAE,IAAI;YACX,QAAQ,EAAE,IAAI;SACf,CAAA;QACD,OAAO,GAAG,UAAU,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,OAAO,CAAA;QAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,qBAAS,CAAC,OAAO,EAAE,gBAAgB,CAAC,CAAA;IAC3D,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,QAAgB;QACpB,4BAA4B;QAC5B,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;YACpD,oBAAoB;YACpB,QAAQ,GAAG,UAAU,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAA;YAEnD,wFAAwF;YACxF,sFAAsF;YACtF,+FAA+F;YAC/F,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,iBAAiB,KAAK,KAAK,EAAE;gBACpE,uFAAuF;gBACvF,sFAAsF;gBACtF,QAAQ,GAAG,GAAG,QAAQ,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;aACpC;SACF;aAAM;YACL,wDAAwD;YACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;SAC1D;QAED,QAAQ;QACR,IAAI,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;YAClC,OAAO,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,+BAAS,CAAC,SAAS,CAAC,CAAC,CAAC,+BAAS,CAAC,GAAG,CAAA;SACpE;QAED,OAAO,+BAAS,CAAC,IAAI,CAAA;IACvB,CAAC;IAED;;OAEG;IACH,YAAY,CAAC,QAAgB;QAC3B,wDAAwD;QACxD,QAAQ,GAAG,UAAU,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAA;QAEzD,+CAA+C;QAC/C,IAAI,UAAU,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,QAAQ,EAAE;YAC7C,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAA;SACtC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,CAC5B,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAC1C,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,EACrB,IAAI,CACL,CAAA;IACH,CAAC;IAED;;OAEG;IACH,MAAM,CAAC,UAAU,CAAC,CAAS;QACzB,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,
KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,4BAA4B;aAC5E,OAAO,CAAC,kBAAkB,EAAE,KAAK,CAAC,CAAC,sDAAsD;aACzF,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,aAAa;aACnC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAA,CAAC,aAAa;IACxC,CAAC;IAED;;OAEG;IACK,MAAM,CAAC,YAAY,CAAC,OAAe,EAAE,OAAgB;QAC3D,QAAQ;QACR,gBAAM,CAAC,OAAO,EAAE,yBAAyB,CAAC,CAAA;QAE1C,qDAAqD;QACrD,gCAAgC;QAChC,MAAM,eAAe,GAAG,IAAI,oBAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CACzD,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CACtB,CAAA;QACD,gBAAM,CACJ,eAAe,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,KAAK,IAAI,CAAC,EACrE,oBAAoB,OAAO,kDAAkD,CAC9E,CAAA;QAED,kEAAkE;QAClE,gBAAM,CACJ,CAAC,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,eAAe,CAAC,CAAC,CAAC,EAClD,oBAAoB,OAAO,yCAAyC,CACrE,CAAA;QAED,oBAAoB;QACpB,OAAO,GAAG,UAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,CAAA;QAEjD,8BAA8B;QAC9B,IAAI,OAAO,KAAK,GAAG,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC,EAAE;YACzD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAChE;QACD,8BAA8B;aACzB,IAAI,OAAO,KAAK,GAAG,IAAI,OAAO,CAAC,UAAU,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE,CAAC,EAAE;YAC9D,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC,OAAO,EAAE,CAAA;YACjC,gBAAM,CAAC,OAAO,EAAE,oCAAoC,CAAC,CAAA;YACrD,gBAAM,CACJ,UAAU,CAAC,eAAe,CAAC,OAAO,CAAC,EACnC,wDAAwD,OAAO,GAAG,CACnE,CAAA;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAC1D;QACD,2DAA2D;aACtD,IACH,UAAU;YACV,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,IAAI,OAAO,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,EAC9D;YACA,IAAI,IAAI,GAAG,UAAU,CAAC,kBAAkB,CACtC,gBAAgB,EAChB,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CACrB,CAAA;YACD,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBAC9C,IAAI,IAAI,IAAI,CAAA;aACb;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACvD;QACD,mDAAmD;aAC9C,IAAI,UAAU,IAAI,CAAC,OAAO,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,EAAE;YACtE,IAAI,IAAI,GAAG,UAAU,CAAC,kBAAkB,CAAC,gBAAgB,EAAE,IAAI,CAAC,CAAA;YAChE,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;gBACxB,IAAI,IAAI,IAAI,CAAA;aACb;YACD,OAAO,GAAG,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACvD;QACD,iCAAiC;aAC5B;YACH,OAAO,GAAG,UAAU,CAAC,kBAAkB,CACrC,OAAO,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,EACjC,OAAO,CACR,CAAA;SACF;QAED,OAAO,UAAU,CAAC,mBAAmB,CAAC,OAAO,CAAC,CAAA;IAChD,CAAC;IAED;;;OAGG;IACK,MAAM,CAAC,UAAU,CAAC,OAAe;QACvC,IAAI,OAAO,GAAG,EAAE,CAAA;QAChB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAA;YACpB,SAAS;YACT,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,UAAU,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;gBACvD,OAAO,IAAI,OAAO,CAAC,EAAE,CAAC,CAAC,CAAA;gBACvB,SAAQ;aACT;YACD,WAAW;iBACN,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,EAAE;gBAC/B,OAAO,EAAE,CAAA;aACV;YACD,gBAAgB;iBACX,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;gBAC5C,IAAI,GAAG,GAAG,EAAE,CAAA;gBACZ,IAAI,MAAM,GAAG,CAAC,CAAC,CAAA;gBACf,KAAK,IAAI,EAAE,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,EAAE,EAAE;oBAC9C,MAAM,EAAE,GAAG,OAAO,CAAC,EAAE,CAAC,CAAA;oBACtB,SAAS;oBACT,IAAI,EAAE,KAAK,IAAI,IAAI,CAAC,UAAU,IAAI,EAAE,GAAG,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE;wBACzD,GAAG,IAAI,OAAO,CAAC,EAAE,EAAE,CAAC,CAAA;wBACpB,SAAQ;qBACT;oBACD,SAAS;yBACJ,IAAI,EAAE,KAAK,GAAG,EAAE;wBACnB,MAAM,GAAG,EAAE,CAAA;wBACX,MAAK;qBACN;oBACD,YAAY;yBACP;wBACH,GAAG,IAAI,EAAE,CAAA;qBACV;iBACF;gBAED,UAAU;gBACV,IAAI,MAAM,IAAI,CAAC,EAAE;oBACf,iBAAiB;oBACjB,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,EAAE;wBAClB,OAAO,EAAE,CAAA;qBACV;oBAED,qBAAqB;oBACrB,IAAI,GAAG,EAAE;wBAC
P,OAAO,IAAI,GAAG,CAAA;wBACd,CAAC,GAAG,MAAM,CAAA;wBACV,SAAQ;qBACT;iBACF;gBAED,sBAAsB;aACvB;YAED,SAAS;YACT,OAAO,IAAI,CAAC,CAAA;SACb;QAED,OAAO,OAAO,CAAA;IAChB,CAAC;IAED;;;OAGG;IACK,MAAM,CAAC,YAAY,CAAC,CAAS;QACnC,OAAO,CAAC,CAAC,OAAO,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;IAC7C,CAAC;CACF;AAzUD,0BAyUC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-search-state.d.ts b/node_modules/@actions/glob/lib/internal-search-state.d.ts new file mode 100644 index 0000000000..aac344eb38 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.d.ts @@ -0,0 +1,5 @@ +export declare class SearchState { + readonly path: string; + readonly level: number; + constructor(path: string, level: number); +} diff --git a/node_modules/@actions/glob/lib/internal-search-state.js b/node_modules/@actions/glob/lib/internal-search-state.js new file mode 100644 index 0000000000..5d266ce1f3 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SearchState = void 0; +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map \ No newline at end of file diff --git a/node_modules/@actions/glob/lib/internal-search-state.js.map b/node_modules/@actions/glob/lib/internal-search-state.js.map new file mode 100644 index 0000000000..4421012462 --- /dev/null +++ b/node_modules/@actions/glob/lib/internal-search-state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"internal-search-state.js","sourceRoot":"","sources":["../src/internal-search-state.ts"],"names":[],"mappings":";;;AAAA,MAAa,WAAW;IAItB,YAAY,IAAY,EAAE,KAAa;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;QAChB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAA;IACpB,CAAC;CACF;AARD,kCAQC"} \ No newline at end of file diff --git a/node_modules/@actions/glob/package.json b/node_modules/@actions/glob/package.json new file mode 100644 index 0000000000..4dfd76aa7b --- /dev/null +++ b/node_modules/@actions/glob/package.json @@ -0,0 +1,43 @@ +{ + "name": "@actions/glob", + "version": "0.1.2", + "preview": true, + "description": "Actions glob lib", + "keywords": [ + "github", + "actions", + "glob" + ], + "homepage": "https://github.com/actions/toolkit/tree/main/packages/glob", + "license": "MIT", + "main": "lib/glob.js", + "types": "lib/glob.d.ts", + "directories": { + "lib": "lib", + "test": "__tests__" + }, + "files": [ + "lib", + "!.DS_Store" + ], + "publishConfig": { + "access": "public" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/actions/toolkit.git", + "directory": "packages/glob" + }, + "scripts": { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + "test": "echo \"Error: run tests from root\" && exit 1", + "tsc": "tsc" + }, + "bugs": { + "url": "https://github.com/actions/toolkit/issues" + }, + "dependencies": { + "@actions/core": "^1.2.6", + "minimatch": "^3.0.4" + } +} diff --git a/node_modules/@actions/http-client/package.json b/node_modules/@actions/http-client/package.json index f9db7e7702..c1de22136e 100644 --- a/node_modules/@actions/http-client/package.json +++ b/node_modules/@actions/http-client/package.json @@ -1,6 +1,6 @@ { "name": "@actions/http-client", - "version": "2.0.0", + "version": "2.0.1", "description": "Actions Http Client", "keywords": [ "github", @@ -25,7 +25,7 @@ "repository": { "type": "git", "url": 
"git+https://github.com/actions/toolkit.git", - "directory": "packages/github" + "directory": "packages/http-client" }, "scripts": { "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", @@ -40,7 +40,9 @@ }, "devDependencies": { "@types/tunnel": "0.0.3", - "proxy": "^1.0.1", - "tunnel": "0.0.6" + "proxy": "^1.0.1" + }, + "dependencies": { + "tunnel": "^0.0.6" } } diff --git a/node_modules/@azure/abort-controller/CHANGELOG.md b/node_modules/@azure/abort-controller/CHANGELOG.md new file mode 100644 index 0000000000..b76452300c --- /dev/null +++ b/node_modules/@azure/abort-controller/CHANGELOG.md @@ -0,0 +1,34 @@ +# Release History + +## 1.1.0 (2022-05-05) + +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features +- With the dropping of support for Node.js versions that are no longer in LTS, the dependency on `@types/node` has been updated to version 12. Read our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +## 1.0.4 (2021-03-04) + +Fixes issue [13985](https://github.com/Azure/azure-sdk-for-js/issues/13985) where abort event listeners that removed themselves when invoked could prevent other event listeners from being invoked. + +## 1.0.3 (2021-02-23) + +Support Typescript version < 3.6 by down-leveling the type definition files. ([PR 12793](https://github.com/Azure/azure-sdk-for-js/pull/12793)) + +## 1.0.2 (2020-01-07) + +Updates the `tslib` dependency to version 2.x. + +## 1.0.1 (2019-12-04) + +Fixes the [bug 6271](https://github.com/Azure/azure-sdk-for-js/issues/6271) that can occur with angular prod builds due to triple-slash directives. +([PR 6344](https://github.com/Azure/azure-sdk-for-js/pull/6344)) + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/abort-controller` package. + +Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. +([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.2 (2019-09-09) + +Listeners attached to an `AbortSignal` now receive an event with the type `abort`. (PR #4756) diff --git a/node_modules/@azure/abort-controller/LICENSE b/node_modules/@azure/abort-controller/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/abort-controller/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/abort-controller/README.md b/node_modules/@azure/abort-controller/README.md new file mode 100644 index 0000000000..ce578d95b5 --- /dev/null +++ b/node_modules/@azure/abort-controller/README.md @@ -0,0 +1,110 @@ +# Azure Abort Controller client library for JavaScript + +The `@azure/abort-controller` package provides `AbortController` and `AbortSignal` classes. These classes are compatible +with the [AbortController](https://developer.mozilla.org/docs/Web/API/AbortController) built into modern browsers +and the `AbortSignal` used by [fetch](https://developer.mozilla.org/docs/Web/API/Fetch_API). +Use the `AbortController` class to create an instance of the `AbortSignal` class that can be used to cancel an operation +in an Azure SDK that accepts a parameter of type `AbortSignalLike`. + +Key links: + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller) +- [Package (npm)](https://www.npmjs.com/package/@azure/abort-controller) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/overview/azure/abort-controller-readme) + +## Getting started + +### Installation + +Install this library using npm as follows: + +``` +npm install @azure/abort-controller +``` + +## Key Concepts + +Use the `AbortController` to create an `AbortSignal` which can then be passed to Azure SDK operations to cancel +pending work. The `AbortSignal` can be accessed via the `signal` property on an instantiated `AbortController`. +An `AbortSignal` can also be returned directly from a static method, e.g. `AbortController.timeout(100)`, +which is cancelled after 100 milliseconds. + +Calling `abort()` on the instantiated `AbortController` invokes the registered `abort` +event listeners on the associated `AbortSignal`. +Any subsequent calls to `abort()` on the same controller will have no effect. + +The `AbortSignal.none` static property returns an `AbortSignal` that cannot be aborted. + +Multiple instances of an `AbortSignal` can be linked so that calling `abort()` on the parent signal +aborts all linked signals. +This linkage is one-way, meaning that a parent signal can affect a linked signal, but not the other way around. +To link `AbortSignals` together, pass in the parent signals to the `AbortController` constructor. + +## Examples + +The examples below assume that `doAsyncWork` is a function that takes a bag of properties, one of which is +the abort signal.
+ +### Example 1 - Basic usage + +```js +import { AbortController } from "@azure/abort-controller"; + +const controller = new AbortController(); +doAsyncWork({ abortSignal: controller.signal }); + +// at some point later +controller.abort(); +``` + +### Example 2 - Aborting with timeout + +```js +import { AbortController } from "@azure/abort-controller"; + +const signal = AbortController.timeout(1000); +doAsyncWork({ abortSignal: signal }); +``` + +### Example 3 - Aborting sub-tasks + +```js +import { AbortController } from "@azure/abort-controller"; + +const allTasksController = new AbortController(); + +const subTask1 = new AbortController(allTasksController.signal); +const subTask2 = new AbortController(allTasksController.signal); + +allTasksController.abort(); // aborts allTasksController.signal, subTask1, subTask2 +subTask1.abort(); // aborts only subTask1 +``` + +### Example 4 - Aborting with parent signal or timeout + +```js +import { AbortController } from "@azure/abort-controller"; + +const allTasksController = new AbortController(); + +// create a subtask controller that can be aborted manually, +// or when either the parent task aborts or the timeout is reached. +const subTask = new AbortController(allTasksController.signal, AbortController.timeout(100)); + +allTasksController.abort(); // aborts allTasksController.signal, subTask +subTask.abort(); // aborts only subTask +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fabort-controller%2FREADME.png) diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js new file mode 100644 index 0000000000..0d260f30db --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js @@ -0,0 +1,118 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortSignal, abortSignal } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted.
+ * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export class AbortController { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort() { + abortSignal(this._signal); + } + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. 
+ if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + } +} +//# sourceMappingURL=AbortController.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map new file mode 100644 index 0000000000..8a6f7d0d3d --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortController.js","sourceRoot":"","sources":["../../src/AbortController.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAmB,WAAW,EAAE,MAAM,eAAe,CAAC;AAE1E;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,OAAO,UAAW,SAAQ,KAAK;IACnC,YAAY,OAAgB;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,MAAM,OAAO,eAAe;IAW1B,6EAA6E;IAC7E,YAAY,aAAmB;QAC7B,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAEjC,IAAI,CAAC,aAAa,EAAE;YAClB,OAAO;SACR;QACD,qCAAqC;QACrC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YACjC,8CAA8C;YAC9C,aAAa,GAAG,SAAS,CAAC;SAC3B;QACD,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,uDAAuD;YACvD,0CAA0C;YAC1C,IAAI,YAAY,CAAC,OAAO,EAAE;gBACxB,IAAI,CAAC,KAAK,EAAE,CAAC;aACd;iBAAM;gBACL,6DAA6D;gBAC7D,YAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE;oBAC1C,IAAI,CAAC,KAAK,EAAE,CAAC;gBACf,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACH,KAAK;QACH,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC5B,CAAC;IAED;;;OAGG;IACI,MAAM,CAAC,OAAO,CAAC,EAAU;QAC9B,MAAM,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;QACjC,MAAM,KAAK,GAAG,UAAU,CAAC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;QAClD,uEAAuE;QACvE,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACrC,KAAK,CAAC,KAAK,EAAE,CAAC;SACf;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignal, AbortSignalLike, abortSignal } from \"./AbortSignal\";\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * @example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't 
take more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nexport class AbortController {\n private _signal: AbortSignal;\n\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(parentSignals?: AbortSignalLike[]);\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(...parentSignals: AbortSignalLike[]);\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n constructor(parentSignals?: any) {\n this._signal = new AbortSignal();\n\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (const parentSignal of parentSignals) {\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n } else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", () => {\n this.abort();\n });\n }\n }\n }\n\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n public get signal(): AbortSignal {\n return this._signal;\n }\n\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n abort(): void {\n abortSignal(this._signal);\n }\n\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n public static timeout(ms: number): AbortSignal {\n const signal = new AbortSignal();\n const timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js new file mode 100644 index 0000000000..e97336ad11 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export class AbortSignal { + constructor() { + /** + * onabort event listener. + */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + /** + * Status of whether aborted or not. 
+ * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); + } + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + } + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +export function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. 
+ listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} +//# sourceMappingURL=AbortSignal.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map new file mode 100644 index 0000000000..1ca33bdf65 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignal.js","sourceRoot":"","sources":["../../src/AbortSignal.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,6CAA6C;AAI7C,MAAM,YAAY,GAAG,IAAI,OAAO,EAAqC,CAAC;AACtE,MAAM,UAAU,GAAG,IAAI,OAAO,EAAwB,CAAC;AA6BvD;;;;;;;;;;;;GAYG;AACH,MAAM,OAAO,WAAW;IACtB;QA2BA;;WAEG;QACI,YAAO,GAAiC,IAAI,CAAC;QA7BlD,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;QAC3B,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC;IAED;;;;OAIG;IACH,IAAW,OAAO;QAChB,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YACzB,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,OAAO,UAAU,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;IAC/B,CAAC;IAED;;;;OAIG;IACI,MAAM,KAAK,IAAI;QACpB,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IAOD;;;;;OAKG;IACI,gBAAgB;IACrB,yCAAyC;IACzC,KAAc,EACd,QAAiD;QAEjD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YAC3B,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAC1C,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;OAKG;IACI,mBAAmB;IACxB,yCAAyC;IACzC,KAAc,EACd,QAAiD;QAEjD,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;YAC3B,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;SAC1E;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAE1C,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;QAC1C,IAAI,KAAK,GAAG,CAAC,CAAC,EAAE;YACd,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IACH,aAAa,CAAC,MAAa;QACzB,MAAM,IAAI,KAAK,CACb,kHAAkH,CACnH,CAAC;IACJ,CAAC;CACF;AAED;;;;;;;;GAQG;AACH,wEAAwE;AACxE,MAAM,UAAU,WAAW,CAAC,MAAmB;IAC7C,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,OAAO;KACR;IAED,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KAC7B;IAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,MAAM,CAAE,CAAC;IAC5C,IAAI,SAAS,EAAE;QACb,uDAAuD;QACvD,6DAA6D;QAC7D,aAAa;QACb,SAAS,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE,EAAE;YACrC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;KACJ;IAED,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// eslint-disable-next-line @typescript-eslint/triple-slash-reference\n/// \n\ntype AbortEventListener = (this: AbortSignalLike, ev?: any) => any;\n\nconst listenersMap = new WeakMap();\nconst abortedMap = new WeakMap();\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n}\n\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP 
requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nexport class AbortSignal implements AbortSignalLike {\n constructor() {\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n public get aborted(): boolean {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n return abortedMap.get(this)!;\n }\n\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n public static get none(): AbortSignal {\n return new AbortSignal();\n }\n\n /**\n * onabort event listener.\n */\n public onabort: ((ev?: Event) => any) | null = null;\n\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n public addEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n listeners.push(listener);\n }\n\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n public removeEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n\n const index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n }\n\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n dispatchEvent(_event: Event): boolean {\n throw new Error(\n \"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\"\n );\n }\n}\n\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nexport function abortSignal(signal: AbortSignal): void {\n if (signal.aborted) {\n return;\n }\n\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n\n const listeners = listenersMap.get(signal)!;\n if (listeners) {\n // Create a copy of listeners so mutations to the array\n // (e.g. 
via removeListener calls) don't affect the listeners\n // we invoke.\n listeners.slice().forEach((listener) => {\n listener.call(signal, { type: \"abort\" });\n });\n }\n\n abortedMap.set(signal, true);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/index.js b/node_modules/@azure/abort-controller/dist-esm/src/index.js new file mode 100644 index 0000000000..ddbf505bd7 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/index.js @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// Changes to Aborter +// * Rename Aborter to AbortSignal +// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation +// * Remove withTimeout, it's moved to the controller +// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller. +// Potential changes to align with DOM Spec +// * dispatchEvent on Signal +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal } from "./AbortSignal"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist-esm/src/index.js.map b/node_modules/@azure/abort-controller/dist-esm/src/index.js.map new file mode 100644 index 0000000000..def556e67a --- /dev/null +++ b/node_modules/@azure/abort-controller/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,qBAAqB;AACrB,kCAAkC;AAClC,uHAAuH;AACvH,qDAAqD;AACrD,2GAA2G;AAE3G,2CAA2C;AAC3C,4BAA4B;AAE5B,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAChE,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// Changes to Aborter\n// * Rename Aborter to AbortSignal\n// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation\n// * Remove withTimeout, it's moved to the controller\n// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller.\n\n// Potential changes to align with DOM Spec\n// * dispatchEvent on Signal\n\nexport { AbortController, AbortError } from \"./AbortController\";\nexport { AbortSignal, AbortSignalLike } from \"./AbortSignal\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/dist/index.js b/node_modules/@azure/abort-controller/dist/index.js new file mode 100644 index 0000000000..650dd5f3e8 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist/index.js @@ -0,0 +1,239 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// +const listenersMap = new WeakMap(); +const abortedMap = new WeakMap(); +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +class AbortSignal { + constructor() { + /** + * onabort event listener. 
+ */ + this.onabort = null; + listenersMap.set(this, []); + abortedMap.set(this, false); + } + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted() { + if (!abortedMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + return abortedMap.get(this); + } + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none() { + return new AbortSignal(); + } + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + listeners.push(listener); + } + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener( + // tslint:disable-next-line:variable-name + _type, listener) { + if (!listenersMap.has(this)) { + throw new TypeError("Expected `this` to be an instance of AbortSignal."); + } + const listeners = listenersMap.get(this); + const index = listeners.indexOf(listener); + if (index > -1) { + listeners.splice(index, 1); + } + } + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event) { + throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); + } +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function abortSignal(signal) { + if (signal.aborted) { + return; + } + if (signal.onabort) { + signal.onabort.call(signal); + } + const listeners = listenersMap.get(signal); + if (listeners) { + // Create a copy of listeners so mutations to the array + // (e.g. via removeListener calls) don't affect the listeners + // we invoke. + listeners.slice().forEach((listener) => { + listener.call(signal, { type: "abort" }); + }); + } + abortedMap.set(signal, true); +} + +// Copyright (c) Microsoft Corporation. +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +class AbortError extends Error { + constructor(message) { + super(message); + this.name = "AbortError"; + } +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. 
+ * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +class AbortController { + // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types + constructor(parentSignals) { + this._signal = new AbortSignal(); + if (!parentSignals) { + return; + } + // coerce parentSignals into an array + if (!Array.isArray(parentSignals)) { + // eslint-disable-next-line prefer-rest-params + parentSignals = arguments; + } + for (const parentSignal of parentSignals) { + // if the parent signal has already had abort() called, + // then call abort on this signal as well. + if (parentSignal.aborted) { + this.abort(); + } + else { + // when the parent signal aborts, this signal should as well. + parentSignal.addEventListener("abort", () => { + this.abort(); + }); + } + } + } + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal() { + return this._signal; + } + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort() { + abortSignal(this._signal); + } + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. + */ + static timeout(ms) { + const signal = new AbortSignal(); + const timer = setTimeout(abortSignal, ms, signal); + // Prevent the active Timer from keeping the Node.js event loop active. 
+ if (typeof timer.unref === "function") { + timer.unref(); + } + return signal; + } +} + +exports.AbortController = AbortController; +exports.AbortError = AbortError; +exports.AbortSignal = AbortSignal; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/abort-controller/dist/index.js.map b/node_modules/@azure/abort-controller/dist/index.js.map new file mode 100644 index 0000000000..148b005b64 --- /dev/null +++ b/node_modules/@azure/abort-controller/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/AbortSignal.ts","../src/AbortController.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// eslint-disable-next-line @typescript-eslint/triple-slash-reference\n/// \n\ntype AbortEventListener = (this: AbortSignalLike, ev?: any) => any;\n\nconst listenersMap = new WeakMap();\nconst abortedMap = new WeakMap();\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n /**\n * Indicates if the signal has already been aborted.\n */\n readonly aborted: boolean;\n /**\n * Add new \"abort\" event listener, only support \"abort\" event.\n */\n addEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n */\n removeEventListener(\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any,\n options?: any\n ): void;\n}\n\n/**\n * An aborter instance implements AbortSignal interface, can abort HTTP requests.\n *\n * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled.\n * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation\n * cannot or will not ever be cancelled.\n *\n * @example\n * Abort without timeout\n * ```ts\n * await doAsyncWork(AbortSignal.none);\n * ```\n */\nexport class AbortSignal implements AbortSignalLike {\n constructor() {\n listenersMap.set(this, []);\n abortedMap.set(this, false);\n }\n\n /**\n * Status of whether aborted or not.\n *\n * @readonly\n */\n public get aborted(): boolean {\n if (!abortedMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n return abortedMap.get(this)!;\n }\n\n /**\n * Creates a new AbortSignal instance that will never be aborted.\n *\n * @readonly\n */\n public static get none(): AbortSignal {\n return new AbortSignal();\n }\n\n /**\n * onabort event listener.\n */\n public onabort: ((ev?: Event) => any) | null = null;\n\n /**\n * Added new \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be added\n */\n public addEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n listeners.push(listener);\n }\n\n /**\n * Remove \"abort\" event listener, only support \"abort\" event.\n *\n * @param _type - Only support \"abort\" event\n * @param listener - The listener to be removed\n */\n public removeEventListener(\n // tslint:disable-next-line:variable-name\n _type: \"abort\",\n listener: (this: AbortSignalLike, ev: any) => 
any\n ): void {\n if (!listenersMap.has(this)) {\n throw new TypeError(\"Expected `this` to be an instance of AbortSignal.\");\n }\n\n const listeners = listenersMap.get(this)!;\n\n const index = listeners.indexOf(listener);\n if (index > -1) {\n listeners.splice(index, 1);\n }\n }\n\n /**\n * Dispatches a synthetic event to the AbortSignal.\n */\n dispatchEvent(_event: Event): boolean {\n throw new Error(\n \"This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.\"\n );\n }\n}\n\n/**\n * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.\n * Will try to trigger abort event for all linked AbortSignal nodes.\n *\n * - If there is a timeout, the timer will be cancelled.\n * - If aborted is true, nothing will happen.\n *\n * @internal\n */\n// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters\nexport function abortSignal(signal: AbortSignal): void {\n if (signal.aborted) {\n return;\n }\n\n if (signal.onabort) {\n signal.onabort.call(signal);\n }\n\n const listeners = listenersMap.get(signal)!;\n if (listeners) {\n // Create a copy of listeners so mutations to the array\n // (e.g. via removeListener calls) don't affect the listeners\n // we invoke.\n listeners.slice().forEach((listener) => {\n listener.call(signal, { type: \"abort\" });\n });\n }\n\n abortedMap.set(signal, true);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignal, AbortSignalLike, abortSignal } from \"./AbortSignal\";\n\n/**\n * This error is thrown when an asynchronous operation has been aborted.\n * Check for this error by testing the `name` that the name property of the\n * error matches `\"AbortError\"`.\n *\n * @example\n * ```ts\n * const controller = new AbortController();\n * controller.abort();\n * try {\n * doAsyncWork(controller.signal)\n * } catch (e) {\n * if (e.name === 'AbortError') {\n * // handle abort error here.\n * }\n * }\n * ```\n */\nexport class AbortError extends Error {\n constructor(message?: string) {\n super(message);\n this.name = \"AbortError\";\n }\n}\n\n/**\n * An AbortController provides an AbortSignal and the associated controls to signal\n * that an asynchronous operation should be aborted.\n *\n * @example\n * Abort an operation when another event fires\n * ```ts\n * const controller = new AbortController();\n * const signal = controller.signal;\n * doAsyncWork(signal);\n * button.addEventListener('click', () => controller.abort());\n * ```\n *\n * @example\n * Share aborter cross multiple operations in 30s\n * ```ts\n * // Upload the same data to 2 different data centers at the same time,\n * // abort another when any of them is finished\n * const controller = AbortController.withTimeout(30 * 1000);\n * doAsyncWork(controller.signal).then(controller.abort);\n * doAsyncWork(controller.signal).then(controller.abort);\n *```\n *\n * @example\n * Cascaded aborting\n * ```ts\n * // All operations can't take more than 30 seconds\n * const aborter = Aborter.timeout(30 * 1000);\n *\n * // Following 2 operations can't take more than 25 seconds\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * await doAsyncWork(aborter.withTimeout(25 * 1000));\n * ```\n */\nexport class AbortController {\n private _signal: AbortSignal;\n\n /**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(parentSignals?: AbortSignalLike[]);\n 
/**\n * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller.\n */\n constructor(...parentSignals: AbortSignalLike[]);\n // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types\n constructor(parentSignals?: any) {\n this._signal = new AbortSignal();\n\n if (!parentSignals) {\n return;\n }\n // coerce parentSignals into an array\n if (!Array.isArray(parentSignals)) {\n // eslint-disable-next-line prefer-rest-params\n parentSignals = arguments;\n }\n for (const parentSignal of parentSignals) {\n // if the parent signal has already had abort() called,\n // then call abort on this signal as well.\n if (parentSignal.aborted) {\n this.abort();\n } else {\n // when the parent signal aborts, this signal should as well.\n parentSignal.addEventListener(\"abort\", () => {\n this.abort();\n });\n }\n }\n }\n\n /**\n * The AbortSignal associated with this controller that will signal aborted\n * when the abort method is called on this controller.\n *\n * @readonly\n */\n public get signal(): AbortSignal {\n return this._signal;\n }\n\n /**\n * Signal that any operations passed this controller's associated abort signal\n * to cancel any remaining work and throw an `AbortError`.\n */\n abort(): void {\n abortSignal(this._signal);\n }\n\n /**\n * Creates a new AbortSignal instance that will abort after the provided ms.\n * @param ms - Elapsed time in milliseconds to trigger an abort.\n */\n public static timeout(ms: number): AbortSignal {\n const signal = new AbortSignal();\n const timer = setTimeout(abortSignal, ms, signal);\n // Prevent the active Timer from keeping the Node.js event loop active.\n if (typeof timer.unref === \"function\") {\n timer.unref();\n }\n return signal;\n 
}\n}\n"],"names":[],"mappings":";;;;AAAA;AACA;AAGA;AAIA,MAAM,YAAY,GAAG,IAAI,OAAO,EAAqC,CAAC;AACtE,MAAM,UAAU,GAAG,IAAI,OAAO,EAAwB,CAAC;AA6BvD;;;;;;;;;;;;AAYG;MACU,WAAW,CAAA;AACtB,IAAA,WAAA,GAAA;AA2BA;;AAEG;QACI,IAAO,CAAA,OAAA,GAAiC,IAAI,CAAC;AA7BlD,QAAA,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;AAC3B,QAAA,UAAU,CAAC,GAAG,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;KAC7B;AAED;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AACzB,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;AAED,QAAA,OAAO,UAAU,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;KAC9B;AAED;;;;AAIG;AACI,IAAA,WAAW,IAAI,GAAA;QACpB,OAAO,IAAI,WAAW,EAAE,CAAC;KAC1B;AAOD;;;;;AAKG;IACI,gBAAgB;;AAErB,IAAA,KAAc,EACd,QAAiD,EAAA;AAEjD,QAAA,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;AAC1C,QAAA,SAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC1B;AAED;;;;;AAKG;IACI,mBAAmB;;AAExB,IAAA,KAAc,EACd,QAAiD,EAAA;AAEjD,QAAA,IAAI,CAAC,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AAC3B,YAAA,MAAM,IAAI,SAAS,CAAC,mDAAmD,CAAC,CAAC;AAC1E,SAAA;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC;QAE1C,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AAC1C,QAAA,IAAI,KAAK,GAAG,CAAC,CAAC,EAAE;AACd,YAAA,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AAC5B,SAAA;KACF;AAED;;AAEG;AACH,IAAA,aAAa,CAAC,MAAa,EAAA;AACzB,QAAA,MAAM,IAAI,KAAK,CACb,kHAAkH,CACnH,CAAC;KACH;AACF,CAAA;AAED;;;;;;;;AAQG;AACH;AACM,SAAU,WAAW,CAAC,MAAmB,EAAA;IAC7C,IAAI,MAAM,CAAC,OAAO,EAAE;QAClB,OAAO;AACR,KAAA;IAED,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,QAAA,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC7B,KAAA;IAED,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,MAAM,CAAE,CAAC;AAC5C,IAAA,IAAI,SAAS,EAAE;;;;QAIb,SAAS,CAAC,KAAK,EAAE,CAAC,OAAO,CAAC,CAAC,QAAQ,KAAI;YACrC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;AACJ,KAAA;AAED,IAAA,UAAU,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;AAC/B;;ACtKA;AAKA;;;;;;;;;;;;;;;;;AAiBG;AACG,MAAO,UAAW,SAAQ,KAAK,CAAA;AACnC,IAAA,WAAA,CAAY,OAAgB,EAAA;QAC1B,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;KAC1B;AACF,CAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCG;MACU,eAAe,CAAA;;AAY1B,IAAA,WAAA,CAAY,aAAmB,EAAA;AAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAEjC,IAAI,CAAC,aAAa,EAAE;YAClB,OAAO;AACR,SAAA;;AAED,QAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;;YAEjC,aAAa,GAAG,SAAS,CAAC;AAC3B,SAAA;AACD,QAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;;;YAGxC,IAAI,YAAY,CAAC,OAAO,EAAE;gBACxB,IAAI,CAAC,KAAK,EAAE,CAAC;AACd,aAAA;AAAM,iBAAA;;AAEL,gBAAA,YAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,MAAK;oBAC1C,IAAI,CAAC,KAAK,EAAE,CAAC;AACf,iBAAC,CAAC,CAAC;AACJ,aAAA;AACF,SAAA;KACF;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;QACf,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;AAGG;IACH,KAAK,GAAA;AACH,QAAA,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;KAC3B;AAED;;;AAGG;IACI,OAAO,OAAO,CAAC,EAAU,EAAA;AAC9B,QAAA,MAAM,MAAM,GAAG,IAAI,WAAW,EAAE,CAAC;QACjC,MAAM,KAAK,GAAG,UAAU,CAAC,WAAW,EAAE,EAAE,EAAE,MAAM,CAAC,CAAC;;AAElD,QAAA,IAAI,OAAO,KAAK,CAAC,KAAK,KAAK,UAAU,EAAE;YACrC,KAAK,CAAC,KAAK,EAAE,CAAC;AACf,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/abort-controller/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) 
Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/LICENSE.txt b/node_modules/@azure/abort-controller/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/README.md b/node_modules/@azure/abort-controller/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. 
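To make the effect concrete, here is a rough sketch (the file name `delay.ts` and the exact emit shape are illustrative, not taken from this diff): an `async` function compiled with `--importHelpers` pulls `__awaiter` and `__generator` from `tslib` instead of inlining them into every output file.

```ts
// delay.ts (hypothetical input)
export async function delay(ms: number): Promise<void> {
  await new Promise<void>((resolve) => setTimeout(resolve, ms));
}
```

Compiled with something like `tsc --importHelpers --target ES5 --module commonjs delay.ts`, the output starts approximately as:

```ts
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.delay = void 0;
var tslib_1 = require("tslib");
function delay(ms) {
    // Helpers are imported from tslib rather than redeclared in this file.
    return tslib_1.__awaiter(this, void 0, void 0, function () {
        return tslib_1.__generator(this, function (_a) {
            switch (_a.label) {
                case 0: return [4 /*yield*/, new Promise(function (resolve) { return setTimeout(resolve, ms); })];
                case 1:
                    _a.sent();
                    return [2 /*return*/];
            }
        });
    });
}
exports.delay = delay;
```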
+ +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
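The helpers themselves are ordinary exports of the package, so hand-written down-level code can also call them directly without the compiler flag; a minimal sketch (the values passed here are arbitrary):

```ts
import { __assign } from "tslib";

// __assign copies enumerable own properties left to right, like Object.assign.
const options = __assign({}, { retries: 3 }, { timeoutMs: 30000 });
console.log(options); // { retries: 3, timeoutMs: 30000 }
```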
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/modules/index.js b/node_modules/@azure/abort-controller/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/modules/package.json b/node_modules/@azure/abort-controller/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/package.json b/node_modules/@azure/abort-controller/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.d.ts b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. 
+ * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar<T>(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault<T>(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field.
+ * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value.
+ * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`.
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.html b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.html b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/node_modules/tslib/tslib.js b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/abort-controller/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/abort-controller/package.json new file mode 100644 index 0000000000..6126bc2787 --- /dev/null +++ b/node_modules/@azure/abort-controller/package.json @@ -0,0 +1,104 @@ +{ + "name": "@azure/abort-controller", + "sdk-type": "client", + "version": "1.1.0", + "description": "Microsoft Azure SDK for JavaScript - Aborter", + "main": "./dist/index.js", + "module": "dist-esm/src/index.js", + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo 
Obsolete", + "build:test": "tsc -p . && dev-tool run bundle", + "build:types": "downlevel-dts types/src types/3.1", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "types": "./types/src/index.d.ts", + "typesVersions": { + "<3.6": { + "types/src/*": [ + "types/3.1/*" + ] + } + }, + "files": [ + "dist/", + "dist-esm/src/", + "shims-public.d.ts", + "types/src", + "types/3.1", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=12.0.0" + }, + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "aborter", + "abortsignal", + "cancellation", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md", + "sideEffects": false, + "dependencies": { + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "downlevel-dts": "^0.8.0", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "rimraf": "^3.0.0", + "ts-node": "^10.0.0", + "typescript": "~4.6.0" + } +} diff --git a/node_modules/@azure/abort-controller/shims-public.d.ts 
b/node_modules/@azure/abort-controller/shims-public.d.ts new file mode 100644 index 0000000000..002bec375f --- /dev/null +++ b/node_modules/@azure/abort-controller/shims-public.d.ts @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// forward declaration of Event in case DOM libs are not present. +interface Event {} diff --git a/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts b/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts new file mode 100644 index 0000000000..6f935db074 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/AbortController.d.ts @@ -0,0 +1,85 @@ +import { AbortSignal, AbortSignalLike } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. + * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export declare class AbortController { + private _signal; + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(parentSignals?: AbortSignalLike[]); + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(...parentSignals: AbortSignalLike[]); + /* + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + readonly signal: AbortSignal; + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort(): void; + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. 
+ */ + static timeout(ms: number): AbortSignal; +} +//# sourceMappingURL=AbortController.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts b/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts new file mode 100644 index 0000000000..53d6a77848 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/AbortSignal.d.ts @@ -0,0 +1,80 @@ +/// +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export declare class AbortSignal implements AbortSignalLike { + constructor(); + /* + * Status of whether aborted or not. + * + * @readonly + */ + readonly aborted: boolean; + /* + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static readonly none: AbortSignal; + /** + * onabort event listener. + */ + onabort: ((ev?: Event) => any) | null; + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Dispatches a synthetic event to the AbortSignal. + */ + dispatchEvent(_event: Event): boolean; +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. 
+ * + * @internal + */ +export declare function abortSignal(signal: AbortSignal): void; +//# sourceMappingURL=AbortSignal.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/3.1/index.d.ts b/node_modules/@azure/abort-controller/types/3.1/index.d.ts new file mode 100644 index 0000000000..1345f47f97 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/3.1/index.d.ts @@ -0,0 +1,3 @@ +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal, AbortSignalLike } from "./AbortSignal"; +//# sourceMappingURL=index.d.ts.map diff --git a/node_modules/@azure/abort-controller/types/src/AbortController.d.ts b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts new file mode 100644 index 0000000000..c07beb907a --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts @@ -0,0 +1,85 @@ +import { AbortSignal, AbortSignalLike } from "./AbortSignal"; +/** + * This error is thrown when an asynchronous operation has been aborted. + * Check for this error by testing the `name` that the name property of the + * error matches `"AbortError"`. + * + * @example + * ```ts + * const controller = new AbortController(); + * controller.abort(); + * try { + * doAsyncWork(controller.signal) + * } catch (e) { + * if (e.name === 'AbortError') { + * // handle abort error here. + * } + * } + * ``` + */ +export declare class AbortError extends Error { + constructor(message?: string); +} +/** + * An AbortController provides an AbortSignal and the associated controls to signal + * that an asynchronous operation should be aborted. + * + * @example + * Abort an operation when another event fires + * ```ts + * const controller = new AbortController(); + * const signal = controller.signal; + * doAsyncWork(signal); + * button.addEventListener('click', () => controller.abort()); + * ``` + * + * @example + * Share aborter cross multiple operations in 30s + * ```ts + * // Upload the same data to 2 different data centers at the same time, + * // abort another when any of them is finished + * const controller = AbortController.withTimeout(30 * 1000); + * doAsyncWork(controller.signal).then(controller.abort); + * doAsyncWork(controller.signal).then(controller.abort); + *``` + * + * @example + * Cascaded aborting + * ```ts + * // All operations can't take more than 30 seconds + * const aborter = Aborter.timeout(30 * 1000); + * + * // Following 2 operations can't take more than 25 seconds + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * await doAsyncWork(aborter.withTimeout(25 * 1000)); + * ``` + */ +export declare class AbortController { + private _signal; + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(parentSignals?: AbortSignalLike[]); + /** + * @param parentSignals - The AbortSignals that will signal aborted on the AbortSignal associated with this controller. + */ + constructor(...parentSignals: AbortSignalLike[]); + /** + * The AbortSignal associated with this controller that will signal aborted + * when the abort method is called on this controller. + * + * @readonly + */ + get signal(): AbortSignal; + /** + * Signal that any operations passed this controller's associated abort signal + * to cancel any remaining work and throw an `AbortError`. + */ + abort(): void; + /** + * Creates a new AbortSignal instance that will abort after the provided ms. + * @param ms - Elapsed time in milliseconds to trigger an abort. 
+ */ + static timeout(ms: number): AbortSignal; +} +//# sourceMappingURL=AbortController.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map new file mode 100644 index 0000000000..cb855afa15 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortController.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortController.d.ts","sourceRoot":"","sources":["../../src/AbortController.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAe,MAAM,eAAe,CAAC;AAE1E;;;;;;;;;;;;;;;;;GAiBG;AACH,qBAAa,UAAW,SAAQ,KAAK;gBACvB,OAAO,CAAC,EAAE,MAAM;CAI7B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAiCG;AACH,qBAAa,eAAe;IAC1B,OAAO,CAAC,OAAO,CAAc;IAE7B;;OAEG;gBACS,aAAa,CAAC,EAAE,eAAe,EAAE;IAC7C;;OAEG;gBACS,GAAG,aAAa,EAAE,eAAe,EAAE;IA2B/C;;;;;OAKG;IACH,IAAW,MAAM,IAAI,WAAW,CAE/B;IAED;;;OAGG;IACH,KAAK,IAAI,IAAI;IAIb;;;OAGG;WACW,OAAO,CAAC,EAAE,EAAE,MAAM,GAAG,WAAW;CAS/C"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts new file mode 100644 index 0000000000..7d31cb1377 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts @@ -0,0 +1,80 @@ +/// +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + /** + * Indicates if the signal has already been aborted. + */ + readonly aborted: boolean; + /** + * Add new "abort" event listener, only support "abort" event. + */ + addEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; + /** + * Remove "abort" event listener, only support "abort" event. + */ + removeEventListener(type: "abort", listener: (this: AbortSignalLike, ev: any) => any, options?: any): void; +} +/** + * An aborter instance implements AbortSignal interface, can abort HTTP requests. + * + * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. + * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation + * cannot or will not ever be cancelled. + * + * @example + * Abort without timeout + * ```ts + * await doAsyncWork(AbortSignal.none); + * ``` + */ +export declare class AbortSignal implements AbortSignalLike { + constructor(); + /** + * Status of whether aborted or not. + * + * @readonly + */ + get aborted(): boolean; + /** + * Creates a new AbortSignal instance that will never be aborted. + * + * @readonly + */ + static get none(): AbortSignal; + /** + * onabort event listener. + */ + onabort: ((ev?: Event) => any) | null; + /** + * Added new "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be added + */ + addEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Remove "abort" event listener, only support "abort" event. + * + * @param _type - Only support "abort" event + * @param listener - The listener to be removed + */ + removeEventListener(_type: "abort", listener: (this: AbortSignalLike, ev: any) => any): void; + /** + * Dispatches a synthetic event to the AbortSignal. 
+ */ + dispatchEvent(_event: Event): boolean; +} +/** + * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. + * Will try to trigger abort event for all linked AbortSignal nodes. + * + * - If there is a timeout, the timer will be cancelled. + * - If aborted is true, nothing will happen. + * + * @internal + */ +export declare function abortSignal(signal: AbortSignal): void; +//# sourceMappingURL=AbortSignal.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map new file mode 100644 index 0000000000..c82bea3a06 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/AbortSignal.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"AbortSignal.d.ts","sourceRoot":"","sources":["../../src/AbortSignal.ts"],"names":[],"mappings":";AAWA;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CACd,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;IACR;;OAEG;IACH,mBAAmB,CACjB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,EACjD,OAAO,CAAC,EAAE,GAAG,GACZ,IAAI,CAAC;CACT;AAED;;;;;;;;;;;;GAYG;AACH,qBAAa,WAAY,YAAW,eAAe;;IAMjD;;;;OAIG;IACH,IAAW,OAAO,IAAI,OAAO,CAM5B;IAED;;;;OAIG;IACH,WAAkB,IAAI,IAAI,WAAW,CAEpC;IAED;;OAEG;IACI,OAAO,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,CAAQ;IAEpD;;;;;OAKG;IACI,gBAAgB,CAErB,KAAK,EAAE,OAAO,EACd,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,GAChD,IAAI;IASP;;;;;OAKG;IACI,mBAAmB,CAExB,KAAK,EAAE,OAAO,EACd,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,GAAG,KAAK,GAAG,GAChD,IAAI;IAaP;;OAEG;IACH,aAAa,CAAC,MAAM,EAAE,KAAK,GAAG,OAAO;CAKtC;AAED;;;;;;;;GAQG;AAEH,wBAAgB,WAAW,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI,CAoBrD"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/index.d.ts b/node_modules/@azure/abort-controller/types/src/index.d.ts new file mode 100644 index 0000000000..e5afaf434a --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/index.d.ts @@ -0,0 +1,3 @@ +export { AbortController, AbortError } from "./AbortController"; +export { AbortSignal, AbortSignalLike } from "./AbortSignal"; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/index.d.ts.map b/node_modules/@azure/abort-controller/types/src/index.d.ts.map new file mode 100644 index 0000000000..a06293929b --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAChE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json b/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json new file mode 100644 index 0000000000..7b5aee3240 --- /dev/null +++ b/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.18.11" + } + ] +} diff --git a/node_modules/@azure/core-auth/CHANGELOG.md b/node_modules/@azure/core-auth/CHANGELOG.md new file mode 100644 index 0000000000..ac1d8d7392 --- /dev/null +++ b/node_modules/@azure/core-auth/CHANGELOG.md @@ -0,0 +1,61 @@ +# Release History + +## 1.3.2 (2021-07-01) + +- Added `tenantId` optional property to the `GetTokenOptions` interface. If `tenantId` is set, credentials will be able to use multi-tenant authentication, in the cases when it's enabled. + +## 1.3.0 (2021-03-30) + +- Adds the `AzureNamedKeyCredential` class which supports credential rotation and a corresponding `NamedKeyCredential` interface to support the use of static string-based names and keys in Azure clients. +- Adds the `isNamedKeyCredential` and `isSASCredential` typeguard functions similar to the existing `isTokenCredential`. + +## 1.2.0 (2021-02-08) + +- Add `AzureSASCredential` and `SASCredential` for use by service clients which allow authenticiation using a shared access signature. + +## 1.1.4 (2021-01-07) + +- Removed direct dependency on `@opentelemetry/api` and `@azure/core-tracing`. + +## 1.1.3 (2020-06-30) + +- Fix this library to be compatible with ES5 ([#8975](https://github.com/Azure/azure-sdk-for-js/pull/8975)) + +## 1.1.2 (2020-04-28) + +- Remove the below interfaces from the public API of this package as they are defined elsewhere. + Fixes [bug 8301](https://github.com/Azure/azure-sdk-for-js/issues/8301). + - OperationOptions + - OperationRequestOptions + - OperationTracingOptions + - AbortSignalLike + +## 1.1.1 (2020-04-01) + +- Provided down-leveled type declaration files for users of older TypeScript versions between 3.1 and 3.6. + +## 1.1.0 (2020-03-31) + +- Added an `AzureKeyCredential` class that supports credential rotation and a corresponding `KeyCredential` interface to support the use of static string-based keys in Azure clients. + +## 1.0.2 (2019-12-03) + +- Updated to use OpenTelemetry 0.2 via `@azure/core-tracing` + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/core-auth` package. + +- Standardizes API to be more consistent with other SDK packages. + ([PR #5899](https://github.com/Azure/azure-sdk-for-js/pull/5899)) +- Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. + ([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.4 (2019-10-22) + +- Removed the `SimpleTokenCredential` implementation since it is not useful outside of test scenarios +- Updated to use the latest version of `@azure/core-tracing` package + +## 1.0.0-preview.3 (2019-09-09) + +- Fixed a ping timeout issue. The timeout is now configurable. 
([PR #4941](https://github.com/Azure/azure-sdk-for-js/pull/4941)) diff --git a/node_modules/@azure/core-auth/LICENSE b/node_modules/@azure/core-auth/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/core-auth/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-auth/README.md b/node_modules/@azure/core-auth/README.md new file mode 100644 index 0000000000..31ccc7f3fd --- /dev/null +++ b/node_modules/@azure/core-auth/README.md @@ -0,0 +1,78 @@ +# Azure Core Authentication client library for JavaScript + +The `@azure/core-auth` package provides core interfaces and helper methods for authenticating with Azure services using Azure Active Directory and other authentication schemes common across the Azure SDK. As a "core" library, it shouldn't need to be added as a dependency to any user code, only other Azure SDK libraries. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/core-auth +``` + +## Key Concepts + +The `TokenCredential` interface represents a credential capable of providing an authentication token. The `@azure/identity` package contains various credentials that implement the `TokenCredential` interface. + +The `AzureKeyCredential` is a static key-based credential that supports key rotation via the `update` method. Use this when a single secret value is needed for authentication, e.g. when using a shared access key. + +The `AzureNamedKeyCredential` is a static name/key-based credential that supports name and key rotation via the `update` method. Use this when both a secret value and a label are needed, e.g. when using a shared access key and shared access key name. + +The `AzureSASCredential` is a static signature-based credential that supports updating the signature value via the `update` method. Use this when using a shared access signature. 
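As an illustration of the `TokenCredential` shape described above (a sketch, not an example taken from this package's docs), the snippet below builds a static credential object and checks it with the exported `isTokenCredential` helper. The object name, the placeholder token string, and the fixed expiry are hypothetical; production code should obtain real credentials from `@azure/identity` instead.

```js
const { isTokenCredential } = require("@azure/core-auth");

// Hypothetical static credential satisfying the TokenCredential shape:
// getToken(scopes, options) resolves to an AccessToken ({ token, expiresOnTimestamp }).
const staticTokenCredential = {
  getToken: async (_scopes, _options) => ({
    token: "<pre-acquired access token>", // placeholder value, not a real token
    expiresOnTimestamp: Date.now() + 60 * 60 * 1000 // one hour from now
  })
};

// prints: true
console.log(isTokenCredential(staticTokenCredential));
```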
+ +## Examples + +### AzureKeyCredential + +```js +const { AzureKeyCredential } = require("@azure/core-auth"); + +const credential = new AzureKeyCredential("secret value"); +// prints: "secret value" +console.log(credential.key); +credential.update("other secret value"); +// prints: "other secret value" +console.log(credential.key); +``` + +### AzureNamedKeyCredential + +```js +const { AzureNamedKeyCredential } = require("@azure/core-auth"); + +const credential = new AzureNamedKeyCredential("ManagedPolicy", "secret value"); +// prints: "ManagedPolicy, secret value" +console.log(`${credential.name}, ${credential.key}`); +credential.update("OtherManagedPolicy", "other secret value"); +// prints: "OtherManagedPolicy, other secret value" +console.log(`${credential.name}, ${credential.key}`); +``` + +### AzureSASCredential + +```js +const { AzureSASCredential } = require("@azure/core-auth"); + +const credential = new AzureSASCredential("signature1"); +// prints: "signature1" +console.log(credential.signature); +credential.update("signature2"); +// prints: "signature2" +console.log(credential.signature); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-auth%2FREADME.png) diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js new file mode 100644 index 0000000000..bb9a9fefeb --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js @@ -0,0 +1,38 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export class AzureKeyCredential { + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} +//# sourceMappingURL=azureKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js.map new file mode 100644 index 0000000000..7b129510af --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureKeyCredential.js","sourceRoot":"","sources":["../../src/azureKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAYlC;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IAU7B;;;;;OAKG;IACH,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;SACnD;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IAnBD;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAgBD;;;;;;;OAOG;IACI,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js new file mode 100644 index 0000000000..31f93648ea --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js @@ -0,0 +1,62 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "./typeguards"; +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export class AzureNamedKeyCredential { + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. 
+ * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} +//# sourceMappingURL=azureNamedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js.map new file mode 100644 index 0000000000..0288da56da --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureNamedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureNamedKeyCredential.js","sourceRoot":"","sources":["../../src/azureNamedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,cAAc,CAAC;AAgBtD;;;GAGG;AACH,MAAM,OAAO,uBAAuB;IAkBlC;;;;;;OAMG;IACH,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE;YACjB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;SAC/D;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;IAClB,CAAC;IA5BD;;OAEG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAkBD;;;;;;;;OAQG;IACI,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE;YACvB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;SACrE;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;IACrB,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,UAAmB;IACtD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,CACpC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * 
updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = newKey;\n }\n}\n\n/**\n * Tests an object to determine whether it implements NamedKeyCredential.\n *\n * @param credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js new file mode 100644 index 0000000000..d2b370602b --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isObjectWithProperties } from "./typeguards"; +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export class AzureSASCredential { + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. 
+ */ +export function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} +//# sourceMappingURL=azureSASCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js.map new file mode 100644 index 0000000000..faa5d51510 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/azureSASCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureSASCredential.js","sourceRoot":"","sources":["../../src/azureSASCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,sBAAsB,EAAE,MAAM,cAAc,CAAC;AAYtD;;;GAGG;AACH,MAAM,OAAO,kBAAkB;IAU7B;;;;;OAKG;IACH,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAnBD;;OAEG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;IACzB,CAAC;IAgBD;;;;;;;OAOG;IACI,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;IACjC,CAAC;CACF;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,UAAmB;IACjD,OAAO,CACL,sBAAsB,CAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,CAC9F,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n }\n\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/index.js b/node_modules/@azure/core-auth/dist-esm/src/index.js new file mode 100644 index 0000000000..a9c837b960 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/index.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +export { AzureKeyCredential } from "./azureKeyCredential"; +export { AzureNamedKeyCredential, isNamedKeyCredential } from "./azureNamedKeyCredential"; +export { AzureSASCredential, isSASCredential } from "./azureSASCredential"; +export { isTokenCredential } from "./tokenCredential"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/index.js.map b/node_modules/@azure/core-auth/dist-esm/src/index.js.map new file mode 100644 index 0000000000..1b8858164e --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAiB,MAAM,sBAAsB,CAAC;AACzE,OAAO,EACL,uBAAuB,EAEvB,oBAAoB,EACrB,MAAM,2BAA2B,CAAC;AACnC,OAAO,EAAE,kBAAkB,EAAiB,eAAe,EAAE,MAAM,sBAAsB,CAAC;AAE1F,OAAO,EAIL,iBAAiB,EAClB,MAAM,mBAAmB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AzureKeyCredential, KeyCredential } from \"./azureKeyCredential\";\nexport {\n AzureNamedKeyCredential,\n NamedKeyCredential,\n isNamedKeyCredential\n} from \"./azureNamedKeyCredential\";\nexport { AzureSASCredential, SASCredential, isSASCredential } from \"./azureSASCredential\";\n\nexport {\n TokenCredential,\n GetTokenOptions,\n AccessToken,\n isTokenCredential\n} from \"./tokenCredential\";\n\nexport { SpanContext, SpanOptions, SpanAttributes, Context, SpanAttributeValue } from \"./tracing\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js new file mode 100644 index 0000000000..5ca7f68d1d --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} +//# sourceMappingURL=tokenCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js.map b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js.map new file mode 100644 index 0000000000..68878d6922 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tokenCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredential.js","sourceRoot":"","sources":["../../src/tokenCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA2ElC;;;;GAIG;AACH,MAAM,UAAU,iBAAiB,CAAC,UAAmB;IACnD,mEAAmE;IACnE,gEAAgE;IAChE,sEAAsE;IACtE,qEAAqE;IACrE,sDAAsD;IACtD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,OAAO,CACL,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;QAC7C,CAAC,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,CACjF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tracing.js b/node_modules/@azure/core-auth/dist-esm/src/tracing.js new file mode 100644 index 0000000000..377718d175 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tracing.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/tracing.js.map b/node_modules/@azure/core-auth/dist-esm/src/tracing.js.map new file mode 100644 index 0000000000..18a1530707 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../src/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// The interfaces in this file should be kept in sync with those\n// found in the `@azure/core-tracing` package.\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Span attributes.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans.\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry.\n */\nexport declare interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n}\n\n/**\n * An interface structurally compatible with OpenTelemetry.\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/typeguards.js b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js new file mode 100644 index 0000000000..551fe89ce5 --- /dev/null +++ 
b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + * @internal + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * Note: The properties may be inherited. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + * @internal + */ +export function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * Note: The property may be inherited. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + * @internal + */ +function objectHasProperty(thing, property) { + return typeof thing === "object" && property in thing; +} +//# sourceMappingURL=typeguards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist-esm/src/typeguards.js.map b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js.map new file mode 100644 index 0000000000..fe5158a975 --- /dev/null +++ b/node_modules/@azure/core-auth/dist-esm/src/typeguards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeguards.js","sourceRoot":"","sources":["../../src/typeguards.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,SAAS,SAAS,CAAI,KAA2B;IAC/C,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAClD,OAAO,KAAK,CAAC;KACd;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE;QACjC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;GAMG;AACH,SAAS,iBAAiB,CACxB,KAAY,EACZ,QAAsB;IAEtB,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAAC;AACrF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n * @internal\n */\nfunction isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * Note: The properties may be inherited.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n * @internal\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[]\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * Note: The property may be inherited.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n * @internal\n */\nfunction objectHasProperty(\n thing: Thing,\n property: 
PropertyName\n): thing is Thing & Record {\n return typeof thing === \"object\" && property in (thing as Record);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/dist/index.js b/node_modules/@azure/core-auth/dist/index.js new file mode 100644 index 0000000000..d074ccd00e --- /dev/null +++ b/node_modules/@azure/core-auth/dist/index.js @@ -0,0 +1,215 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +class AzureKeyCredential { + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key) { + if (!key) { + throw new Error("key must be a non-empty string"); + } + this._key = key; + } + /** + * The value of the key to be used in authentication + */ + get key() { + return this._key; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey) { + this._key = newKey; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if something is defined or not. + * @param thing - Anything + * @internal + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified properties. + * Note: The properties may be inherited. + * @param thing - Anything. + * @param properties - The name of the properties that should appear in the object. + * @internal + */ +function isObjectWithProperties(thing, properties) { + if (!isDefined(thing) || typeof thing !== "object") { + return false; + } + for (const property of properties) { + if (!objectHasProperty(thing, property)) { + return false; + } + } + return true; +} +/** + * Helper TypeGuard that checks if the input is an object with the specified property. + * Note: The property may be inherited. + * @param thing - Any object. + * @param property - The name of the property that should appear in the object. + * @internal + */ +function objectHasProperty(thing, property) { + return typeof thing === "object" && property in thing; +} + +// Copyright (c) Microsoft Corporation. +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +class AzureNamedKeyCredential { + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name, key) { + if (!name || !key) { + throw new TypeError("name and key must be non-empty strings"); + } + this._name = name; + this._key = key; + } + /** + * The value of the key to be used in authentication. + */ + get key() { + return this._key; + } + /** + * The value of the name to be used in authentication. + */ + get name() { + return this._name; + } + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. 
+ */ + update(newName, newKey) { + if (!newName || !newKey) { + throw new TypeError("newName and newKey must be non-empty strings"); + } + this._name = newName; + this._key = newKey; + } +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +function isNamedKeyCredential(credential) { + return (isObjectWithProperties(credential, ["name", "key"]) && + typeof credential.key === "string" && + typeof credential.name === "string"); +} + +// Copyright (c) Microsoft Corporation. +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +class AzureSASCredential { + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature) { + if (!signature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = signature; + } + /** + * The value of the shared access signature to be used in authentication + */ + get signature() { + return this._signature; + } + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature) { + if (!newSignature) { + throw new Error("shared access signature must be a non-empty string"); + } + this._signature = newSignature; + } +} +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +function isSASCredential(credential) { + return (isObjectWithProperties(credential, ["signature"]) && typeof credential.signature === "string"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} + +exports.AzureKeyCredential = AzureKeyCredential; +exports.AzureNamedKeyCredential = AzureNamedKeyCredential; +exports.AzureSASCredential = AzureSASCredential; +exports.isNamedKeyCredential = isNamedKeyCredential; +exports.isSASCredential = isSASCredential; +exports.isTokenCredential = isTokenCredential; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-auth/dist/index.js.map b/node_modules/@azure/core-auth/dist/index.js.map new file mode 100644 index 0000000000..f80a805b04 --- /dev/null +++ b/node_modules/@azure/core-auth/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/azureKeyCredential.ts","../src/typeguards.ts","../src/azureNamedKeyCredential.ts","../src/azureSASCredential.ts","../src/tokenCredential.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Represents a credential defined by a static API key.\n */\nexport interface KeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n}\n\n/**\n * A static-key-based credential that supports updating\n * the underlying key value.\n */\nexport class AzureKeyCredential implements KeyCredential {\n private _key: string;\n\n /**\n * The value of the key to be used in authentication\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * Create an instance of an AzureKeyCredential for use\n * with a service client.\n *\n * @param key - The initial value of the key to use in authentication\n */\n constructor(key: string) {\n if (!key) {\n throw new Error(\"key must be a non-empty string\");\n }\n\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newKey - The new key value to be used\n */\n public update(newKey: string): void {\n this._key = newKey;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if something is defined or not.\n * @param thing - Anything\n * @internal\n */\nfunction isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified properties.\n * Note: The properties may be inherited.\n * @param thing - Anything.\n * @param properties - The name of the properties that should appear in the object.\n * @internal\n */\nexport function isObjectWithProperties(\n thing: Thing,\n properties: PropertyName[]\n): thing is Thing & Record {\n if (!isDefined(thing) || typeof thing !== \"object\") {\n return false;\n }\n\n for (const property of properties) {\n if (!objectHasProperty(thing, property)) {\n return false;\n }\n }\n\n return true;\n}\n\n/**\n * Helper TypeGuard that checks if the input is an object with the specified property.\n * Note: The property may be inherited.\n * @param thing - Any object.\n * @param property - The name of the property that should appear in the object.\n * @internal\n */\nfunction objectHasProperty(\n thing: Thing,\n property: PropertyName\n): thing is Thing & Record {\n return typeof thing === \"object\" && property in (thing as Record);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under 
the MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static API name and key.\n */\nexport interface NamedKeyCredential {\n /**\n * The value of the API key represented as a string\n */\n readonly key: string;\n /**\n * The value of the API name represented as a string.\n */\n readonly name: string;\n}\n\n/**\n * A static name/key-based credential that supports updating\n * the underlying name and key values.\n */\nexport class AzureNamedKeyCredential implements NamedKeyCredential {\n private _key: string;\n private _name: string;\n\n /**\n * The value of the key to be used in authentication.\n */\n public get key(): string {\n return this._key;\n }\n\n /**\n * The value of the name to be used in authentication.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * Create an instance of an AzureNamedKeyCredential for use\n * with a service client.\n *\n * @param name - The initial value of the name to use in authentication.\n * @param key - The initial value of the key to use in authentication.\n */\n constructor(name: string, key: string) {\n if (!name || !key) {\n throw new TypeError(\"name and key must be non-empty strings\");\n }\n\n this._name = name;\n this._key = key;\n }\n\n /**\n * Change the value of the key.\n *\n * Updates will take effect upon the next request after\n * updating the key value.\n *\n * @param newName - The new name value to be used.\n * @param newKey - The new key value to be used.\n */\n public update(newName: string, newKey: string): void {\n if (!newName || !newKey) {\n throw new TypeError(\"newName and newKey must be non-empty strings\");\n }\n\n this._name = newName;\n this._key = newKey;\n }\n}\n\n/**\n * Tests an object to determine whether it implements NamedKeyCredential.\n *\n * @param credential - The assumed NamedKeyCredential to be tested.\n */\nexport function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential {\n return (\n isObjectWithProperties(credential, [\"name\", \"key\"]) &&\n typeof credential.key === \"string\" &&\n typeof credential.name === \"string\"\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { isObjectWithProperties } from \"./typeguards\";\n\n/**\n * Represents a credential defined by a static shared access signature.\n */\nexport interface SASCredential {\n /**\n * The value of the shared access signature represented as a string\n */\n readonly signature: string;\n}\n\n/**\n * A static-signature-based credential that supports updating\n * the underlying signature value.\n */\nexport class AzureSASCredential implements SASCredential {\n private _signature: string;\n\n /**\n * The value of the shared access signature to be used in authentication\n */\n public get signature(): string {\n return this._signature;\n }\n\n /**\n * Create an instance of an AzureSASCredential for use\n * with a service client.\n *\n * @param signature - The initial value of the shared access signature to use in authentication\n */\n constructor(signature: string) {\n if (!signature) {\n throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = signature;\n }\n\n /**\n * Change the value of the signature.\n *\n * Updates will take effect upon the next request after\n * updating the signature value.\n *\n * @param newSignature - The new shared access signature value to be used\n */\n public update(newSignature: string): void {\n if (!newSignature) {\n 
throw new Error(\"shared access signature must be a non-empty string\");\n }\n\n this._signature = newSignature;\n }\n}\n\n/**\n * Tests an object to determine whether it implements SASCredential.\n *\n * @param credential - The assumed SASCredential to be tested.\n */\nexport function isSASCredential(credential: unknown): credential is SASCredential {\n return (\n isObjectWithProperties(credential, [\"signature\"]) && typeof credential.signature === \"string\"\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n"],"names":[],"mappings":";;;;AAAA;AACA;AAYA;;;;MAIa,kBAAkB;;;;;;;IAgB7B,YAAY,GAAW;QACrB,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,IAAI,KAAK,CAAC,gCAAgC,CAAC,CAAC;SACnD;QAED,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;KACjB;;;;IAhBD,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;KAClB;;;;;;;;;IAwBM,MAAM,CAAC,MAAc;QAC1B,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;KACpB;;;ACnDH;AACA;AAEA;;;;;AAKA,SAAS,SAAS,CAAI,KAA2B;IAC/C,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC;AAED;;;;;;;AAOA,SAAgB,sBAAsB,CACpC,KAAY,EACZ,UAA0B;IAE1B,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAClD,OAAO,KAAK,CAAC;KACd;IAED,KAAK,MAAM,QAAQ,IAAI,UAAU,EAAE;QACjC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;;;;;AAOA,SAAS,iBAAiB,CACxB,KAAY,EACZ,QAAsB;IAEtB,OAAO,OAAO,KAAK,KAAK,QAAQ,IAAI,QAAQ,IAAK,KAAiC,CAAC;AACrF,CAAC;;AChDD;AACA,AAkBA;;;;AAIA,MAAa,uBAAuB;;;;;;;;IAyBlC,YAAY,IAAY,EAAE,GAAW;QACnC,IAAI,CAAC,IAAI,IAAI,CAAC,GAAG,EAAE;YACjB,MAAM,IAAI,SAAS,CAAC,wCAAwC,CAAC,CAAC;SAC/D;QAED,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC;KACjB;;;;IAzBD,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;KAClB;;;;IAKD,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;;;;;;IA2BM,MAAM,CAAC,OAAe,EAAE,MAAc;QAC3C,IAAI,CAAC,OAAO,IAAI,CAAC,MAAM,EAAE;YACvB,MAAM,IAAI,SAAS,CAAC,8CAA8C,CAAC,CAAC;SACrE;QAED,IAAI,CAAC,KAAK,GAAG,OAAO,CAAC;QACrB,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC;KACpB;CACF;AAED;;;;;AAKA,SAAgB,oBAAoB,CAAC,UAAmB;IACtD,QACE,sBAAsB,CAAC,UAAU,EAAE,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;QACnD,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;QAClC,OAAO,UAAU,CAAC,IAAI,KAAK,QAAQ,EACnC;AACJ,CAAC;;ACvFD;AACA,AAcA;;;;AAIA,MAAa,kBAAkB;;;;;;;IAgB7B,YAAY,SAAiB;QAC3B,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;KAC7B;;;;IAhBD,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,UAAU,CAAC;KACxB;;;;;;;;;IAwBM,MAAM,CAAC,YAAoB;QAChC,IAAI,CAAC,YAAY,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAI,CAAC,UAAU,GAAG,YAAY,CAAC;KAChC;CACF;AAED;;;;;AAKA,SAAgB,eAAe,CAAC,UAAmB;IACjD,QACE,sBAAsB,CAAC,UAAU,EAAE,CAAC,WAAW,CAAC,CAAC,IAAI,OAAO,UAAU,CAAC,SAAS,KAAK,QAAQ,EAC7F;AACJ,CAAC;;ACrED;AACA;AA2EA;;;;;AAKA,SAAgB,iBAAiB,CAAC,UAAmB;;;;;;IAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,QACE,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;SAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;AACJ,CAAC;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-auth/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-auth/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-auth/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/README.md b/node_modules/@azure/core-auth/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. 
+ +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-auth/node_modules/tslib/modules/index.js b/node_modules/@azure/core-auth/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-auth/node_modules/tslib/modules/package.json b/node_modules/@azure/core-auth/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/package.json b/node_modules/@azure/core-auth/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-auth/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. 
+ * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. 
+ * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. 
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.html b/node_modules/@azure/core-auth/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-auth/node_modules/tslib/tslib.js b/node_modules/@azure/core-auth/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/core-auth/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-auth/package.json b/node_modules/@azure/core-auth/package.json new file mode 100644 index 0000000000..dfca785123 --- /dev/null +++ b/node_modules/@azure/core-auth/package.json @@ -0,0 +1,99 @@ +{ + "name": "@azure/core-auth", + "version": "1.3.2", + "description": "Provides low-level interfaces and helper methods for authentication in Azure SDK", + "sdk-type": "client", + "main": "dist/index.js", + "module": "dist-esm/src/index.js", + "types": "./types/latest/core-auth.d.ts", + "typesVersions": { + "<3.6": { + "types/latest/*": [ + "types/3.1/*" + ] + } + }, + "scripts": { + "audit": "node 
../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:browser": "tsc -p . && cross-env ONLY_BROWSER=true rollup -c 2>&1", + "build:node": "tsc -p . && cross-env ONLY_NODE=true rollup -c 2>&1", + "build:samples": "echo Skipped.", + "build:test": "tsc -p . && rollup -c rollup.test.config.js 2>&1", + "build:types": "downlevel-dts types/latest types/3.1", + "build": "tsc -p . && rollup -c 2>&1 && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "prebuild": "npm run clean", + "test:browser": "npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run build:test && npm run unit-test:node && npm run integration-test:node", + "test": "npm run build:test && npm run unit-test && npm run integration-test", + "unit-test:browser": "echo skipped", + "unit-test:node": "mocha dist-test/**/*.js --reporter ../../../common/tools/mocha-multi-reporter.js", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src" + }, + "files": [ + "dist/", + "dist-esm/src/", + "types/latest/core-auth.d.ts", + "types/3.1", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "authentication", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=12.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-auth/README.md", + "sideEffects": false, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.7.11", + "@rollup/plugin-commonjs": "11.0.2", + "@rollup/plugin-json": "^4.0.0", + "@rollup/plugin-multi-entry": "^3.0.0", + "@rollup/plugin-node-resolve": "^8.0.0", + "@rollup/plugin-replace": "^2.2.0", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "assert": "^1.4.1", + "cross-env": "^7.0.2", + "downlevel-dts": "~0.4.0", + "eslint": "^7.15.0", + "inherits": "^2.0.3", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^1.18.0", + "prettier": "^1.16.4", + "rimraf": "^3.0.0", + "rollup": "^1.16.3", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-terser": "^5.1.1", + "rollup-plugin-visualizer": "^4.0.4", + "typescript": "~4.2.0", + "util": "^0.12.1", + "typedoc": "0.15.2" + } +} diff --git a/node_modules/@azure/core-auth/types/3.1/core-auth.d.ts 
b/node_modules/@azure/core-auth/types/3.1/core-auth.d.ts new file mode 100644 index 0000000000..33d2dbcf1f --- /dev/null +++ b/node_modules/@azure/core-auth/types/3.1/core-auth.d.ts @@ -0,0 +1,258 @@ +import { AbortSignalLike } from '@azure/abort-controller'; +/** + * Represents an access token with an expiration time. + */ +export declare interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + readonly key: string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + readonly key: string; + readonly name: string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + readonly signature: string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. + * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} +/** + * An interface structurally compatible with OpenTelemetry. + */ +export declare interface Context { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +/** + * Defines options for TokenCredential.getToken. + */ +export declare interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * OpenTelemetry SpanOptions used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * OpenTelemetry context + */ + tracingContext?: Context; + }; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; +/** + * Represents a credential defined by a static API key. + */ +export declare interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} +/** + * Represents a credential defined by a static API name and key. + */ +export declare interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} +/** + * Represents a credential defined by a static shared access signature. + */ +export declare interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} +/** + * Attributes for a Span. + */ +export declare interface SpanAttributes { + /** + * Span attributes. + */ + [attributeKey: string]: SpanAttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type SpanAttributeValue = string | number | boolean | Array<null | undefined | string> | Array<null | undefined | number> | Array<null | undefined | boolean>; +/** + * A light interface that tries to be structurally compatible with OpenTelemetry. + */ +export declare interface SpanContext { + /** + * UUID of a trace. + */ + traceId: string; + /** + * UUID of a Span. + */ + spanId: string; + /** + * https://www.w3.org/TR/trace-context/#trace-flags + */ + traceFlags: number; +} +/** + * An interface that enables manual propagation of Spans. + */ +export declare interface SpanOptions { + /** + * Attributes to set on the Span + */ + attributes?: SpanAttributes; +} +/** + * Represents a credential capable of providing an authentication token. 
+ */ +export declare interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise<AccessToken | null>; +} +export {}; diff --git a/node_modules/@azure/core-auth/types/latest/core-auth.d.ts b/node_modules/@azure/core-auth/types/latest/core-auth.d.ts new file mode 100644 index 0000000000..bea17451eb --- /dev/null +++ b/node_modules/@azure/core-auth/types/latest/core-auth.d.ts @@ -0,0 +1,288 @@ +import { AbortSignalLike } from '@azure/abort-controller'; + +/** + * Represents an access token with an expiration time. + */ +export declare interface AccessToken { + /** + * The access token returned by the authentication service. + */ + token: string; + /** + * The access token's expiration timestamp in milliseconds, UNIX epoch time. + */ + expiresOnTimestamp: number; +} + +/** + * A static-key-based credential that supports updating + * the underlying key value. + */ +export declare class AzureKeyCredential implements KeyCredential { + private _key; + /** + * The value of the key to be used in authentication + */ + get key(): string; + /** + * Create an instance of an AzureKeyCredential for use + * with a service client. + * + * @param key - The initial value of the key to use in authentication + */ + constructor(key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newKey - The new key value to be used + */ + update(newKey: string): void; +} + +/** + * A static name/key-based credential that supports updating + * the underlying name and key values. + */ +export declare class AzureNamedKeyCredential implements NamedKeyCredential { + private _key; + private _name; + /** + * The value of the key to be used in authentication. + */ + get key(): string; + /** + * The value of the name to be used in authentication. + */ + get name(): string; + /** + * Create an instance of an AzureNamedKeyCredential for use + * with a service client. + * + * @param name - The initial value of the name to use in authentication. + * @param key - The initial value of the key to use in authentication. + */ + constructor(name: string, key: string); + /** + * Change the value of the key. + * + * Updates will take effect upon the next request after + * updating the key value. + * + * @param newName - The new name value to be used. + * @param newKey - The new key value to be used. + */ + update(newName: string, newKey: string): void; +} + +/** + * A static-signature-based credential that supports updating + * the underlying signature value. + */ +export declare class AzureSASCredential implements SASCredential { + private _signature; + /** + * The value of the shared access signature to be used in authentication + */ + get signature(): string; + /** + * Create an instance of an AzureSASCredential for use + * with a service client. + * + * @param signature - The initial value of the shared access signature to use in authentication + */ + constructor(signature: string); + /** + * Change the value of the signature. 
+ * + * Updates will take effect upon the next request after + * updating the signature value. + * + * @param newSignature - The new shared access signature value to be used + */ + update(newSignature: string): void; +} + +/** + * An interface structurally compatible with OpenTelemetry. + */ +export declare interface Context { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} + +/** + * Defines options for TokenCredential.getToken. + */ +export declare interface GetTokenOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: { + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + }; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: { + /** + * OpenTelemetry SpanOptions used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * OpenTelemetry context + */ + tracingContext?: Context; + }; + /** + * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints. + */ + tenantId?: string; +} + +/** + * Tests an object to determine whether it implements NamedKeyCredential. + * + * @param credential - The assumed NamedKeyCredential to be tested. + */ +export declare function isNamedKeyCredential(credential: unknown): credential is NamedKeyCredential; + +/** + * Tests an object to determine whether it implements SASCredential. + * + * @param credential - The assumed SASCredential to be tested. + */ +export declare function isSASCredential(credential: unknown): credential is SASCredential; + +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +export declare function isTokenCredential(credential: unknown): credential is TokenCredential; + +/** + * Represents a credential defined by a static API key. + */ +export declare interface KeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; +} + +/** + * Represents a credential defined by a static API name and key. + */ +export declare interface NamedKeyCredential { + /** + * The value of the API key represented as a string + */ + readonly key: string; + /** + * The value of the API name represented as a string. + */ + readonly name: string; +} + +/** + * Represents a credential defined by a static shared access signature. + */ +export declare interface SASCredential { + /** + * The value of the shared access signature represented as a string + */ + readonly signature: string; +} + +/** + * Attributes for a Span. + */ +export declare interface SpanAttributes { + /** + * Span attributes. + */ + [attributeKey: string]: SpanAttributeValue | undefined; +} + +/** + * Attribute values may be any non-nullish primitive value except an object. 
+ * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type SpanAttributeValue = string | number | boolean | Array<null | undefined | string> | Array<null | undefined | number> | Array<null | undefined | boolean>; + +/** + * A light interface that tries to be structurally compatible with OpenTelemetry. + */ +export declare interface SpanContext { + /** + * UUID of a trace. + */ + traceId: string; + /** + * UUID of a Span. + */ + spanId: string; + /** + * https://www.w3.org/TR/trace-context/#trace-flags + */ + traceFlags: number; +} + +/** + * An interface that enables manual propagation of Spans. + */ +export declare interface SpanOptions { + /** + * Attributes to set on the Span + */ + attributes?: SpanAttributes; +} + +/** + * Represents a credential capable of providing an authentication token. + */ +export declare interface TokenCredential { + /** + * Gets the token provided by this credential. + * + * This method is called automatically by Azure SDK client libraries. You may call this method + * directly, but you must also handle token caching and token refreshing. + * + * @param scopes - The list of scopes for which the token will have access. + * @param options - The options used to configure any requests this + * TokenCredential implementation might make. + */ + getToken(scopes: string | string[], options?: GetTokenOptions): Promise<AccessToken | null>; +} + +export { } diff --git a/node_modules/@azure/core-http/CHANGELOG.md b/node_modules/@azure/core-http/CHANGELOG.md new file mode 100644 index 0000000000..e20ccb575c --- /dev/null +++ b/node_modules/@azure/core-http/CHANGELOG.md @@ -0,0 +1,262 @@ +# Release History + +## 2.2.5 (2022-05-05) + +### Bugs Fixed + +- Fix an issue where React-Native is loading the wrong file. Adding a `react-native` mapping to point to the ESM entrypoint file. [PR #21118](https://github.com/Azure/azure-sdk-for-js/pull/21118) +- delay creating XML parser/builder objects so that packages not requiring XML functionality but running on platforms lacking XML support can still load this package. [PR #21118](https://github.com/Azure/azure-sdk-for-js/pull/21118) +- Add a `react-native` mapping to use `xml2js` as it is already in our dependency list. Customer can get it to work after installing `stream` and `timers` packages in their React-Native project. +- Fix a run-time error in user agent policy when running in React-Native. + +## 2.2.4 (2022-02-03) + +### Bugs Fixed + +- Updated the HTTP tracing span names to conform to the [OpenTelemetry Specification](https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#name). [#19838](https://github.com/Azure/azure-sdk-for-js/pull/19838) + - New HTTP spans will use the `HTTP ` convention instead of using the URL path. + +## 2.2.3 (2022-01-06) + +### Bugs Fixed + +- Fix `HttpHeaders.rawHeaders()` to preserve header name case. As a result HttpClient now sends requests with their original header names. `HttpHeaders.toJson()` now has an option to preserve header key casing. 
+ +### Other Changes + +- Update dependency `node-fetch` version to `2.6.6` to address advisory [CVE-2020-15168](https://github.com/advisories/GHSA-w7rc-rwvf-8q5r) + +## 2.2.2 (2021-11-03) + +### Bugs Fixed + +- Fix the issue of `HttpHeaders.clone()` not preserving raw header names' casing [PR #18348](https://github.com/Azure/azure-sdk-for-js/pull/18348) +- Fix the issue of network connection not being closed when download stream is closed [PR #14015](https://github.com/Azure/azure-sdk-for-js/pull/14015) + +## 2.2.1 (2021-09-30) + +### Bugs Fixed + +- Fix incorrect caching of proxy agents [PR #17546](https://github.com/Azure/azure-sdk-for-js/pull/17546) + +## 2.2.0 (2021-09-02) + +### Bugs Fixed + +- `tracingPolicy` will no longer propagate tracing errors to the caller, and such errors will be logged instead and the operation does not get interrupted. [PR #16916](https://github.com/Azure/azure-sdk-for-js/pull/16916) + +## 2.1.0 (2021-07-22) + +### Features Added + +- `tracingPolicy` will no longer inject invalid traceparent headers if an incorrect tracer implementation is used. +- `proxyPolicy` now allows passing in a list of no-proxy patterns to override global ones loaded from NO_PROXY environment variable [PR #16414](https://github.com/Azure/azure-sdk-for-js/pull/16414) + +## 2.0.0 (2021-06-30) + +### Features Added + +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features. +- Added support for the `Retry-After` header on responses with status code 503, Service Unavailable. +- Added support for multiple retries on the `ThrottlingRetryPolicy` (up to 3 by default). + +### Breaking Changes + +- Updated @azure/core-tracing to version `1.0.0-preview.12`. See [@azure/core-tracing CHANGELOG](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/CHANGELOG.md) for details about breaking changes with tracing. + +### Fixed + +- Fixed an issue where `proxySettings` does not work when there is username but no password [Issue 15720](https://github.com/Azure/azure-sdk-for-js/issues/15720) +- Throttling retry policy respects abort signal [#15796](https://github.com/Azure/azure-sdk-for-js/issues/15796) + +## 1.2.6 (2021-06-14) + +### Key Bugs Fixed + +- Fixed an issue of lost properties when flattening array in deserialization [issue 15653](https://github.com/azure/azure-sdk-for-js/issues/15653) +- Fixed an issue of incorrect minimum version of tslib [issue 15697](https://github.com/Azure/azure-sdk-for-js/issues/15697) + +## 1.2.5 (2021-06-03) + +### Fixed + +- Delay loading of NO_PROXY environment variable until when request pipeline is being created. This fixes [issue 14873](https://github.com/Azure/azure-sdk-for-js/issues/14873) +- Fixed an issue where tracing spans were not setting a status correctly (on success or error) which results in the span status being `UNSET`. In addition, we will now capture the HTTP status code when a request fails in the tracing span. [PR 15061](https://github.com/Azure/azure-sdk-for-js/pull/15061) +- Fix packaging issue [PR 15286](https://github.com/Azure/azure-sdk-for-js/pull/15286) +- Improve the sanitizer to handle recursive objects [PR 15426](https://github.com/Azure/azure-sdk-for-js/pull/15426) + +## 1.2.4 (2021-03-30) + +- Rewrote `bearerTokenAuthenticationPolicy` to use a new backend that refreshes tokens only when they're about to expire and not multiple times before. This fixes the issue: [13369](https://github.com/Azure/azure-sdk-for-js/issues/13369). 
+ +### Breaking Changes + +- Updated @azure/core-tracing to version `1.0.0-preview.11`. See [@azure/core-tracing CHANGELOG](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/CHANGELOG.md) for details about breaking changes with tracing. + +## 1.2.3 (2021-02-04) + +- Don't set a default content-type when there is no request body. [PR 13233](https://github.com/Azure/azure-sdk-for-js/pull/13233) +- Clean up abort event handler properly for streaming operations. Fixed [issue 12029](https://github.com/Azure/azure-sdk-for-js/issues/12029) +- Reduce memory usage of the cache in proxy policy. Fixed [issue 13277](https://github.com/Azure/azure-sdk-for-js/issues/13277) +- Fix an issue where non-streaming response body was treated as stream [PR 13192](https://github.com/Azure/azure-sdk-for-js/pull/13192) +- Browser XML parser now lazily load parser error namespace into cache. Fixed [issue 13268](https://github.com/Azure/azure-sdk-for-js/issues/13268) +- Add ms-cv header used as correlation vector (used for distributed tracing) to list of non-redacted headers so that clients can share this header for debugging purposes. [PR 13541](https://github.com/Azure/azure-sdk-for-js/pull/13541) + +## 1.2.2 (2021-01-07) + +- Cache the default `HttpClient` used when one isn't passed in to `ServiceClient`. This means that for most packages we will only create a single `HttpClient`, which will improve platform resource usage by reducing overhead. [PR 12966](https://github.com/Azure/azure-sdk-for-js/pull/12966) + +## 1.2.1 (2020-12-09) + +- Support custom auth scopes. Scope is a mechanism in OAuth 2.0 to limit an application's access to a user's account [PR 12752](https://github.com/Azure/azure-sdk-for-js/pull/12752) +- Add helper function `createSpan` to help in the creation of tracing spans in Track2 libraries [PR 12525](https://github.com/Azure/azure-sdk-for-js/pull/12525) + +## 1.2.0 (2020-11-05) + +- Explicitly set `manual` redirect handling for node fetch. And fixing redirectPipeline [PR 11863](https://github.com/Azure/azure-sdk-for-js/pull/11863) +- Add support for multiple error response codes. [PR 11841](https://github.com/Azure/azure-sdk-for-js/) +- Allow customizing serializer behavior via optional `SerialzierOptions` parameters. 
Particularly allow using a different `XML_CHARKEY` than the default `_` when parsing XML [PR 12065](https://github.com/Azure/azure-sdk-for-js/pull/12065) +- Unexpected status code is now handled as an error if a mapper is not provided for it [PR 12153](https://github.com/Azure/azure-sdk-for-js/pull/12153) +- Empty collection is now serialized correctly for `multi` format query parameters [PR 12090](https://github.com/Azure/azure-sdk-for-js/pull/12090) +- De-serialize error from parsed object before trying to use its `code` and `message` properties [PR 12150](https://github.com/Azure/azure-sdk-for-js/pull/12150) +- Fix an error thrown on older versions of Safari [PR 11759](https://github.com/Azure/azure-sdk-for-js/pull/11759) + +## 1.1.9 (2020-09-30) + +- Add support for XML namespaces in deserialization [PR 11201](https://github.com/Azure/azure-sdk-for-js/pull/11201) + +- Add support for NDJSON format [PR 11325](https://github.com/Azure/azure-sdk-for-js/pull/11325) + +- Add support for `NO_PROXY` and `ALL_PROXY` environment variables [PR 7069](https://github.com/Azure/azure-sdk-for-js/pull/7069) + +- Serializer now resolves `additionalProperties` correctly from referenced mapper [PR 11473](https://github.com/Azure/azure-sdk-for-js/pull/11473) + +- Empty wrapped XML element lists are no properly de-serialized. This fixes [issue 11071](https://github.com/Azure/azure-sdk-for-js/issues/11071) + +## 1.1.8 (2020-09-08) + +- `URLQuery` parsing now accepts `=` in query parameter values and no longer drops such query parameter name/value pairs. This fixes [issue 11014](https://github.com/Azure/azure-sdk-for-js/issues/11014) + +## 1.1.7 (2020-09-02) + +- `BearerTokenAuthenticationPolicy` now starts trying to refresh the token 30 seconds before the token expires. It will only try to refresh said token on each request when refreshing is not in progress. In the mean time, requests will use the existing token. This fixes [issue 10084](https://github.com/Azure/azure-sdk-for-js/issues/10084). +- Un-export the `{...}Policy` classes that were meant to be internal to `@azure/core-http`. [PR 10738](https://github.com/Azure/azure-sdk-for-js/pull/10738) + +## 1.1.6 (2020-08-04) + +- Update dependencies `@azure/core-tracing` to version 1.0.0-preview.9 and `@opentelemetry/api` to version 0.10.2 [PR 10393](https://github.com/Azure/azure-sdk-for-js/pull/10393) + +## 1.1.5 (2020-08-04) + +- The global `fetch()` is no longer overridden by node-fetch. This fixed an issue impacting Electron render process. [PR #9880](https://github.com/Azure/azure-sdk-for-js/pull/9880) + +## 1.1.4 (2020-07-02) + +- Fix issue with flattened model serialization, where constant properties are being dropped [PR #8658](https://github.com/Azure/azure-sdk-for-js/pull/8658) +- Switch to use `x-ms-useragent` header key for passing user agent info in browsers [PR #9490](https://github.com/Azure/azure-sdk-for-js/pull/9490) +- Refactor `ExponentialRetryPolicy` and `SystemErrorRetryPolicy` to share common code [PR #9667](https://github.com/Azure/azure-sdk-for-js/pull/9490) + +## 1.1.3 (2020-06-03) + +- Fix issue of `SystemErrorRetryPolicy` didn't retry on errors [PR #8803](https://github.com/Azure/azure-sdk-for-js/pull/8803) +- Add support for serialization of text media type. [PR #8977](https://github.com/Azure/azure-sdk-for-js/pull/8977) +- Fix issue with URLBuilder incorrectly handling full URL in path. 
[PR #9245](https://github.com/Azure/azure-sdk-for-js/pull/9245) + +## 1.1.2 (2020-05-07) + +- Fix issue with null/undefined values in array and tabs/space delimiter arrays during sendOperationRequest. [PR #8604](https://github.com/Azure/azure-sdk-for-js/pull/8604) + +## 1.1.1 (2020-04-28) + +- Add support for `text/plain` endpoints. [PR #7963](https://github.com/Azure/azure-sdk-for-js/pull/7963) +- Updated to use OpenTelemetry 0.6.1 via `@azure/core-tracing`. + +## 1.1.0 (2020-03-31) + +- A new interface `WebResourceLike` was introduced to avoid a direct dependency on the class `WebResource` in public interfaces. `HttpHeadersLike` was also added to replace references to `HttpHeaders`. This should improve cross-version compatibility for core-http. [PR #7873](https://github.com/Azure/azure-sdk-for-js/pull/7873) + +- Add support to disable response decompression in `node-fetch` Http client. [PR #7878](https://github.com/Azure/azure-sdk-for-js/pull/7878) + +## 1.0.4 (2020-03-03) + +- When an operation times out based on the `timeout` configured in the `OperationRequestOptions`, it gets terminated with an error. In this update, the error that is thrown in browser for such cases is updated to match what is thrown in node i.e an `AbortError` is thrown instead of the previous `RestError`. [PR #7159](https://github.com/Azure/azure-sdk-for-js/pull/7159) +- Support for username and password in the proxy url [PR #7211](https://github.com/Azure/azure-sdk-for-js/pull/7211) +- Removed dependency on `lib.dom.d.ts` so TypeScript users no longer need `lib: ["dom"]` in their tsconfig.json file to use this library. [PR #7500](https://github.com/Azure/azure-sdk-for-js/pull/7500) + +## 1.0.3 (2020-01-02) + +- Added `x-ms-useragent` to the list of allowed headers in request logs. +- Fix issue of data being pushed twice when reporting progress ([PR #6427](https://github.com/Azure/azure-sdk-for-js/issues/6427)) +- Move `getDefaultProxySettings()` calls into `proxyPolicy` so that libraries that don't use the PipelineOptions or createDefaultRequestPolicyFactories from core-http can use this behavior without duplicating code. ([PR #6478](https://github.com/Azure/azure-sdk-for-js/issues/6478)) +- Fix tracingPolicy() to set standard span attributes ([PR #6565](https://github.com/Azure/azure-sdk-for-js/pull/6565)). Now the following are set correctly for the spans + - `http.method` + - `http.url` + - `http.user_agent` + - `http.status_code` + - `requestId` + - `serviceRequestId` + - `kind` = Client + - span name: URI path + +## 1.0.2 (2019-12-02) + +- Updated to use OpenTelemetry 0.2 via `@azure/core-tracing` + +## 1.0.0 (2019-10-29) + +- This release marks the general availability of the `@azure/core-http` package. +- Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. + ([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.6 (2019-10-22) + +- Introduced a HTTP pipeline configuration type, `PipelineOptions`, which makes it easier to configure common settings for API requests. This is now the options type used in the `@azure/keyvault-*` data plane libraries. +- Support for HTTP request logging has been redesigned to use `@azure/logger`; use`AZURE_LOG_LEVEL="info"` to see full request/response logs when running in Node.js. In the browser, you can import `setLogLevel` from `@azure/logger` to change the log level; logs will then be written to the browser console. 
+- Removed error type `ResponseBodyNotFoundError` that was introduced in the previous preview. Use cases for it were removed. +- Fixed an issue where error response details were not being deserialized correctly when the default mapper is used. +- In Node.js, HTTP agents configured for proxy or keepAlive are now reused across requests. This resolves a memory leak reported in [#4964](https://github.com/Azure/azure-sdk-for-js/issues/4964). +- Fixes a memory leak issue resulting from event listeners not being removed from abortSignals. +- Cancelling an operation using an `abortSignal` will now throw an `AbortError`. + The `name` property on the error will match "AbortError". + +## 1.0.0-preview.4 (2019-10-07) + +- No longer re-exports `@azure/core-tracing`. To enable tracing, call `setTracer` from `@azure/core-tracing` directly. + ([PR #5389](https://github.com/Azure/azure-sdk-for-js/pull/5389)) +- Report upload/download progress only after the first data chunk is received + ([PR #5298](https://github.com/Azure/azure-sdk-for-js/pull/5298)) +- Added new error type `ResponseBodyNotFoundError` for cases when the response body is unexpectedly empty. + ([PR #5369](https://github.com/Azure/azure-sdk-for-js/pull/5369)) +- Temporary fix for a memory leak issue resulting from creating new agents every time WebResource is cloned. + ([PR #5396](https://github.com/Azure/azure-sdk-for-js/pull/5396)) + +## 1.0.0-preview.3 (2019-09-09) + +- Syncs changes from `@azure/ms-rest-js` to `@azure/core-http`. + ([PR #4756](https://github.com/Azure/azure-sdk-for-js/pull/4756)). + - Updates HTTP clients to `fetch` and `node-fetch` for the browser and node.js respectively. +- Reintroduces `ServiceClientCredentials` type to `credentials` parameter of `ServiceClient` + ([PR #4773](https://github.com/Azure/azure-sdk-for-js/pull/4773)). +- Updates types for better compatibility with TypeScript 3.6.x. + ([PR #4928](https://github.com/Azure/azure-sdk-for-js/pull/4928)). +- Adds `TracingPolicy` to support setting `traceparent` and `tracestate` headers + when setting spans in operation as per the [trace context HTTP headers format](https://www.w3.org/TR/trace-context/#trace-context-http-headers-format). + ([PR #4712](https://github.com/Azure/azure-sdk-for-js/pull/4712)). +- Adds `text/plain` as an accepted MIME type for JSON output. + ([PR #4975](https://github.com/Azure/azure-sdk-for-js/pull/4975)). +- Exposes `ProxySettings` type. ([PR #5043](https://github.com/Azure/azure-sdk-for-js/pull/5043)). +- Fixes bug where `WebResource.clone` would not copy `proxySettings` or `keepAlive` settings. + ([PR #5047](https://github.com/Azure/azure-sdk-for-js/pull/5047)). + +## 1.0.0-preview.2 (2019-08-05) + +- Removed `ServiceClientCredentials` type from `credentials` parameter of `ServiceClient` ([PR #4367](https://github.com/Azure/azure-sdk-for-js/pull/4367)). Credential implementations are now standardized on `@azure/core-auth`'s `TokenCredential` interface and provided by `@azure/identity`. +- Added an `AccessTokenCache` so that access tokens can be cached across pipeline instances ([PR #4174](https://github.com/Azure/azure-sdk-for-js/pull/4174)). +- Fixed the issue preventing `ServiceClient` from correctly setting the scope's resource URI when creating a `BearerTokenAuthenticationPolicy` ([PR #4335](https://github.com/Azure/azure-sdk-for-js/pull/4335)). +- Migrated over `AxiosHttpClient` fixes from `@azure/ms-rest-js` ([PR #4106](https://github.com/Azure/azure-sdk-for-js/pull/4106)). 
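The changelog entries above, together with the `@azure/core-auth` type declarations earlier in this diff, standardize credentials on the `TokenCredential` interface. As an illustrative aside only (not part of the vendored files), a minimal sketch of a credential that returns a fixed token, matching the `getToken`/`AccessToken` shapes declared above, might look like the following; `StaticTokenCredential` and its constructor arguments are hypothetical names used purely for this sketch:

```ts
import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth";

// Hypothetical helper for local testing: always returns the same token.
// A real credential (for example, one from @azure/identity) would request
// and refresh tokens as needed.
export class StaticTokenCredential implements TokenCredential {
  constructor(
    private readonly token: string,
    private readonly expiresOnTimestamp: number
  ) {}

  async getToken(
    _scopes: string | string[],
    _options?: GetTokenOptions
  ): Promise<AccessToken | null> {
    // AccessToken carries the raw token plus its expiry in epoch milliseconds.
    return { token: this.token, expiresOnTimestamp: this.expiresOnTimestamp };
  }
}

// Usage sketch: pass an instance anywhere a client accepts a TokenCredential.
// const credential = new StaticTokenCredential("<token>", Date.now() + 60 * 60 * 1000);
```
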
+ +## 1.0.0-preview.1 (2019-06-25) + +- Forked `@azure/ms-rest-js` to `@azure/core-http` to form the base HTTP pipeline of the Azure SDK TypeScript/JavaScript libraries. +- Added `TokenCredential` to define a simple interface for credentials that provided bearer tokens +- Added `BearerTokenAuthenticationPolicy` which can use a `TokenCredential` implementation to perform bearer token authentication against Azure services diff --git a/node_modules/@azure/core-http/LICENSE b/node_modules/@azure/core-http/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/core-http/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-http/README.md b/node_modules/@azure/core-http/README.md new file mode 100644 index 0000000000..2ee90212ff --- /dev/null +++ b/node_modules/@azure/core-http/README.md @@ -0,0 +1,72 @@ +# Azure Core HTTP client library for JavaScript + +This is the core HTTP pipeline for Azure SDK JavaScript libraries which work in the browser and Node.js. This library is primarily intended to be used in code generated by [AutoRest](https://github.com/Azure/Autorest) and [`autorest.typescript`](https://github.com/Azure/autorest.typescript). + +## Getting started + +### Currently supported environments + +- [LTS versions of Node.js](https://nodejs.org/about/releases/) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +### Installation + +This package is primarily used in generated code and not meant to be consumed directly by end users. + +## Key concepts + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/docs/architectureOverview.md). + +## Examples + +Examples can be found in the `samples` folder. + +## Next steps + +- Build this library (`core-http`). For more information on how to build project in this repo, please refer to the [Contributing Guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md). + +- The code in `samples\node-sample.ts` shows how to create a `ServiceClient` instance with a test `TokenCredential` implementation and use the client instance to perform a `GET` operation from the Azure management service endpoint for subscriptions. 
To run the code, first obtain an access token to the Azure management service. + +One easy way to get an access token is using [Azure CLI](https://docs.microsoft.com/cli/azure/?view=azure-cli-latest) + +1. Sign in +```shell +az login +``` +2. Select the subscription to use +```shell +az account set -s +``` +3. Obtain an access token +```shell +az account get-access-token --resource=https://management.azure.com +``` + +### NodeJS + +- Set values of `subscriptionId` and `token` variable in `samples/node-sample.ts` + +- Change directory to samples folder, compile the TypeScript code, then run the sample + +``` +cd samples +tsc node-sample.ts +node node-sample.js +``` + +### Browser + +- Set values of `subscriptionId` and `token` variable in `samples/index.js` +- Follow the instructions of [JavaScript Bundling Guide using Parcel](https://github.com/Azure/azure-sdk-for-js/blob/main/documentation/Bundling.md#using-parcel) to build and run the code in the browser. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-http%2FREADME.png) diff --git a/node_modules/@azure/core-http/dist-esm/src/coreHttp.js b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js new file mode 100644 index 0000000000..7692409797 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+/* eslint-disable-next-line @typescript-eslint/triple-slash-reference */ +/// +export { WebResource, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpHeaders } from "./httpHeaders"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { ServiceClient, flattenResponse, createPipelineFromOptions, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicyOptions, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy } from "./policies/redirectPolicy"; +export { keepAlivePolicy } from "./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { tracingPolicy } from "./policies/tracingPolicy"; +export { MapperType, Serializer, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { delay } from "./util/delay"; +// legacy exports. Use core-tracing instead (and remove on next major version update of core-http). 
+export { createSpanFunction } from "./createSpanLegacy"; +// Credentials +export { isTokenCredential } from "@azure/core-auth"; +export { ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials } from "./credentials/apiKeyCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +//# sourceMappingURL=coreHttp.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/coreHttp.js.map b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js.map new file mode 100644 index 0000000000..4cdae3912e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/coreHttp.js.map @@ -0,0 +1 @@ +{"version":3,"file":"coreHttp.js","sourceRoot":"","sources":["../../src/coreHttp.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,wEAAwE;AACxE,yCAAyC;AAEzC,OAAO,EACL,WAAW,GAQZ,MAAM,eAAe,CAAC;AAEvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD,OAAO,EAAc,WAAW,EAAmC,MAAM,eAAe,CAAC;AAGzF,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC,OAAO,EAGL,oCAAoC,GACrC,MAAM,oBAAoB,CAAC;AAS5B,OAAO,EACL,aAAa,EAEb,eAAe,EACf,yBAAyB,GAG1B,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAoB,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EACL,iBAAiB,EAGjB,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAgB,SAAS,EAAE,MAAM,mCAAmC,CAAC;AACpG,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,cAAc,EAAmB,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAoB,MAAM,4BAA4B,CAAC;AAC/E,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,eAAe,EACf,wBAAwB,GAGzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,qBAAqB,EAErB,uBAAuB,GAExB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAwB,MAAM,0BAA0B,CAAC;AAC/E,OAAO,EACL,UAAU,EAcV,UAAU,EAEV,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EAET,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAE7C,OAAO,EAAE,KAAK,EAAE,MAAM,cAAc,CAAC;AACrC,mGAAmG;AACnG,OAAO,EAAE,kBAAkB,EAAc,MAAM,oBAAoB,CAAC;AAEpE,cAAc;AACd,OAAO,EAAiD,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpG,OAAO,EAAoB,wBAAwB,EAAE,MAAM,gCAAgC,CAAC;AAC5F,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC1E,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,iCAAiC,CAAC;AAE7F,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAGlE,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,WAAW,EAAqB,MAAM,0BAA0B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/* eslint-disable-next-line @typescript-eslint/triple-slash-reference */\n/// \n\nexport {\n WebResource,\n WebResourceLike,\n HttpRequestBody,\n RequestPrepareOptions,\n HttpMethods,\n ParameterValue,\n RequestOptionsBase,\n TransferProgressEvent,\n} from \"./webResource\";\nexport { CommonResponse, CommonRequestInit, CommonRequestInfo } from \"./nodeFetchHttpClient\";\nexport { DefaultHttpClient } from 
\"./defaultHttpClient\";\nexport { HttpClient } from \"./httpClient\";\nexport { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from \"./httpHeaders\";\nexport { HttpOperationResponse, HttpResponse, RestResponse } from \"./httpOperationResponse\";\nexport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nexport { HttpPipelineLogLevel } from \"./httpPipelineLogLevel\";\nexport { RestError } from \"./restError\";\nexport { OperationArguments } from \"./operationArguments\";\nexport {\n OperationOptions,\n OperationRequestOptions,\n operationOptionsToRequestOptionsBase,\n} from \"./operationOptions\";\nexport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n ParameterPath,\n} from \"./operationParameter\";\nexport { OperationResponse } from \"./operationResponse\";\nexport { OperationSpec } from \"./operationSpec\";\nexport {\n ServiceClient,\n ServiceClientOptions,\n flattenResponse,\n createPipelineFromOptions,\n ProxySettings,\n ProxyOptions,\n} from \"./serviceClient\";\nexport { PipelineOptions, InternalPipelineOptions } from \"./pipelineOptions\";\nexport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nexport { Constants } from \"./util/constants\";\nexport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nexport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nexport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nexport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nexport { exponentialRetryPolicy, RetryOptions, RetryMode } from \"./policies/exponentialRetryPolicy\";\nexport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nexport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nexport { getDefaultProxySettings, proxyPolicy } from \"./policies/proxyPolicy\";\nexport { redirectPolicy, RedirectOptions } from \"./policies/redirectPolicy\";\nexport { keepAlivePolicy, KeepAliveOptions } from \"./policies/keepAlivePolicy\";\nexport { disableResponseDecompressionPolicy } from \"./policies/disableResponseDecompressionPolicy\";\nexport { signingPolicy } from \"./policies/signingPolicy\";\nexport {\n userAgentPolicy,\n getDefaultUserAgentValue,\n UserAgentOptions,\n TelemetryInfo,\n} from \"./policies/userAgentPolicy\";\nexport {\n deserializationPolicy,\n DeserializationOptions,\n deserializeResponseBody,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nexport { tracingPolicy, TracingPolicyOptions } from \"./policies/tracingPolicy\";\nexport {\n MapperType,\n SimpleMapperType,\n CompositeMapperType,\n DictionaryMapperType,\n SequenceMapperType,\n EnumMapperType,\n Mapper,\n BaseMapper,\n CompositeMapper,\n SequenceMapper,\n DictionaryMapper,\n EnumMapper,\n MapperConstraints,\n PolymorphicDiscriminator,\n Serializer,\n UrlParameterValue,\n serializeObject,\n} from \"./serializer\";\nexport {\n stripRequest,\n stripResponse,\n executePromisesSequentially,\n generateUuid,\n encodeUri,\n ServiceCallback,\n promiseToCallback,\n promiseToServiceCallback,\n isValidUuid,\n applyMixins,\n isNode,\n isDuration,\n} from \"./util/utils\";\nexport { URLBuilder, URLQuery } from \"./url\";\nexport { AbortSignalLike } from \"@azure/abort-controller\";\nexport { delay } from \"./util/delay\";\n// legacy exports. 
Use core-tracing instead (and remove on next major version update of core-http).\nexport { createSpanFunction, SpanConfig } from \"./createSpanLegacy\";\n\n// Credentials\nexport { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from \"@azure/core-auth\";\nexport { AccessTokenCache, ExpiringAccessTokenCache } from \"./credentials/accessTokenCache\";\nexport { AccessTokenRefresher } from \"./credentials/accessTokenRefresher\";\nexport { BasicAuthenticationCredentials } from \"./credentials/basicAuthenticationCredentials\";\nexport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./credentials/apiKeyCredentials\";\nexport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nexport { TopicCredentials } from \"./credentials/topicCredentials\";\nexport { Authenticator } from \"./credentials/credentials\";\n\nexport { parseXML, stringifyXML } from \"./util/xml\";\nexport { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from \"./util/serializer.common\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js new file mode 100644 index 0000000000..5deff577f7 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// NOTE: we've moved this code into core-tracing but these functions +// were a part of the GA'd library and can't be removed until the next major +// release. They currently get called always, even if tracing is not enabled. +import { createSpanFunction as coreTracingCreateSpanFunction } from "@azure/core-tracing"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export function createSpanFunction(args) { + return coreTracingCreateSpanFunction(args); +} +//# sourceMappingURL=createSpanLegacy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map new file mode 100644 index 0000000000..938069765b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpanLegacy.js","sourceRoot":"","sources":["../../src/createSpanLegacy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,oEAAoE;AACpE,4EAA4E;AAC5E,6EAA6E;AAE7E,OAAO,EAAQ,kBAAkB,IAAI,6BAA6B,EAAE,MAAM,qBAAqB,CAAC;AAoBhG;;;;;;;;GAQG;AACH,MAAM,UAAU,kBAAkB,CAChC,IAAgB;IAKhB,OAAO,6BAA6B,CAAC,IAAI,CAAC,CAAC;AAC7C,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// NOTE: we've moved this code into core-tracing but these functions\n// were a part of the GA'd library and can't be removed until the next major\n// release. They currently get called always, even if tracing is not enabled.\n\nimport { Span, createSpanFunction as coreTracingCreateSpanFunction } from \"@azure/core-tracing\";\nimport { OperationOptions } from \"./operationOptions\";\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. 
Use core-tracing instead.\n * @hidden\n */\nexport interface SpanConfig {\n /**\n * Package name prefix\n */\n packagePrefix: string;\n /**\n * Service namespace\n */\n namespace: string;\n}\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing.\n * @hidden\n\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nexport function createSpanFunction(\n args: SpanConfig\n): (\n operationName: string,\n operationOptions: T\n) => { span: Span; updatedOptions: T } {\n return coreTracingCreateSpanFunction(args);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js new file mode 100644 index 0000000000..1ac13dd8ea --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +export const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export class ExpiringAccessTokenCache { + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + this.cachedToken = undefined; + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} +//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map new file mode 100644 index 0000000000..8786e71699 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenCache.js","sourceRoot":"","sources":["../../../src/credentials/accessTokenCache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;GAEG;AACH,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,CAAC,YAAY;AAqB/D;;;;;;GAMG;AACH,MAAM,OAAO,wBAAwB;IAInC;;;OAGG;IACH,YAAY,uBAA+B,oBAAoB;QANvD,gBAAW,GAAiB,SAAS,CAAC;QAO5C,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;IACnD,CAAC;IAED;;;OAGG;IACH,cAAc,CAAC,WAAoC;QACjD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;IACjC,CAAC;IAED;;OAEG;IACH,cAAc;QACZ,IACE,IAAI,CAAC,WAAW;YAChB,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAC7E;YACA,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;SAC9B;QAED,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken } from \"@azure/core-auth\";\n\n/**\n * Defines the default token refresh buffer duration.\n */\nexport const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n\n/**\n * Provides a cache for an AccessToken that was that\n * was returned from a TokenCredential.\n */\nexport interface AccessTokenCache {\n /**\n * Sets the cached token.\n *\n * @param accessToken - The {@link AccessToken} to be cached or null to\n * clear the cached token.\n */\n setCachedToken(accessToken: AccessToken | undefined): void;\n\n /**\n * Returns the cached {@link AccessToken} or undefined if nothing is cached.\n */\n getCachedToken(): AccessToken | undefined;\n}\n\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class ExpiringAccessTokenCache implements AccessTokenCache {\n private tokenRefreshBufferMs: number;\n private cachedToken?: AccessToken = undefined;\n\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n constructor(tokenRefreshBufferMs: number = TokenRefreshBufferMs) {\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n\n /**\n * Saves an access token into the internal in-memory cache.\n * @param accessToken - Access token or undefined to clear the cache.\n */\n setCachedToken(accessToken: AccessToken | undefined): void {\n this.cachedToken = accessToken;\n }\n\n /**\n * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon.\n */\n getCachedToken(): AccessToken | undefined {\n if (\n this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp\n ) {\n this.cachedToken = undefined;\n }\n\n return this.cachedToken;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js new file mode 100644 
index 0000000000..a866fb9d16 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export class AccessTokenRefresher { + constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { + this.credential = credential; + this.scopes = scopes; + this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; + this.lastCalled = 0; + } + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady() { + // We're only ready for a new refresh if the required milliseconds have passed. + return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); + } + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + async getToken(options) { + this.lastCalled = Date.now(); + const token = await this.credential.getToken(this.scopes, options); + this.promise = undefined; + return token || undefined; + } + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options) { + if (!this.promise) { + this.promise = this.getToken(options); + } + return this.promise; + } +} +//# sourceMappingURL=accessTokenRefresher.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map new file mode 100644 index 0000000000..3388d9beef --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenRefresher.js","sourceRoot":"","sources":["../../../src/credentials/accessTokenRefresher.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;GAIG;AACH,MAAM,OAAO,oBAAoB;IAI/B,YACU,UAA2B,EAC3B,MAAyB,EACzB,uCAA+C,KAAK;QAFpD,eAAU,GAAV,UAAU,CAAiB;QAC3B,WAAM,GAAN,MAAM,CAAmB;QACzB,yCAAoC,GAApC,oCAAoC,CAAgB;QALtD,eAAU,GAAG,CAAC,CAAC;IAMpB,CAAC;IAEJ;;;OAGG;IACI,OAAO;QACZ,+EAA+E;QAC/E,OAAO,CACL,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,oCAAoC,CAC7F,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACK,KAAK,CAAC,QAAQ,CAAC,OAAwB;QAC7C,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACnE,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QACzB,OAAO,KAAK,IAAI,SAAS,CAAC;IAC5B,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,OAAwB;QACrC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\n\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class AccessTokenRefresher {\n private 
promise: Promise | undefined;\n private lastCalled = 0;\n\n constructor(\n private credential: TokenCredential,\n private scopes: string | string[],\n private requiredMillisecondsBeforeNewRefresh: number = 30000\n ) {}\n\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n public isReady(): boolean {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (\n !this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh\n );\n }\n\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns the token.\n */\n private async getToken(options: GetTokenOptions): Promise {\n this.lastCalled = Date.now();\n const token = await this.credential.getToken(this.scopes, options);\n this.promise = undefined;\n return token || undefined;\n }\n\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n public refresh(options: GetTokenOptions): Promise {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n\n return this.promise;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js new file mode 100644 index 0000000000..d3f2a4faed --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js @@ -0,0 +1,53 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "../httpHeaders"; +/** + * Authenticates to a service using an API key. + */ +export class ApiKeyCredentials { + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource) { + if (!webResource) { + return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + return Promise.resolve(webResource); + } +} +//# sourceMappingURL=apiKeyCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map new file mode 100644 index 0000000000..db7ff15098 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.js","sourceRoot":"","sources":["../../../src/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAkB7C;;GAEG;AACH,MAAM,OAAO,iBAAiB;IAU5B;;OAEG;IACH,YAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,0HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,uEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,MAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAI,GAAG,GAAG,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header 
parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js new file mode 100644 index 0000000000..fc49c72557 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +import { HttpHeaders } from "../httpHeaders"; +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export class BasicAuthenticationCredentials { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". 
+ * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`; + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} +//# sourceMappingURL=basicAuthenticationCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map new file mode 100644 index 0000000000..c5129004f9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.js","sourceRoot":"","sources":["../../../src/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAI7C,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,MAAM,4BAA4B,GAAG,OAAO,CAAC;AAE7C;;GAEG;AACH,MAAM,OAAO,8BAA8B;IAiBzC;;;;;;OAMG;IACH,YACE,QAAgB,EAChB,QAAgB,EAChB,sBAA8B,4BAA4B;QAhB5D;;;WAGG;QACH,wBAAmB,GAAW,4BAA4B,CAAC;QAczD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,WAAW,CAAC,WAA4B;QACtC,MAAM,WAAW,GAAG,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;QACxD,MAAM,kBAAkB,GAAG,GAAG,IAAI,CAAC,mBAAmB,IAAI,MAAM,CAAC,YAAY,CAAC,WAAW,CAAC,EAAE,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\n/**\n * A simple {@link 
ServiceClientCredential} that authenticates with a username and a password.\n */\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n /**\n * Username\n */\n userName: string;\n\n /**\n * Password\n */\n password: string;\n\n /**\n * Authorization scheme. Defaults to \"Basic\".\n * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes\n */\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js new file mode 100644 index 0000000000..3cfbd7a032 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map new file mode 100644 index 0000000000..64678c652e --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.js","sourceRoot":"","sources":["../../../src/credentials/credentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A function that receives a challenge and resolves a promise with a string token.\n * @deprecated The Authenticator type is not currently in use.\n */\nexport type Authenticator = (challenge: unknown) => Promise;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js new file mode 100644 index 0000000000..f44565f43f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=serviceClientCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map new file mode 100644 index 0000000000..48908f3918 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.js","sourceRoot":"","sources":["../../../src/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header).\n */\nexport interface ServiceClientCredentials {\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike/request to be signed.\n * @returns The signed request object;\n */\n signRequest(webResource: WebResourceLike): Promise;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js new file mode 100644 index 0000000000..ac50243fab --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} +//# sourceMappingURL=topicCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map new file mode 100644 index 0000000000..b4ff2de964 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.js","sourceRoot":"","sources":["../../../src/credentials/topicCredentials.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAA2B,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAEjF;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,iBAAiB;IACrD;;;;OAIG;IACH,YAAY,QAAgB;QAC1B,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,MAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,KAAK,CAAC,OAAO,CAAC,CAAC;IACjB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ApiKeyCredentialOptions, ApiKeyCredentials } from \"./apiKeyCredentials\";\n\n/**\n * A {@link TopicCredentials} object used for Azure Event Grid.\n */\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js new file mode 100644 index 0000000000..1f5e7fafb1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map new file mode 100644 index 0000000000..99be6d9a2a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.js","sourceRoot":"","sources":["../../src/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { XhrHttpClient as DefaultHttpClient } from \"./xhrHttpClient\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js new file mode 100644 index 0000000000..79177d54d9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map new file mode 100644 index 0000000000..4e2660b21b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.js","sourceRoot":"","sources":["../../src/defaultHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { NodeFetchHttpClient as DefaultHttpClient } from \"./nodeFetchHttpClient\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClient.js b/node_modules/@azure/core-http/dist-esm/src/httpClient.js new file mode 100644 index 0000000000..8908881c39 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=httpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map new file mode 100644 index 0000000000..eeb169e529 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.js","sourceRoot":"","sources":["../../src/httpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy } from \"./policies/requestPolicy\";\n\n/**\n * An interface that can send HttpRequests and receive promised HttpResponses.\n */\nexport interface HttpClient extends RequestPolicy {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js new file mode 100644 index 0000000000..48ec59adce --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { DefaultHttpClient } from "./defaultHttpClient"; +let cachedHttpClient; +export function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new DefaultHttpClient(); + } + return cachedHttpClient; +} +//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map new file mode 100644 index 0000000000..fd7907ab37 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClientCache.js","sourceRoot":"","sources":["../../src/httpClientCache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAGxD,IAAI,gBAAwC,CAAC;AAE7C,MAAM,UAAU,0BAA0B;IACxC,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,IAAI,iBAAiB,EAAE,CAAC;KAC5C;IAED,OAAO,gBAAgB,CAAC;AAC1B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\n\nlet cachedHttpClient: HttpClient | undefined;\n\nexport function getCachedDefaultHttpClient(): HttpClient {\n if (!cachedHttpClient) {\n cachedHttpClient = new DefaultHttpClient();\n }\n\n return cachedHttpClient;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js new file mode 100644 index 0000000000..e1e0ebac68 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js @@ -0,0 +1,151 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +export function isHttpHeadersLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.rawHeaders === "function" && + typeof castObject.clone === "function" && + typeof castObject.get === "function" && + typeof castObject.set === "function" && + typeof castObject.contains === "function" && + typeof castObject.remove === "function" && + typeof castObject.headersArray === "function" && + typeof castObject.headerValues === "function" && + typeof castObject.headerNames === "function" && + typeof castObject.toJson === "function") { + return true; + } + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +export class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map new file mode 100644 index 0000000000..2245c9f0e9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.js","sourceRoot":"","sources":["../../src/httpHeaders.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4ED,MAAM,UAAU,iBAAiB,CAAC,MAAgB;IAChD,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAWlB,CAAC;QACF,IACE,OAAO,UAAU,CAAC,UAAU,KAAK,UAAU;YAC3C,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU;YACtC,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;YACpC,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;YACpC,OAAO,UAAU,CAAC,QAAQ,KAAK,UAAU;YACzC,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU;YACvC,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;YAC7C,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;YAC7C,OAAO,UAAU,CAAC,WAAW,KAAK,UAAU;YAC5C,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU,EACvC;YACA,OAAO,IAAI,CAAC;SACb;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,WAAW;IAGtB,YAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,GAAG,CAAC,UAAkB;QAC3B,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IAC5C,CAAC;IAED;;OAEG;IACI,QAAQ,CAAC,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,UAAkB;QAC9B,MAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,UAAU;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;IAC7C,CAAC;IAED;;OAEG;IACI,YAAY;QACjB,MAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACI,YAAY;QACjB,MAAM,YAAY,GAAa,EAAE,CAAC;QAClC,MAAM,OAAO
,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,UAAsC,EAAE;QACpD,MAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,YAAY,EAAE;YACxB,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aACpC;SACF;aAAM;YACL,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aAClD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAC7D,CAAC;IAED;;OAEG;IACI,KAAK;QACV,MAAM,sBAAsB,GAAmB,EAAE,CAAC;QAClD,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,sBAAsB,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SACpD;QACD,OAAO,IAAI,WAAW,CAAC,sBAAsB,CAAC,CAAC;IACjD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string): string {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(options?: { preserveCase?: boolean }): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n rawHeaders: unknown;\n clone: unknown;\n get: unknown;\n set: unknown;\n contains: unknown;\n remove: unknown;\n headersArray: unknown;\n headerValues: unknown;\n headerNames: unknown;\n toJson: unknown;\n };\n if (\n typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\"\n ) {\n return true;\n }\n }\n\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders implements HttpHeadersLike {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n return this.toJson({ preserveCase: true });\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header values that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(options: { preserveCase?: boolean } = {}): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n if (options.preserveCase) {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name] = header.value;\n }\n } else {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[getHeaderKey(header.name)] = header.value;\n }\n }\n return result;\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson({ preserveCase: true }));\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n const resultPreservingCasing: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n resultPreservingCasing[header.name] = header.value;\n }\n return new HttpHeaders(resultPreservingCasing);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js new file mode 100644 index 0000000000..540684d9b4 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=httpOperationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map new file mode 100644 index 0000000000..abf8659979 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.js","sourceRoot":"","sources":["../../src/httpOperationResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { WebResourceLike } from \"./webResource\";\n\n/**\n * The properties on an HTTP response which will always be present.\n */\nexport interface HttpResponse {\n /**\n * The raw request\n */\n request: WebResourceLike;\n\n /**\n * The HTTP response status (e.g. 200)\n */\n status: number;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeadersLike;\n}\n\ndeclare global {\n /**\n * Stub declaration of the browser-only Blob type.\n * Full type information can be obtained by including \"lib\": [\"dom\"] in tsconfig.json.\n */\n interface Blob {}\n}\n\n/**\n * Wrapper object for http request and response. Deserialized object is stored in\n * the `parsedBody` property when the response body is received in JSON or XML.\n */\nexport interface HttpOperationResponse extends HttpResponse {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders?: { [key: string]: any };\n\n /**\n * The response body as text (string format)\n */\n bodyAsText?: string | null;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody?: any;\n\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always undefined in node.js.\n */\n blobBody?: Promise;\n\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n}\n\n/**\n * The flattened response to a REST call.\n * Contains the underlying {@link HttpOperationResponse} as well as\n * the merged properties of the `parsedBody`, `parsedHeaders`, etc.\n */\nexport interface RestResponse {\n /**\n * The underlying HTTP response containing both raw and deserialized response data.\n */\n _response: HttpOperationResponse;\n\n /**\n * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body.\n */\n [key: string]: any;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js new file mode 100644 index 0000000000..4e70194eac --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. 
+ */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +//# sourceMappingURL=httpPipelineLogLevel.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map new file mode 100644 index 0000000000..168e6dcdfe --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.js","sourceRoot":"","sources":["../../src/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAN,IAAY,oBAoBX;AApBD,WAAY,oBAAoB;IAC9B;;OAEG;IACH,6DAAG,CAAA;IAEH;;OAEG;IACH,iEAAK,CAAA;IAEL;;OAEG;IACH,qEAAO,CAAA;IAEP;;OAEG;IACH,+DAAI,CAAA;AACN,CAAC,EApBW,oBAAoB,KAApB,oBAAoB,QAoB/B","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js new file mode 100644 index 0000000000..6f61f81d39 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export class ConsoleHttpPipelineLogger { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel) { + this.minimumLogLevel = minimumLogLevel; + } + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. 
+ */ + log(logLevel, message) { + const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + } +} +//# sourceMappingURL=httpPipelineLogger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map new file mode 100644 index 0000000000..f4119d43ea --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.js","sourceRoot":"","sources":["../../src/httpPipelineLogger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAoB9D;;GAEG;AACH,MAAM,OAAO,yBAAyB;IACpC;;;OAGG;IACH,YAAmB,eAAqC;QAArC,oBAAe,GAAf,eAAe,CAAsB;IAAG,CAAC;IAE5D;;;;OAIG;IACH,GAAG,CAAC,QAA8B,EAAE,OAAe;QACjD,MAAM,UAAU,GAAG,GAAG,oBAAoB,CAAC,QAAQ,CAAC,KAAK,OAAO,EAAE,CAAC;QACnE,QAAQ,QAAQ,EAAE;YAChB,KAAK,oBAAoB,CAAC,KAAK;gBAC7B,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC1B,MAAM;YAER,KAAK,oBAAoB,CAAC,OAAO;gBAC/B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACzB,MAAM;YAER,KAAK,oBAAoB,CAAC,IAAI;gBAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;gBACxB,MAAM;SACT;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpPipelineLogLevel } from \"./httpPipelineLogLevel\";\n\n/**\n * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages\n * that can be used for debugging purposes.\n */\nexport interface HttpPipelineLogger {\n /**\n * The log level threshold for what logs will be logged.\n */\n minimumLogLevel: HttpPipelineLogLevel;\n\n /**\n * Log the provided message.\n * @param logLevel - The HttpLogDetailLevel associated with this message.\n * @param message - The message to log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * A HttpPipelineLogger that will send its logs to the console.\n */\nexport class ConsoleHttpPipelineLogger implements HttpPipelineLogger {\n /**\n * Create a new ConsoleHttpPipelineLogger.\n * @param minimumLogLevel - The log level threshold for what logs will be logged.\n */\n constructor(public minimumLogLevel: HttpPipelineLogLevel) {}\n\n /**\n * Log the provided message.\n * @param logLevel - The HttpLogDetailLevel associated with this message.\n * @param message - The message to log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void {\n const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`;\n switch (logLevel) {\n case HttpPipelineLogLevel.ERROR:\n console.error(logMessage);\n break;\n\n case HttpPipelineLogLevel.WARNING:\n console.warn(logMessage);\n break;\n\n case HttpPipelineLogLevel.INFO:\n console.log(logMessage);\n break;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/log.js b/node_modules/@azure/core-http/dist-esm/src/log.js new file mode 100644 index 0000000000..10dd4222b9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-http"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/log.js.map b/node_modules/@azure/core-http/dist-esm/src/log.js.map new file mode 100644 index 0000000000..52607988c6 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AACnD,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,WAAW,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { createClientLogger } from \"@azure/logger\";\nexport const logger = createClientLogger(\"core-http\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js new file mode 100644 index 0000000000..ef2d1e3e7d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js @@ -0,0 +1,313 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as http from "http"; +import * as https from "https"; +import * as tough from "tough-cookie"; +import { AbortController, AbortError } from "@azure/abort-controller"; +import { HttpHeaders } from "./httpHeaders"; +import { createProxyAgent, isUrlHttps } from "./proxyAgent"; +import { Transform } from "stream"; +import FormData from "form-data"; +import { RestError } from "./restError"; +import { logger } from "./log"; +import node_fetch from "node-fetch"; +function getCachedAgent(isHttps, agentCache) { + return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; +} +export class ReportTransform extends Transform { + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; + this.loadedBytes = 0; + } + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(undefined); + } +} +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +export class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. 
+ */ + async sendRequest(httpRequest) { + var _a; + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + const abortController = new AbortController(); + let abortListener; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(() => { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + const formData = httpRequest.formData; + const requestForm = new FormData(); + const appendFormValue = (key, value) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + const onUploadProgress = httpRequest.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const platformSpecificRequestInit = await this.prepareRequest(httpRequest); + const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + let operationResponse; + try { + const response = await this.fetch(httpRequest.url, requestInit); + const headers = parseHeaders(response.headers); + const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined, + bodyAsText: !streaming ? 
await response.text() : undefined, + }; + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + const length = parseInt(headers.get("Content-Length")) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + await this.processRequest(operationResponse); + return operationResponse; + } + catch (error) { + const fetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new AbortError("The operation was aborted."); + } + throw fetchError; + } + finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + getOrCreateAgent(httpRequest) { + var _a; + const isHttps = isUrlHttps(httpRequest.url); + // At the moment, proxy settings and keepAlive are mutually + // exclusive because the 'tunnel' library currently lacks the + // ability to create a proxy with keepAlive turned on. + if (httpRequest.proxySettings) { + const { host, port, username, password } = httpRequest.proxySettings; + const key = `${host}:${port}:${username}:${password}`; + const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; + let agent = getCachedAgent(isHttps, proxyAgents); + if (agent) { + return agent; + } + const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + agent = tunnel.agent; + if (tunnel.isHttps) { + proxyAgents.httpsAgent = tunnel.agent; + } + else { + proxyAgents.httpAgent = tunnel.agent; + } + this.proxyAgentMap.set(key, proxyAgents); + return agent; + } + else if (httpRequest.keepAlive) { + let agent = getCachedAgent(isHttps, this.keepAliveAgents); + if (agent) { + return agent; + } + const agentOptions = { + keepAlive: httpRequest.keepAlive, + }; + if (isHttps) { + agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); + } + else { + agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); + } + return agent; + } + else { + return isHttps ? https.globalAgent : http.globalAgent; + } + } + /** + * Uses `node-fetch` to perform the request. + */ + // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs + async fetch(input, init) { + return node_fetch(input, init); + } + /** + * Prepares a request based on the provided web resource. 
+ */ + async prepareRequest(httpRequest) { + const requestInit = {}; + if (this.cookieJar && !httpRequest.headers.get("Cookie")) { + const cookieString = await new Promise((resolve, reject) => { + this.cookieJar.getCookieString(httpRequest.url, (err, cookie) => { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + }); + httpRequest.headers.set("Cookie", cookieString); + } + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return requestInit; + } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + async processRequest(operationResponse) { + if (this.cookieJar) { + const setCookieHeader = operationResponse.headers.get("Set-Cookie"); + if (setCookieHeader !== undefined) { + await new Promise((resolve, reject) => { + this.cookieJar.setCookie(setCookieHeader, operationResponse.request.url, { ignoreError: true }, (err) => { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + }); + } + } + } +} +//# sourceMappingURL=nodeFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map new file mode 100644 index 0000000000..2ccc94e8b8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../src/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACtE,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAc,gBAAgB,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AACxE,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAE7C,OAAO,QAAQ,MAAM,WAAW,CAAC;AAGjC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAOpC,SAAS,cAAc,CACrB,OAAgB,EAChB,UAAsB;IAEtB,OAAO,OAAO,CAAC,CAAC,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC;AAChE,CAAC;AA+BD,MAAM,OAAO,eAAgB,SAAQ,SAAS;IAS5C,YAAoB,gBAA2D;QAC7E,KAAK,EAAE,CAAC;QADU,qBAAgB,GAAhB,gBAAgB,CAA2C;QARvE,gBAAW,GAAW,CAAC,CAAC;IAUhC,CAAC;IATD,UAAU,CAAC,KAAsB,EAAE,SAAiB,EAAE,QAA4B;QAChF,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACjB,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,gBAAiB,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAC1D,QAAQ,CAAC,SAAS,CAAC,CAAC;IACtB,CAAC;CAKF;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB,EAAE,OAAyB;IACnE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;QAC7B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,EAAE;YACxB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,KAAK,EAAE,CAAC;YACjB,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;QACH,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAChC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,YAAY,CAAC,OAAgB;IAC3C,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,EAAE;QAC7B,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,mBAAmB;IAAhC;QA8LE,wFAAwF;QAChF,kBAAa,GAA4B,IAAI,GAAG,EAAE,CAAC;QACnD,oBAAe,GAAe,EAAE,CAAC;QAExB,cAAS,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAmHnF,CAAC;IApTC;;;;OAIG;IACH,KAAK,CAA
C,WAAW,CAAC,WAA4B;;QAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;YACnD,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;QAC9C,IAAI,aAAiD,CAAC;QACtD,IAAI,WAAW,CAAC,WAAW,EAAE;YAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gBACnC,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,aAAa,GAAG,CAAC,KAAY,EAAE,EAAE;gBAC/B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oBAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iBACzB;YACH,CAAC,CAAC;YACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;SAClE;QAED,IAAI,WAAW,CAAC,OAAO,EAAE;YACvB,UAAU,CAAC,GAAG,EAAE;gBACd,eAAe,CAAC,KAAK,EAAE,CAAC;YAC1B,CAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;SACzB;QAED,IAAI,WAAW,CAAC,QAAQ,EAAE;YACxB,MAAM,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;YAC3C,MAAM,WAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,EAAQ,EAAE;gBACxD,0FAA0F;gBAC1F,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oBAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iBACjB;gBACD,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;oBACA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBAC3C,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,WAAW,CAAC,IAAI,GAAG,WAAW,CAAC;YAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;YACjC,MAAM,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oBACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,iCAAiC,WAAW,CAAC,WAAW,EAAE,EAAE,CAC7D,CAAC;iBACH;qBAAM;oBACL,kEAAkE;oBAClE,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iBAC5C;aACF;SACF;QAED,IAAI,IAAI,GAAG,WAAW,CAAC,IAAI;YACzB,CAAC,CAAC,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;gBACtC,CAAC,CAAC,WAAW,CAAC,IAAI,EAAE;gBACpB,CAAC,CAAC,WAAW,CAAC,IAAI;YACpB,CAAC,CAAC,SAAS,CAAC;QACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;YACpD,MAAM,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;YACtD,MAAM,kBAAkB,GAAG,IAAI,eAAe,CAAC,gBAAgB,CAAC,CAAC;YACjE,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gBAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;aAC/B;iBAAM;gBACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI,GAAG,kBAAkB,CAAC;SAC3B;QAED,MAAM,2BAA2B,GAAyB,MAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,CAAC;QAEF,MAAM,WAAW,mBACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;QAEF,IAAI,iBAAoD,CAAC;QACzD,IAAI;YACF,MAAM,QAAQ,GAAmB,MAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEhF,MAAM,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAE/C,MAAM,SAAS,GACb,CAAA,MAAA,WAAW,CAAC,yBAAyB,0CAAE,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAC3D,WAAW,CAAC,kBAAkB,CAAC;YAEjC,iBAAiB,GAAG;gBAClB,OAAO,EAAE,OAAO;gBAChB,OAAO,EAAE,WAAW;gBACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;gBACvB,kBAAkB,EAAE,SAAS;oBAC3B,CAAC,CAAE,QAAQ,CAAC,IAAyC;oBACrD,CAAC,CAAC,SAAS;gBACb,UAAU,EAAE,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,SAAS;aAC3D,CAAC;YAEF,MAAM,kBAAkB,GAAG,WAAW,CAAC,kBAAkB,CAAC;YAC1D,IAAI,kBAAkB,EAAE;gBACtB,MAAM,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;gBAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;oBAClC,MAAM,oBAAoB,GAAG,IAAI,eAAe,CAAC,kBAAkB,CAAC,CAAC;oBACrE,YAAY,CA
AC,IAAI,CAAC,oBAAoB,CAAC,CAAC;oBACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;iBAC7D;qBAAM;oBACL,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;oBACrE,IAAI,MAAM,EAAE;wBACV,wEAAwE;wBACxE,kBAAkB,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;qBAC7C;iBACF;aACF;YAED,MAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,CAAC;YAE7C,OAAO,iBAAiB,CAAC;SAC1B;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,UAAU,GAAe,KAAK,CAAC;YACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;gBACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;gBACxC,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,MAAM,UAAU,CAAC;SAClB;gBAAS;YACR,0BAA0B;YAC1B,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;gBAC5C,IAAI,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;oBAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;iBAC3C;gBACD,IAAI,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;oBAC3D,kBAAkB,GAAG,gBAAgB,CACnC,iBAAkB,CAAC,kBAAkB,EACrC,eAAe,CAChB,CAAC;iBACH;gBAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;qBAChD,IAAI,CAAC,GAAG,EAAE;;oBACT,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,CAAC,CAAC;oBACtE,OAAO;gBACT,CAAC,CAAC;qBACD,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;oBACX,MAAM,CAAC,OAAO,CAAC,qDAAqD,EAAE,CAAC,CAAC,CAAC;gBAC3E,CAAC,CAAC,CAAC;aACN;SACF;IACH,CAAC;IAQO,gBAAgB,CAAC,WAA4B;;QACnD,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QAE5C,2DAA2D;QAC3D,6DAA6D;QAC7D,sDAAsD;QACtD,IAAI,WAAW,CAAC,aAAa,EAAE;YAC7B,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC,aAAa,CAAC;YACrE,MAAM,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,QAAQ,IAAI,QAAQ,EAAE,CAAC;YACtD,MAAM,WAAW,GAAG,MAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,mCAAI,EAAE,CAAC;YAEtD,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;YACjD,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;YAED,MAAM,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;YAEF,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;YACrB,IAAI,MAAM,CAAC,OAAO,EAAE;gBAClB,WAAW,CAAC,UAAU,GAAG,MAAM,CAAC,KAAoB,CAAC;aACtD;iBAAM;gBACL,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;aACtC;YACD,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEzC,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,WAAW,CAAC,SAAS,EAAE;YAChC,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;YAC1D,IAAI,KAAK,EAAE;gBACT,OAAO,KAAK,CAAC;aACd;YAED,MAAM,YAAY,GAA2C;gBAC3D,SAAS,EAAE,WAAW,CAAC,SAAS;aACjC,CAAC;YAEF,IAAI,OAAO,EAAE;gBACX,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,GAAG,IAAI,KAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACzE;iBAAM;gBACL,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACvE;YAED,OAAO,KAAK,CAAC;SACd;aAAM;YACL,OAAO,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;SACvD;IACH,CAAC;IAED;;OAEG;IACH,6EAA6E;IAC7E,KAAK,CAAC,KAAK,CAAC,KAAwB,EAAE,IAAwB;QAC5D,OAAO,UAAU,CAAC,KAAK,EAAE,IAAI,CAAuC,CAAC;IACvE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,cAAc,CAAC,WAA4B;QAC/C,MAAM,WAAW,GAA+D,EAAE,CAAC;QAEnF,IAAI,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YACxD,MAAM,YAAY,GAAG,MAAM,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACjE,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,EAAE;oBAC/D,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAC;qBACb;yBAAM;wBACL,OAAO,CAAC,MAAM,CAAC,CAAC;qBACjB;gBACH,CAAC,CAAC,CAAC;YACL,CAAC,CAAC,CAAC;YAEH,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;SACjD;QAED,wBAAwB;QACxB,WAAW,CAAC,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;QAEvD,WAAW,CAAC,QAAQ,GAAG,WAAW,CAAC,kBAAkB,CAAC;QAEtD
,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,cAAc,CAAC,iBAAwC;QAC3D,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,MAAM,eAAe,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;YACpE,IAAI,eAAe,KAAK,SAAS,EAAE;gBACjC,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;oBAC1C,IAAI,CAAC,SAAU,CAAC,SAAS,CACvB,eAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,CAAC,GAAG,EAAE,EAAE;wBACN,IAAI,GAAG,EAAE;4BACP,MAAM,CAAC,GAAG,CAAC,CAAC;yBACb;6BAAM;4BACL,OAAO,EAAE,CAAC;yBACX;oBACH,CAAC,CACF,CAAC;gBACJ,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tough from \"tough-cookie\";\nimport { AbortController, AbortError } from \"@azure/abort-controller\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxyAgent, createProxyAgent, isUrlHttps } from \"./proxyAgent\";\nimport { Readable, Transform } from \"stream\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport FormData from \"form-data\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\nimport { logger } from \"./log\";\nimport node_fetch from \"node-fetch\";\n\ninterface AgentCache {\n httpAgent?: http.Agent;\n httpsAgent?: https.Agent;\n}\n\nfunction getCachedAgent(\n isHttps: boolean,\n agentCache: AgentCache\n): http.Agent | https.Agent | undefined {\n return isHttps ? agentCache.httpsAgent : agentCache.httpAgent;\n}\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\n/**\n * String URLs used when calling to `fetch()`.\n */\nexport type CommonRequestInfo = string;\n\n/**\n * An object containing information about the outgoing HTTP request.\n */\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\n/**\n * An object containing information about the incoming HTTP response.\n */\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport class ReportTransform extends Transform {\n private loadedBytes: number = 0;\n _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback!({ loadedBytes: this.loadedBytes });\n callback(undefined);\n }\n\n constructor(private progressCallback: (progress: TransferProgressEvent) => void) {\n super();\n }\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable, aborter?: AbortController): Promise {\n return new Promise((resolve) => {\n stream.once(\"close\", () => {\n aborter?.abort();\n resolve();\n });\n stream.once(\"end\", resolve);\n stream.once(\"error\", resolve);\n });\n}\n\n/**\n * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike}\n */\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n\n/**\n * An HTTP client that uses `node-fetch`.\n */\nexport class NodeFetchHttpClient implements HttpClient {\n /**\n * Provides minimum viable error handling and the logic that executes the abstract methods.\n * @param httpRequest - Object 
representing the outgoing HTTP request.\n * @returns An object representing the incoming HTTP response.\n */\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n const onUploadProgress = httpRequest.onUploadProgress;\n const uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n signal: abortController.signal,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n\n const streaming =\n httpRequest.streamResponseStatusCodes?.has(response.status) ||\n httpRequest.streamResponseBody;\n\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? 
(response.body as unknown as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !streaming ? await response.text() : undefined,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n const downloadReportStream = new ReportTransform(onDownloadProgress);\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) || undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error: any) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(\n operationResponse!.readableStreamBody,\n abortController\n );\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((e) => {\n logger.warning(\"Error when cleaning up abortListener on httpRequest\", e);\n });\n }\n }\n }\n\n // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent\n private proxyAgentMap: Map = new Map();\n private keepAliveAgents: AgentCache = {};\n\n private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true });\n\n private getOrCreateAgent(httpRequest: WebResourceLike): http.Agent | https.Agent {\n const isHttps = isUrlHttps(httpRequest.url);\n\n // At the moment, proxy settings and keepAlive are mutually\n // exclusive because the 'tunnel' library currently lacks the\n // ability to create a proxy with keepAlive turned on.\n if (httpRequest.proxySettings) {\n const { host, port, username, password } = httpRequest.proxySettings;\n const key = `${host}:${port}:${username}:${password}`;\n const proxyAgents = this.proxyAgentMap.get(key) ?? 
{};\n\n let agent = getCachedAgent(isHttps, proxyAgents);\n if (agent) {\n return agent;\n }\n\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n\n agent = tunnel.agent;\n if (tunnel.isHttps) {\n proxyAgents.httpsAgent = tunnel.agent as https.Agent;\n } else {\n proxyAgents.httpAgent = tunnel.agent;\n }\n this.proxyAgentMap.set(key, proxyAgents);\n\n return agent;\n } else if (httpRequest.keepAlive) {\n let agent = getCachedAgent(isHttps, this.keepAliveAgents);\n if (agent) {\n return agent;\n }\n\n const agentOptions: http.AgentOptions | https.AgentOptions = {\n keepAlive: httpRequest.keepAlive,\n };\n\n if (isHttps) {\n agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions);\n } else {\n agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions);\n }\n\n return agent;\n } else {\n return isHttps ? https.globalAgent : http.globalAgent;\n }\n }\n\n /**\n * Uses `node-fetch` to perform the request.\n */\n // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return node_fetch(input, init) as unknown as Promise;\n }\n\n /**\n * Prepares a request based on the provided web resource.\n */\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n if (this.cookieJar && !httpRequest.headers.get(\"Cookie\")) {\n const cookieString = await new Promise((resolve, reject) => {\n this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => {\n if (err) {\n reject(err);\n } else {\n resolve(cookie);\n }\n });\n });\n\n httpRequest.headers.set(\"Cookie\", cookieString);\n }\n\n // Set the http(s) agent\n requestInit.agent = this.getOrCreateAgent(httpRequest);\n\n requestInit.compress = httpRequest.decompressResponse;\n\n return requestInit;\n }\n\n /**\n * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a \"Set-Cookie\" header.\n */\n async processRequest(operationResponse: HttpOperationResponse): Promise {\n if (this.cookieJar) {\n const setCookieHeader = operationResponse.headers.get(\"Set-Cookie\");\n if (setCookieHeader !== undefined) {\n await new Promise((resolve, reject) => {\n this.cookieJar!.setCookie(\n setCookieHeader,\n operationResponse.request.url,\n { ignoreError: true },\n (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n }\n );\n });\n }\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationArguments.js b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js new file mode 100644 index 0000000000..21722f3c89 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=operationArguments.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationArguments.js.map b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js.map new file mode 100644 index 0000000000..388d777dd4 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.js","sourceRoot":"","sources":["../../src/operationArguments.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestOptionsBase } from \"./webResource\";\n\n/**\n * A collection of properties that apply to a single invocation of an operation.\n */\nexport interface OperationArguments {\n /**\n * The parameters that were passed to the operation method.\n */\n [parameterName: string]: any;\n\n /**\n * The optional arugments that are provided to an operation.\n */\n options?: RequestOptionsBase;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationOptions.js b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js new file mode 100644 index 0000000000..041c56bcb1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { __rest } from "tslib"; +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export function operationOptionsToRequestOptionsBase(opts) { + const { requestOptions, tracingOptions } = opts, additionalOptions = __rest(opts, ["requestOptions", "tracingOptions"]); + let result = additionalOptions; + if (requestOptions) { + result = Object.assign(Object.assign({}, result), requestOptions); + } + if (tracingOptions) { + result.tracingContext = tracingOptions.tracingContext; + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? 
void 0 : tracingOptions.spanOptions; + } + return result; +} +//# sourceMappingURL=operationOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationOptions.js.map b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js.map new file mode 100644 index 0000000000..293a7ad37a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationOptions.js","sourceRoot":"","sources":["../../src/operationOptions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAuDlC;;;;GAIG;AACH,MAAM,UAAU,oCAAoC,CAClD,IAAO;IAEP,MAAM,EAAE,cAAc,EAAE,cAAc,KAA2B,IAAI,EAA1B,iBAAiB,UAAK,IAAI,EAA/D,oCAAwD,CAAO,CAAC;IAEtE,IAAI,MAAM,GAAuB,iBAAiB,CAAC;IAEnD,IAAI,cAAc,EAAE;QAClB,MAAM,mCAAQ,MAAM,GAAK,cAAc,CAAE,CAAC;KAC3C;IAED,IAAI,cAAc,EAAE;QAClB,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC,cAAc,CAAC;QACtD,+HAA+H;QAC/H,MAAM,CAAC,WAAW,GAAI,cAAsB,aAAtB,cAAc,uBAAd,cAAc,CAAU,WAAW,CAAC;KAC3D;IAED,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestOptionsBase, TransferProgressEvent } from \"./webResource\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * The base options type for all operations.\n */\nexport interface OperationOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: OperationRequestOptions;\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * Options that allow configuring the handling of HTTP requests made by an SDK operation.\n */\nexport interface OperationRequestOptions {\n /**\n * User defined custom request headers that will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n}\n\n/**\n * Converts an OperationOptions to a RequestOptionsBase\n *\n * @param opts - OperationOptions object to convert to RequestOptionsBase\n */\nexport function operationOptionsToRequestOptionsBase(\n opts: T\n): RequestOptionsBase {\n const { requestOptions, tracingOptions, ...additionalOptions } = opts;\n\n let result: RequestOptionsBase = additionalOptions;\n\n if (requestOptions) {\n result = { ...result, ...requestOptions };\n }\n\n if (tracingOptions) {\n result.tracingContext = tracingOptions.tracingContext;\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n result.spanOptions = (tracingOptions as any)?.spanOptions;\n }\n\n return result;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationParameter.js b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js new file mode 100644 index 0000000000..1361e298ae --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +export function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=operationParameter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationParameter.js.map b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js.map new file mode 100644 index 0000000000..f3a736a467 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.js","sourceRoot":"","sources":["../../src/operationParameter.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAqDlC;;;;GAIG;AACH,MAAM,UAAU,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,MAAM,UAAU,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Mapper } from \"./serializer\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\n\n/**\n * A path which describes how to access a particular property in a given object data source. 
May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values.\n */\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter - The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationResponse.js b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js new file mode 100644 index 0000000000..6781aacaef --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=operationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationResponse.js.map b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js.map new file mode 100644 index 0000000000..bd4a1398a8 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.js","sourceRoot":"","sources":["../../src/operationResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Mapper } from \"./serializer\";\n\n/**\n * An OperationResponse that can be returned from an operation request for a single status code.\n */\nexport interface OperationResponse {\n /**\n * The mapper that will be used to deserialize the response headers.\n */\n headersMapper?: Mapper;\n\n /**\n * The mapper that will be used to deserialize the response body.\n */\n bodyMapper?: Mapper;\n\n /**\n * Indicates if this is an error response\n */\n isError?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationSpec.js b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js new file mode 100644 index 0000000000..2ba9e6c89b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { MapperType } from "./serializer"; +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +//# sourceMappingURL=operationSpec.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationSpec.js.map b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js.map new file mode 100644 index 0000000000..2843db2bae --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.js","sourceRoot":"","sources":["../../src/operationSpec.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAc,MAAM,cAAc,CAAC;AA8FtD;;;GAGG;AACH,MAAM,UAAU,4BAA4B,CAAC,aAA4B;IACvE,MAAM,MAAM,GAAG,IAAI,GAAG,EAAU,CAAC;IACjC,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,MAAM,iBAAiB,GAAG,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QAC9D,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;SAChC;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { MapperType, Serializer } from \"./serializer\";\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { HttpMethods } from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\n\n/**\n * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload 
returnable by the {@link ServiceClient}.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The media type of the request body.\n * This value can be used to aide in serialization if it is provided.\n */\n readonly mediaType?:\n | \"json\"\n | \"xml\"\n | \"form\"\n | \"binary\"\n | \"multipart\"\n | \"text\"\n | \"unknown\"\n | string;\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\n/**\n * Gets the list of status codes for streaming responses.\n * @internal\n */\nexport function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set {\n const result = new Set();\n for (const statusCode in operationSpec.responses) {\n const operationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js new file mode 100644 index 0000000000..2abe14a881 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=pipelineOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js.map b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js.map new file mode 100644 index 0000000000..d1dc2e2567 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pipelineOptions.js","sourceRoot":"","sources":["../../src/pipelineOptions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DeserializationOptions } from \"./policies/deserializationPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { KeepAliveOptions } from \"./policies/keepAlivePolicy\";\nimport { LogPolicyOptions } from \"./policies/logPolicy\";\nimport { ProxyOptions } from \"./serviceClient\";\nimport { RedirectOptions } from \"./policies/redirectPolicy\";\nimport { RetryOptions } from \"./policies/exponentialRetryPolicy\";\nimport { UserAgentOptions } from \"./policies/userAgentPolicy\";\n\n/**\n * Defines options that are used to configure the HTTP pipeline for\n * an SDK client.\n */\nexport interface PipelineOptions {\n /**\n * The HttpClient implementation to use for outgoing HTTP requests. Defaults\n * to DefaultHttpClient.\n */\n httpClient?: HttpClient;\n\n /**\n * Options that control how to retry failed requests.\n */\n retryOptions?: RetryOptions;\n\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n\n /**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\n keepAliveOptions?: KeepAliveOptions;\n\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n}\n\n/**\n * Defines options that are used to configure internal options of\n * the HTTP pipeline for an SDK client.\n */\nexport interface InternalPipelineOptions extends PipelineOptions {\n /**\n * Options to configure API response deserialization.\n */\n deserializationOptions?: DeserializationOptions;\n\n /**\n * Options to configure request/response logging.\n */\n loggingOptions?: LogPolicyOptions;\n\n /**\n * Configure whether to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n\n /**\n * Send JSON Array payloads as NDJSON.\n */\n sendStreamingJson?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js new file mode 100644 index 0000000000..22abdb45f5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, } from "../policies/requestPolicy"; +import { Constants } from "../util/constants"; +import { delay } from "../util/delay"; +// Default options for the cycler if none are provided +export const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
+ */ +export function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js.map new file mode 100644 index 0000000000..b0ac35f0d5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bearerTokenAuthenticationPolicy.js","sourceRoot":"","sources":["../../../src/policies/bearerTokenAuthenticationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EACL,iBAAiB,GAIlB,MAAM,2BAA2B,CAAC;AACnC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAiCtC,sDAAsD;AACtD,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,uBAAuB,EAAE,IAAI;IAC7B,iBAAiB,EAAE,IAAI;IACvB,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC,EAAE,oCAAoC;CACvE,CAAC;AAEF;;;;;;;;;;;;GAYG;AACH,KAAK,UAAU,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB;IAEnB,4EAA4E;IAC5E,eAAe;IACf,KAAK,UAAU,iBAAiB;QAC9B,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;aAC/B;YAAC,WAAM;gBACN,OAAO,IAAI,CAAC;aACb;SACF;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;YAE1C,6CAA6C;YAC7C,IAAI,UAAU,KAAK,IAAI,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,OAAO,UAAU,CAAC;SACnB;IACH,CAAC;IAED,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;QACrB,MAAM,KAAK,CAAC,iBAAiB,CAAC,CAAC;QAE/B,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;KACnC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;IAErC,MAAM,OAAO,mCACR,sBAAsB,GACtB,kBAAkB,CACtB,CAAC;IAEF;;;OAGG;IACH,MAAM,MAAM,GAAG;QACb;;WAEG;QACH,IAAI,YAAY;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;QAChC,CAAC;QACD;;;WAGG;QACH,IAAI,aAAa;;YACf,OAAO,CACL,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,CAAC,CAAC,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1E,CAAC;QACJ,CAAC;QACD;;;WAGG;QACH,IAAI,WAAW;YACb,OAAO,CACL,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1F,CAAC;QACJ,CAAC;KACF,CAAC;IAEF;;;OAGG;IACH,SAAS,OAAO,CAAC,eAAgC;;QAC/C,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;YACxB,yDAAyD;YACzD,MAAM,iBAAiB,GAAG,GAAgC,EAAE,CAC1D,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;YAE/C,wEAAwE;YACxE,6CAA6C;YAC7C,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;YACzB,+DAA+D;YAC/D,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCA
AI,IAAI,CAAC,GAAG,EAAE,CACxC;iBACE,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;gBACf,OAAO,KAAK,CAAC;YACf,CAAC,CAAC;iBACD,KAAK,CAAC,CAAC,MAAM,EAAE,EAAE;gBAChB,sEAAsE;gBACtE,qEAAqE;gBACrE,mBAAmB;gBACnB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;gBACb,MAAM,MAAM,CAAC;YACf,CAAC,CAAC,CAAC;SACN;QAED,OAAO,aAAqC,CAAC;IAC/C,CAAC;IAED,OAAO,KAAK,EAAE,YAA6B,EAAwB,EAAE;QACnE,EAAE;QACF,gBAAgB;QAChB,+DAA+D;QAC/D,6CAA6C;QAC7C,+DAA+D;QAC/D,yCAAyC;QACzC,6DAA6D;QAC7D,YAAY;QACZ,EAAE;QAEF,IAAI,MAAM,CAAC,WAAW;YAAE,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;SACvB;QAED,OAAO,KAAoB,CAAC;IAC9B,CAAC,CAAC;AACJ,CAAC;AAED,aAAa;AAEb;;;;;;GAMG;AACH,MAAM,UAAU,+BAA+B,CAC7C,UAA2B,EAC3B,MAAyB;IAEzB,wFAAwF;IACxF,MAAM,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,eAAe,CAAC,CAAC;IAEvE,MAAM,+BAAgC,SAAQ,iBAAiB;QAC7D,YAAmB,UAAyB,EAAE,OAA6B;YACzE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC7B,CAAC;QAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;YACnD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;gBACzD,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;aACH;YAED,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,QAAQ,CAAC;gBAC/B,WAAW,EAAE,WAAW,CAAC,WAAW;gBACpC,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;iBAC3C;aACF,CAAC,CAAC;YACH,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,UAAU,KAAK,EAAE,CAAC,CAAC;YACpF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;QACnD,CAAC;KACF;IAED,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,+BAA+B,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAClE,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"../policies/requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nexport const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a 
function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\nexport function bearerTokenAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n const getToken = createTokenCycler(credential, scopes /* , options */);\n\n class BearerTokenAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const { token } = await getToken({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n });\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new BearerTokenAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js 
b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js new file mode 100644 index 0000000000..1390c84181 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js @@ -0,0 +1,239 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { XML_CHARKEY } from "../util/serializer.common"; +import { MapperType } from "../serializer"; +import { RestError } from "../restError"; +import { parseXML } from "../util/xml"; +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy(deserializationContentTypes, parsingOptions) { + return { + create: (nextPolicy, options) => { + return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); + }, + }; +} +export const defaultJsonContentTypes = ["application/json", "text/json"]; +export const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +export const DefaultDeserializationOptions = { + expectedContentTypes: { + json: defaultJsonContentTypes, + xml: defaultXmlContentTypes, + }, +}; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export class DeserializationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { + var _a; + super(nextPolicy, requestPolicyOptions); + this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + } + async sendRequest(request) { + return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { + xmlCharKey: this.xmlCharKey, + })); + } +} +function getOperationResponse(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationSpec = request.operationSpec; + if (operationSpec) { + const operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const shouldDeserialize = parsedResponse.request.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. 
+ * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationSpec = parsedResponse.request.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponse(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (innerError) { + const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders", options); + } + } + return parsedResponse; + }); +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; + const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(parsedResponse.status)) || + parsedResponse.request.streamResponseBody; + const initialErrorMessage = streaming + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let parsedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedBody === "object" ? parsedBody[defaultBodyMapper.xmlElementName] : []; + } + parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); + } + const internalError = parsedBody.error || parsedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = parsedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { + var _a; + const errorHandler = (err) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); + return Promise.reject(e); + }; + const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || + operationResponse.request.streamResponseBody; + if (!streaming && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? 
[] + : contentType.split(";").map((component) => component.toLowerCase()); + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text, opts) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js.map new file mode 100644 index 0000000000..900169ba27 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.js","sourceRoot":"","sources":["../../../src/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAqB,WAAW,EAAE,MAAM,2BAA2B,CAAC;AAE3E,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAG3C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAEzC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AA+BvC;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CACnC,2BAAyD,EACzD,cAAkC;IAElC,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,qBAAqB,CAC9B,UAAU,EACV,OAAO,EACP,2BAA2B,EAC3B,cAAc,CACf,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,MAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,MAAM,CAAC,MAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF,MAAM,CAAC,MAAM,6BAA6B,GAA2B;IACnE,oBAAoB,EAAE;QACpB,IAAI,EAAE,uBAAuB;QAC7B,GAAG,EAAE,sBAAsB;KAC5B;CACF,CAAC;AAEF;;;GAGG;AACH,MAAM,OAAO,qBAAsB,SAAQ,iBAAiB;IAK1D,YACE,UAAyB,EACzB,oBAA0C,EAC1C,2BAAyD,EACzD,iBAAoC,EAAE;;QAEtC,KAAK,CAAC,UAAU,EAAE,oBAAoB,CAAC,CAAC;QAExC,IAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,CAAC,IAAI,uBAAuB,CAAC;QAC/F,IAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,CAAC,IAAI,sBAAsB,CAAC;QAC7F,IAAI,CAAC,UAAU,GAAG,MAAA,cAAc,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC7D,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAA+B,EAAE,EAAE,CACpF,uBAAuB,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,eAAe,EAAE,QAAQ,EAAE;YAC7E,UAAU,EAAE,IAAI,CAAC,UAAU;SAC5B,CAAC,CACH,CAAC;IACJ,CAAC;CACF;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,MAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,MAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,MAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,MAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B,EAC/B,UAA6B,EAAE;;IAE/B,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;QAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;QACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;KAC9C,CAA
C;IACF,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,EAAE,cAAc,CAAC,CAAC,IAAI,CAC5E,CAAC,cAAc,EAAE,EAAE;QACjB,IAAI,CAAC,yBAAyB,CAAC,cAAc,CAAC,EAAE;YAC9C,OAAO,cAAc,CAAC;SACvB;QAED,MAAM,aAAa,GAAG,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;QAC3D,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE;YAC9C,OAAO,cAAc,CAAC;SACvB;QAED,MAAM,YAAY,GAAG,oBAAoB,CAAC,cAAc,CAAC,CAAC;QAE1D,MAAM,EAAE,KAAK,EAAE,oBAAoB,EAAE,GAAG,mBAAmB,CACzD,cAAc,EACd,aAAa,EACb,YAAY,CACb,CAAC;QACF,IAAI,KAAK,EAAE;YACT,MAAM,KAAK,CAAC;SACb;aAAM,IAAI,oBAAoB,EAAE;YAC/B,OAAO,cAAc,CAAC;SACvB;QAED,oEAAoE;QACpE,sCAAsC;QACtC,IAAI,YAAY,EAAE;YAChB,IAAI,YAAY,CAAC,UAAU,EAAE;gBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;gBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBACpF,kBAAkB;wBAChB,OAAO,kBAAkB,KAAK,QAAQ;4BACpC,CAAC,CAAC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;4BAC7D,CAAC,CAAC,EAAE,CAAC;iBACV;gBACD,IAAI;oBACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,EACzB,OAAO,CACR,CAAC;iBACH;gBAAC,OAAO,UAAe,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,SAAS,UAAU,iDAAiD,cAAc,CAAC,UAAU,EAAE,EAC/F,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;oBACF,MAAM,SAAS,CAAC;iBACjB;aACF;iBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;gBAC9C,uGAAuG;gBACvG,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;aAC7E;YAED,IAAI,YAAY,CAAC,aAAa,EAAE;gBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,EAC5B,OAAO,CACR,CAAC;aACH;SACF;QAED,OAAO,cAAc,CAAC;IACxB,CAAC,CACF,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAAC,aAA4B;IACxD,MAAM,mBAAmB,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;IACjE,OAAO,CACL,mBAAmB,CAAC,MAAM,KAAK,CAAC;QAChC,CAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAC3E,CAAC;AACJ,CAAC;AAED,SAAS,mBAAmB,CAC1B,cAAqC,EACrC,aAA4B,EAC5B,YAA2C;;IAE3C,MAAM,iBAAiB,GAAG,GAAG,IAAI,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,MAAM,GAAG,GAAG,CAAC;IACtF,MAAM,oBAAoB,GAAY,oBAAoB,CAAC,aAAa,CAAC;QACvE,CAAC,CAAC,iBAAiB;QACnB,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;IAEnB,IAAI,oBAAoB,EAAE;QACxB,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE;gBACzB,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;aACrD;SACF;aAAM;YACL,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;SACrD;KACF;IAED,MAAM,iBAAiB,GAAG,YAAY,aAAZ,YAAY,cAAZ,YAAY,GAAI,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;IAC1E,MAAM,SAAS,GACb,CAAA,MAAA,cAAc,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,cAAc,CAAC,MAAM,CAAC;QAC5E,cAAc,CAAC,OAAO,CAAC,kBAAkB,CAAC;IAC5C,MAAM,mBAAmB,GAAG,SAAS;QACnC,CAAC,CAAC,2BAA2B,cAAc,CAAC,MAAM,EAAE;QACpD,CAAC,CAAE,cAAc,CAAC,UAAqB,CAAC;IAE1C,MAAM,KAAK,GAAG,IAAI,SAAS,CACzB,mBAAmB,EACnB,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;IAEF,yFAAyF;IACzF,sDAAsD;IACtD,IAAI,CAAC,iBAAiB,EAAE;QACtB,MAAM,KAAK,CAAC;KACb;IAED,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,UAAU,CAAC;IACvD,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,aAAa,CAAC;IAE7D,IAAI;QACF,iFAAiF;QACjF,mDAAmD;QACnD,IAAI,cAAc,CAAC,UAAU,EAAE;YAC7B,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;YAC7C,IAAI,WAAW,CAAC;YAChB,IAAI,iBAAiB,EAAE;gBACrB,IAAI,kBAAkB,GAAQ,UAAU,CAAC;gBACzC,IAAI,aAAa,CAAC,KAAK,IAAI,iBAAiB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBAC9E,kBAAkB;wBAChB,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,iBAAiB,CAAC,cAAe,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;iBACvF;gBACD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAChD,iBAAiB,EACjB,kBAAkB,EAClB,2BAA2B,CAC5B,CAAC;aACH;YAED,MAAM,aAAa,GAAQ,UAAU,CAAC,KAAK,IAAI,WAAW,IAAI,UAAU,CAAC;YACzE,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;YAChC,IAAI,aAAa,CAAC,OAAO,EAAE;gBACzB,KAAK,C
AAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;aACvC;YAED,IAAI,iBAAiB,EAAE;gBACrB,KAAK,CAAC,QAAS,CAAC,UAAU,GAAG,WAAW,CAAC;aAC1C;SACF;QAED,mFAAmF;QACnF,IAAI,cAAc,CAAC,OAAO,IAAI,oBAAoB,EAAE;YAClD,KAAK,CAAC,QAAS,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAClE,oBAAoB,EACpB,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;SACH;KACF;IAAC,OAAO,YAAiB,EAAE;QAC1B,KAAK,CAAC,OAAO,GAAG,UAAU,YAAY,CAAC,OAAO,mDAAmD,cAAc,CAAC,UAAU,6BAA6B,CAAC;KACzJ;IAED,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AAChD,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC,EACxC,IAAiC;;IAEjC,MAAM,YAAY,GAAG,CAAC,GAA6B,EAAkB,EAAE;QACrE,MAAM,GAAG,GAAG,UAAU,GAAG,gDAAgD,iBAAiB,CAAC,UAAU,GAAG,CAAC;QACzG,MAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,MAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,CAClB,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC3B,CAAC,CAAC;IAEF,MAAM,SAAS,GACb,CAAA,MAAA,iBAAiB,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,iBAAiB,CAAC,MAAM,CAAC;QAClF,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,CAAC;IAC/C,IAAI,CAAC,SAAS,IAAI,iBAAiB,CAAC,UAAU,EAAE;QAC9C,MAAM,IAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,MAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,MAAM,iBAAiB,GAAa,CAAC,WAAW;YAC9C,CAAC,CAAC,EAAE;YACJ,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,CAAC,OAAO,EAAE,EAAE;gBACpD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE,CAAC,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;YAC3F,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC;iBACxB,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE;gBACb,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;YAC3B,CAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { SerializerOptions, XML_CHARKEY } from \"../util/serializer.common\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { MapperType } from \"../serializer\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { parseXML } from \"../util/xml\";\n\n/**\n * Options to configure API response deserialization.\n */\nexport interface DeserializationOptions {\n /**\n * Configures the expected content types for the deserialization of\n * JSON and XML response bodies.\n */\n expectedContentTypes: DeserializationContentTypes;\n}\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", 
\"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions?: SerializerOptions\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DeserializationPolicy(\n nextPolicy,\n options,\n deserializationContentTypes,\n parsingOptions\n );\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\nexport const DefaultDeserializationOptions: DeserializationOptions = {\n expectedContentTypes: {\n json: defaultJsonContentTypes,\n xml: defaultXmlContentTypes,\n },\n};\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n public readonly xmlCharKey: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n requestPolicyOptions: RequestPolicyOptions,\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions: SerializerOptions = {}\n ) {\n super(nextPolicy, requestPolicyOptions);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n this.xmlCharKey = parsingOptions.xmlCharKey ?? 
XML_CHARKEY;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, {\n xmlCharKey: this.xmlCharKey,\n })\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\n/**\n * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}.\n * @param jsonContentTypes - Response content types to parse the body as JSON.\n * @param xmlContentTypes - Response content types to parse the body as XML.\n * @param response - HTTP Response from the pipeline.\n * @param options - Options to the serializer, mostly for configuring the XML parser if needed.\n * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}.\n */\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse,\n options: SerializerOptions = {}\n): Promise {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(\n (parsedResponse) => {\n if (!shouldDeserializeResponse(parsedResponse)) {\n return parsedResponse;\n }\n\n const operationSpec = parsedResponse.request.operationSpec;\n if (!operationSpec || !operationSpec.responses) {\n return parsedResponse;\n }\n\n const responseSpec = getOperationResponse(parsedResponse);\n\n const { error, shouldReturnResponse } = handleErrorResponse(\n parsedResponse,\n operationSpec,\n responseSpec\n );\n if (error) {\n throw error;\n } else if (shouldReturnResponse) {\n return parsedResponse;\n }\n\n // An operation response spec does exist for current status code, so\n // use it to deserialize the response.\n if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\",\n options\n );\n } catch (innerError: any) {\n const restError = new RestError(\n `Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n throw restError;\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\",\n options\n );\n }\n }\n\n return parsedResponse;\n }\n );\n}\n\nfunction isOperationSpecEmpty(operationSpec: OperationSpec): boolean {\n const expectedStatusCodes = Object.keys(operationSpec.responses);\n return (\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\")\n );\n}\n\nfunction handleErrorResponse(\n parsedResponse: HttpOperationResponse,\n operationSpec: OperationSpec,\n responseSpec: OperationResponse | undefined\n): { error: RestError | null; shouldReturnResponse: boolean } {\n const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n const isExpectedStatusCode: boolean = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n } else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n\n const errorResponseSpec = responseSpec ?? operationSpec.responses.default;\n const streaming =\n parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ||\n parsedResponse.request.streamResponseBody;\n const initialErrorMessage = streaming\n ? `Unexpected status code: ${parsedResponse.status}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(\n initialErrorMessage,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n\n const defaultBodyMapper = errorResponseSpec.bodyMapper;\n const defaultHeadersMapper = errorResponseSpec.headersMapper;\n\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n const parsedBody = parsedResponse.parsedBody;\n let parsedError;\n if (defaultBodyMapper) {\n let valueToDeserialize: any = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName!] 
: [];\n }\n parsedError = operationSpec.serializer.deserialize(\n defaultBodyMapper,\n valueToDeserialize,\n \"error.response.parsedBody\"\n );\n }\n\n const internalError: any = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n\n if (defaultBodyMapper) {\n error.response!.parsedBody = parsedError;\n }\n }\n\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response!.parsedHeaders = operationSpec.serializer.deserialize(\n defaultHeadersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n } catch (defaultError: any) {\n error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`;\n }\n\n return { error, shouldReturnResponse: false };\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse,\n opts: Required\n): Promise {\n const errorHandler = (err: Error & { code: string }): Promise => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse\n );\n return Promise.reject(e);\n };\n\n const streaming =\n operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text, opts)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js new file mode 100644 index 0000000000..00afedf0fe --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* + * NOTE: When moving this file, please update "browser" section in package.json + */ +import { BaseRequestPolicy, } from "./requestPolicy"; +const DisbleResponseDecompressionNotSupportedInBrowser = new Error("DisableResponseDecompressionPolicy is not supported in browser environment"); +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. 
+ */ +export function disableResponseDecompressionPolicy() { + return { + create: (_nextPolicy, _options) => { + throw DisbleResponseDecompressionNotSupportedInBrowser; + }, + }; +} +export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + throw DisbleResponseDecompressionNotSupportedInBrowser; + } + async sendRequest(_request) { + throw DisbleResponseDecompressionNotSupportedInBrowser; + } +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map new file mode 100644 index 0000000000..88145bcbae --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/disableResponseDecompressionPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,gDAAgD,GAAG,IAAI,KAAK,CAChE,4EAA4E,CAC7E,CAAC;AAEF;;;GAGG;AACH,MAAM,UAAU,kCAAkC;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,WAA0B,EAAE,QAA8B,EAAE,EAAE;YACrE,MAAM,gDAAgD,CAAC;QACzD,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,kCAAmC,SAAQ,iBAAiB;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,MAAM,gDAAgD,CAAC;IACzD,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,QAAqB;QAC5C,MAAM,gDAAgD,CAAC;IACzD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n */\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\nconst DisbleResponseDecompressionNotSupportedInBrowser = new Error(\n \"DisableResponseDecompressionPolicy is not supported in browser environment\"\n);\n\n/**\n * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting\n * to use it will results in error being thrown.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptions) => {\n throw DisbleResponseDecompressionNotSupportedInBrowser;\n },\n };\n}\n\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n throw DisbleResponseDecompressionNotSupportedInBrowser;\n }\n\n public async sendRequest(_request: WebResource): Promise {\n throw DisbleResponseDecompressionNotSupportedInBrowser;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js new file mode 100644 index 0000000000..bf033e0048 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=disableResponseDecompressionPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map new file mode 100644 index 0000000000..5a50cd8fa2 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.js","sourceRoot":"","sources":["../../../src/policies/disableResponseDecompressionPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB;;;GAGG;AACH,MAAM,UAAU,kCAAkC;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,kCAAkC,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,MAAM,OAAO,kCAAmC,SAAQ,iBAAiB;IACvE;;;;;OAKG;IACH,uCAAuC;IACvC,wEAAwE;IACxE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,OAAO,CAAC,kBAAkB,GAAG,KAAK,CAAC;QACnC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent constructor is protected.\n /* eslint-disable-next-line 
@typescript-eslint/no-useless-constructor */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResource): Promise {\n request.decompressResponse = false;\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js new file mode 100644 index 0000000000..56768cc134 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; +import { Constants } from "../util/constants"; +import { RestError } from "../restError"; +import { delay } from "../util/delay"; +import { logger } from "../log"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); + }, + }; +} +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export var RetryMode; +(function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; +})(RetryMode || (RetryMode = {})); +export const DefaultRetryOptions = { + maxRetries: DEFAULT_CLIENT_RETRY_COUNT, + retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, +}; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? 
maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry(this, request, response)) + .catch((error) => retry(this, request, error.response, undefined, error)); + } +} +async function retry(policy, request, response, retryData, requestError) { + function shouldPolicyRetry(responseParam) { + const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; + if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { + return false; + } + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; + } + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval, + }, retryData, requestError); + const isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { + logger.info(`Retrying request in ${retryData.retryInterval}`); + try { + await delay(retryData.retryInterval); + const res = await policy._nextPolicy.sendRequest(request.clone()); + return retry(policy, request, res, retryData); + } + catch (err) { + return retry(policy, request, response, retryData, err); + } + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return response; + } +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map new file mode 100644 index 0000000000..2ec16229cc --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js.map @@ -0,0 +1 @@ 
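The compiled ExponentialRetryPolicy above retries a cloned request with an exponentially growing delay, capped by maxRetryInterval, and gives up according to the status-code predicate in shouldPolicyRetry. The standalone TypeScript sketch below re-derives that predicate so the retry rules are easy to scan; isRetriable and StatusCheck are names introduced here for illustration (they are not exports of @azure/core-http), and the sketch simplifies the original by checking the Retry-After header on the same response it classifies.

// Editorial sketch, not part of the vendored files: the retry decision
// encoded by shouldPolicyRetry() in the policy above.
type StatusCheck = { status?: number; retryAfterHeader?: string };

function isRetriable({ status, retryAfterHeader }: StatusCheck): boolean {
  // A 503 that carries a Retry-After header is left for a throttling policy to handle.
  if (status === 503 && retryAfterHeader) {
    return false;
  }
  // No response, most 4xx, 501 and 505 are final; 408 and the remaining 5xx are retried.
  if (
    status === undefined ||
    (status < 500 && status !== 408) ||
    status === 501 ||
    status === 505
  ) {
    return false;
  }
  return true;
}

console.log(isRetriable({ status: 500 })); // true  -> retried with backoff
console.log(isRetriable({ status: 408 })); // true
console.log(isRetriable({ status: 404 })); // false -> surfaced immediately
console.log(isRetriable({ status: 503, retryAfterHeader: "30" })); // false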
+{"version":3,"file":"exponentialRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,iCAAiC,EACjC,0BAA0B,EAC1B,6BAA6B,EAG7B,QAAQ,EACR,WAAW,EACX,eAAe,GAChB,MAAM,oCAAoC,CAAC;AAC5C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAEzC,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAN,IAAY,SAMX;AAND,WAAY,SAAS;IACnB;;;OAGG;IACH,uDAAW,CAAA;AACb,CAAC,EANW,SAAS,KAAT,SAAS,QAMpB;AA8BD,MAAM,CAAC,MAAM,mBAAmB,GAAiB;IAC/C,UAAU,EAAE,0BAA0B;IACtC,cAAc,EAAE,6BAA6B;IAC7C,iBAAiB,EAAE,iCAAiC;CACrD,CAAC;AAEF;;GAEG;AACH,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IAc3D;;;;;;;OAOG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB;QAEzB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;IACxC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAClD,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC;IAC9E,CAAC;CACF;AAED,KAAK,UAAU,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,iBAAiB,CAAC,aAAqC;QAC9D,MAAM,UAAU,GAAG,aAAa,aAAb,aAAa,uBAAb,aAAa,CAAE,MAAM,CAAC;QACzC,IAAI,UAAU,KAAK,GAAG,KAAI,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA,EAAE;YACtF,OAAO,KAAK,CAAC;SACd;QAED,IACE,UAAU,KAAK,SAAS;YACxB,CAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;YACxC,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;YACA,OAAO,KAAK,CAAC;SACd;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,SAAS,GAAG,eAAe,CACzB;QACE,aAAa,EAAE,MAAM,CAAC,aAAa;QACnC,gBAAgB,EAAE,CAAC;QACnB,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;KAC1C,EACD,SAAS,EACT,YAAY,CACb,CAAC;IAEF,MAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;QACxF,MAAM,CAAC,IAAI,CAAC,uBAAuB,SAAS,CAAC,aAAa,EAAE,CAAC,CAAC;QAC9D,IAAI;YACF,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;YAClE,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC;SAC/C;QAAC,OAAO,GAAQ,EAAE;YACjB,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;SACzD;KACF;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;QACjD,qFAAqF;QACrF,MAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;QACJ,MAAM,GAAG,CAAC;KACX;SAAM;QACL,OAAO,QAAQ,CAAC;KACjB;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n 
DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\nimport { logger } from \"../log\";\n\n/**\n * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time.\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - Base time between retries.\n * @param maxRetryInterval - Maximum time to wait between retries.\n */\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * Describes the Retry Mode type. Currently supporting only Exponential.\n */\nexport enum RetryMode {\n /**\n * Currently supported retry mode.\n * Each time a retry happens, it will take exponentially more time than the last time.\n */\n Exponential,\n}\n\n/**\n * Options that control how to retry failed requests.\n */\nexport interface RetryOptions {\n /**\n * The maximum number of retry attempts. Defaults to 3.\n */\n maxRetries?: number;\n\n /**\n * The amount of delay in milliseconds between retry attempts. Defaults to 30000\n * (30 seconds). The delay increases exponentially with each retry up to a maximum\n * specified by maxRetryDelayInMs.\n */\n retryDelayInMs?: number;\n\n /**\n * The maximum delay in milliseconds allowed before retrying an operation. Defaults\n * to 90000 (90 seconds).\n */\n maxRetryDelayInMs?: number;\n\n /**\n * Currently supporting only Exponential mode.\n */\n mode?: RetryMode;\n}\n\nexport const DefaultRetryOptions: RetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n};\n\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\nasync function retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n function shouldPolicyRetry(responseParam?: HttpOperationResponse): boolean {\n const statusCode = responseParam?.status;\n if (statusCode === 503 && response?.headers.get(Constants.HeaderConstants.RETRY_AFTER)) {\n return false;\n }\n\n if (\n statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n return true;\n }\n\n retryData = updateRetryData(\n {\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval,\n },\n retryData,\n requestError\n );\n\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) {\n logger.info(`Retrying request in ${retryData.retryInterval}`);\n try {\n await delay(retryData.retryInterval);\n const res = await policy._nextPolicy.sendRequest(request.clone());\n return retry(policy, request, res, retryData);\n } catch (err: any) {\n return retry(policy, request, response, retryData, err);\n }\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n throw err;\n } else {\n return response;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js new file mode 100644 index 0000000000..868c1b71cc --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. 
+ */ +export function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + create: (nextPolicy, options) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +export class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _requestIdHeaderName) { + super(nextPolicy, options); + this._requestIdHeaderName = _requestIdHeaderName; + } + sendRequest(request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, request.requestId); + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=generateClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map new file mode 100644 index 0000000000..a05940d6e9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.js","sourceRoot":"","sources":["../../../src/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB;;;GAGG;AACH,MAAM,UAAU,6BAA6B,CAC3C,mBAAmB,GAAG,wBAAwB;IAE9C,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,6BAA8B,SAAQ,iBAAiB;IAClE,YACE,UAAyB,EACzB,OAA6B,EACrB,oBAA4B;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFnB,yBAAoB,GAApB,oBAAoB,CAAQ;IAGtC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;SACnE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that assigns a unique request id to outgoing requests.\n * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request.\n */\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js new file mode 100644 index 
0000000000..22581800c7 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * By default, HTTP connections are maintained for future requests. + */ +export const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=keepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map new file mode 100644 index 0000000000..4cec3806e5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"keepAlivePolicy.js","sourceRoot":"","sources":["../../../src/policies/keepAlivePolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAgBzB;;GAEG;AACH,MAAM,CAAC,MAAM,uBAAuB,GAAqB;IACvD,MAAM,EAAE,IAAI;CACb,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,gBAAmC;IACjE,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,gBAAgB,IAAI,uBAAuB,CAAC,CAAC;QAC/F,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IACpD;;;;;;OAMG;IACH,YACE,UAAyB,EACzB,OAA6B,EACZ,gBAAkC;QAEnD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFV,qBAAgB,GAAhB,gBAAgB,CAAkB;IAGrD,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;QACjD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\nexport interface KeepAliveOptions {\n /**\n * When true, connections will be kept alive for multiple requests.\n * Defaults to true.\n */\n enable: boolean;\n}\n\n/**\n * By default, HTTP connections are maintained for future requests.\n */\nexport const DefaultKeepAliveOptions: KeepAliveOptions = {\n enable: true,\n};\n\n/**\n * Creates a 
policy that controls whether HTTP connections are maintained on future requests.\n * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests.\n * @returns An instance of the {@link KeepAlivePolicy}\n */\nexport function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n },\n };\n}\n\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nexport class KeepAlivePolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private readonly keepAliveOptions: KeepAliveOptions\n ) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResourceLike): Promise {\n request.keepAlive = this.keepAliveOptions.enable;\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js new file mode 100644 index 0000000000..05d1225a43 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Sanitizer } from "../util/sanitizer"; +import { logger as coreLogger } from "../log"; +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +export function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger = coreLogger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. 
+ */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map new file mode 100644 index 0000000000..ac9f9838cf --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.js","sourceRoot":"","sources":["../../../src/policies/logPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,MAAM,IAAI,UAAU,EAAE,MAAM,QAAQ,CAAC;AAiC9C;;;;GAIG;AACH,MAAM,UAAU,SAAS,CAAC,iBAAmC,EAAE;IAC7D,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QAC5D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,iBAAiB;IA4C9C,YACE,UAAyB,EACzB,OAA6B,EAC7B,EACE,MAAM,GAAG,UAAU,CAAC,IAAI,EACxB,kBAAkB,GAAG,EAAE,EACvB,sBAAsB,GAAG,EAAE,MACP,EAAE;QAExB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,CAAC,CAAC;IACjF,CAAC;IApDD;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB,CAAC,kBAA+B;QAC3D,IAAI,CAAC,SAAS,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;IACzD,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB;QAC/B,OAAO,IAAI,CAAC,SAAS,CAAC,sBAAsB,CAAC;IAC/C,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB,CAAC,sBAAmC;QACnE,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,sBAAsB,CAAC;IACjE,CAAC;IAgBM,WAAW,CAAC,OAAwB;QACzC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;QAEvE,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QACzB,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;IAC9F,CAAC;IAEO,UAAU,CAAC,OAAwB;QACzC,IAAI,CAAC,MAAM,CAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IAC9D,CAAC;IAEO,WAAW,CAAC,QAA+B;QACjD,IAAI,CAAC,MAAM,CAAC,yBAAyB,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC;QACxD,IAAI,CAAC,MAAM,CAAC,YAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;QACrE,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n 
BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Debugger } from \"@azure/logger\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Sanitizer } from \"../util/sanitizer\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger as coreLogger } from \"../log\";\n\n/**\n * Options to pass to the {@link logPolicy}.\n * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged.\n */\nexport interface LogPolicyOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to:\n * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id,\n * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials,\n * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers,\n * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding,\n * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match,\n * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent.\n *\n * Any headers specified in this field will be added to that list.\n * Any other values will be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n\n /**\n * The Debugger (logger) instance to use for writing pipeline logs.\n */\n logger?: Debugger;\n}\n\n/**\n * Creates a policy that logs information about the outgoing request and the incoming responses.\n * @param loggingOptions - Logging options.\n * @returns An instance of the {@link LogPolicy}\n */\nexport function logPolicy(loggingOptions: LogPolicyOptions = {}): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n },\n };\n}\n\n/**\n * A policy that logs information about the outgoing request and the incoming responses.\n */\nexport class LogPolicy extends BaseRequestPolicy {\n logger: Debugger;\n sanitizer: Sanitizer;\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedHeaderNames(): Set {\n return this.sanitizer.allowedHeaderNames;\n }\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedHeaderNames(allowedHeaderNames: Set) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. 
By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedQueryParameters(): Set {\n return this.sanitizer.allowedQueryParameters;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedQueryParameters(allowedQueryParameters: Set) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n }\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n {\n logger = coreLogger.info,\n allowedHeaderNames = [],\n allowedQueryParameters = [],\n }: LogPolicyOptions = {}\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters });\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!this.logger.enabled) return this._nextPolicy.sendRequest(request);\n\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response));\n }\n\n private logRequest(request: WebResourceLike): void {\n this.logger(`Request: ${this.sanitizer.sanitize(request)}`);\n }\n\n private logResponse(response: HttpOperationResponse): HttpOperationResponse {\n this.logger(`Response status code: ${response.status}`);\n this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`);\n return response;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js new file mode 100644 index 0000000000..36467994f3 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
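The LogPolicy added above logs a sanitized request plus the response status and headers, and only when the configured Debugger channel is enabled. A hedged sketch of the options object it accepts follows; it assumes createClientLogger from @azure/logger (already a dependency of @azure/core-http), and the field names come from the LogPolicyOptions interface embedded in the source map above.

// Editorial sketch, not part of the vendored files.
import { createClientLogger } from "@azure/logger";

const logger = createClientLogger("my-sample"); // namespace chosen for this example

// Shape accepted by logPolicy(loggingOptions); the values here are examples only.
const loggingOptions = {
  allowedHeaderNames: ["x-ms-correlation-request-id"], // logged instead of "REDACTED"
  allowedQueryParameters: ["api-version"],             // query values are redacted by default
  logger: logger.info,                                 // a Debugger; pipeline logs go to this channel
};

In published @azure/logger, the AZURE_LOG_LEVEL environment variable (or setLogLevel) controls whether such a channel is enabled, which is why LogPolicy.sendRequest short-circuits on !this.logger.enabled.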
+export function getDefaultUserAgentKey() { + return "x-ms-useragent"; +} +export function getPlatformSpecificData() { + const navigator = self.navigator; + const osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map new file mode 100644 index 0000000000..b6f195622f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAalC,MAAM,UAAU,sBAAsB;IACpC,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;IAChD,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;KAChE,CAAC;IAEF,OAAO,CAAC,MAAM,CAAC,CAAC;AAClB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-useragent\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js new file mode 100644 index 0000000000..e1210a9ac4 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as os from "os"; +import { Constants } from "../util/constants"; +export function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +export function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os.arch()}-${os.type()}-${os.release()})`, + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map new file mode 100644 index 0000000000..738f7c7c13 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,MAAM,UAAU,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,OAAO,EAAE,GAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as os from \"os\";\nimport { Constants } from \"../util/constants\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js new file mode 100644 index 0000000000..fe17cf341c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const { Platform } = require("react-native"); // eslint-disable-line import/no-extraneous-dependencies, @typescript-eslint/no-require-imports +export function getDefaultUserAgentKey() { + return "x-ms-useragent"; +} +export function getPlatformSpecificData() { + const { major, minor, patch } = Platform.constants.reactNativeVersion; + const runtimeInfo = { + key: "react-native", + value: `${major}.${minor}.${patch}`, + }; + const osInfo = { + key: "OS", + value: `${Platform.OS}-${Platform.Version}`, + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.native.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map new file mode 100644 index 0000000000..641603ba3c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.native.js","sourceRoot":"","sources":["../../../src/policies/msRestUserAgentPolicy.native.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,MAAM,EAAE,QAAQ,EAAE,GAAG,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,+FAA+F;AAE7I,MAAM,UAAU,sBAAsB;IACpC,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAC,SAAS,CAAC,kBAAkB,CAAC;IACtE,MAAM,WAAW,GAAG;QAClB,GAAG,EAAE,cAAc;QACnB,KAAK,EAAE,GAAG,KAAK,IAAI,KAAK,IAAI,KAAK,EAAE;KACpC,CAAC;IAEF,MAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,GAAG,QAAQ,CAAC,EAAE,IAAI,QAAQ,CAAC,OAAO,EAAE;KAC5C,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/*\n * NOTE: When moving this file, please update \"react-native\" section in package.json.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\nconst { Platform } = require(\"react-native\"); // eslint-disable-line import/no-extraneous-dependencies, @typescript-eslint/no-require-imports\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-useragent\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const { major, minor, patch } = Platform.constants.reactNativeVersion;\n const runtimeInfo = {\n key: \"react-native\",\n value: `${major}.${minor}.${patch}`,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `${Platform.OS}-${Platform.Version}`,\n };\n\n return [runtimeInfo, osInfo];\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js new file mode 100644 index 0000000000..19119e5554 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// BaseRequestPolicy has a protected constructor. +/* eslint-disable @typescript-eslint/no-useless-constructor */ +import { BaseRequestPolicy, } from "./requestPolicy"; +export function ndJsonPolicy() { + return { + create: (nextPolicy, options) => { + return new NdJsonPolicy(nextPolicy, options); + }, + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends a request. 
+ */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map new file mode 100644 index 0000000000..4513fcf27a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ndJsonPolicy.js","sourceRoot":"","sources":["../../../src/policies/ndJsonPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,iDAAiD;AACjD,8DAA8D;AAE9D,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,UAAU,YAAY;IAC1B,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC/C,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,YAAa,SAAQ,iBAAiB;IAC1C;;OAEG;IACH,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,4DAA4D;QAC5D,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACpE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;YACtC,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACvB,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;aACzE;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// BaseRequestPolicy has a protected constructor.\n/* eslint-disable @typescript-eslint/no-useless-constructor */\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nexport function ndJsonPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new NdJsonPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nclass NdJsonPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends a request.\n */\n public async sendRequest(request: WebResourceLike): Promise {\n // There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n const body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map((item) => JSON.stringify(item) + \"\\n\").join(\"\");\n }\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js new file mode 100644 index 0000000000..260366e226 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js @@ -0,0 +1,24 @@ +// Copyright 
(c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +const proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); +export function getDefaultProxySettings(_proxyUrl) { + return undefined; +} +export function proxyPolicy(_proxySettings) { + return { + create: (_nextPolicy, _options) => { + throw proxyNotSupportedInBrowser; + }, + }; +} +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + throw proxyNotSupportedInBrowser; + } + sendRequest(_request) { + throw proxyNotSupportedInBrowser; + } +} +//# sourceMappingURL=proxyPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map new file mode 100644 index 0000000000..826436815d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.js","sourceRoot":"","sources":["../../../src/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAKzB,MAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,uBAAuB,CAAC,SAAkB;IACxD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,CAAC,WAA0B,EAAE,QAA8B,EAAE,EAAE;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,WAAY,SAAQ,iBAAiB;IAChD,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,MAAM,0BAA0B,CAAC;IACnC,CAAC;IAEM,WAAW,CAAC,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptions) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js new file mode 100644 index 0000000000..031230bc7a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js @@ -0,0 +1,150 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
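The NdJsonPolicy added earlier in this diff rewrites a JSON-array request body into newline-delimited JSON before handing it to the next policy. A small standalone sketch of that rewrite follows; toNdJson is a name introduced here for illustration, not a package export.

// Editorial sketch, not part of the vendored files: the body transformation
// performed by NdJsonPolicy.sendRequest() above.
function toNdJson(body: string): string {
  if (body.startsWith("[")) {
    const parsed = JSON.parse(body);
    if (Array.isArray(parsed)) {
      // One JSON document per line, each terminated by "\n".
      return parsed.map((item) => JSON.stringify(item) + "\n").join("");
    }
  }
  return body; // non-array bodies pass through untouched
}

console.log(JSON.stringify(toNdJson('[{"a":1},{"b":2}]')));
// => "{\"a\":1}\n{\"b\":2}\n"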
+import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; +import { getEnvironmentValue } from "../util/utils"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +export function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. 
+ * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; + } + sendRequest(request) { + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map new file mode 100644 index 0000000000..484a8f4c32 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js.map @@ -0,0 +1 @@ 
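The Node proxyPolicy above resolves settings from HTTPS_PROXY, ALL_PROXY or HTTP_PROXY when none are passed in (per the code shown, getDefaultProxySettings("http://user:secret@proxy.local:3128") yields { host: "http://proxy.local", port: 3128, username: "user", password: "secret" }), and it skips hosts matched by the NO_PROXY list. The standalone sketch below re-derives the NO_PROXY matching rules implemented by isBypassed; matchesNoProxy is a name introduced here for illustration, and the caching via globalBypassedMap is omitted.

// Editorial sketch, not part of the vendored files: leading-dot NO_PROXY semantics.
function matchesNoProxy(host: string, noProxyList: string[]): boolean {
  for (const pattern of noProxyList) {
    if (pattern.startsWith(".")) {
      // ".foo.com" matches foo.com itself and any subdomain such as api.foo.com.
      if (host.endsWith(pattern)) return true;
      if (host.length === pattern.length - 1 && host === pattern.slice(1)) return true;
    } else if (host === pattern) {
      return true; // bare patterns are exact host matches
    }
  }
  return false;
}

console.log(matchesNoProxy("foo.com", [".foo.com"]));      // true  (bare domain)
console.log(matchesNoProxy("api.foo.com", [".foo.com"]));  // true  (subdomain)
console.log(matchesNoProxy("foo.com.evil", [".foo.com"])); // false
console.log(matchesNoProxy("bar.com", ["foo.com"]));       // false (exact match only)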
+{"version":3,"file":"proxyPolicy.js","sourceRoot":"","sources":["../../../src/policies/proxyPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC,OAAO,EAAE,mBAAmB,EAAE,MAAM,eAAe,CAAC;AAEpD;;;GAGG;AACH,MAAM,CAAC,MAAM,iBAAiB,GAAa,EAAE,CAAC;AAC9C,IAAI,iBAAiB,GAAY,KAAK,CAAC;AAEvC,yDAAyD;AACzD,MAAM,iBAAiB,GAAyB,IAAI,GAAG,EAAE,CAAC;AAE1D,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,MAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;;;;GAIG;AACH,SAAS,UAAU,CACjB,GAAW,EACX,WAAqB,EACrB,WAAkC;IAElC,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC9B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAK,MAAM,OAAO,IAAI,WAAW,EAAE;QACjC,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YACtB,mEAAmE;YACnE,mDAAmD;YACnD,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,MAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,iBAAiB,GAAG,IAAI,CAAC;IACzB,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC;aAC1B,MAAM,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAED,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5E,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,MAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ;QACR,QAAQ;KACT,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,aAA6B,EAC7B,OAGC;IAED,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,IAAI,CAAC,iBAAiB,EAAE;QACtB,iBAAiB,CAAC,IAAI,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;KAC1C;IACD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,oBAA0C,EAAE,EAAE;YAChF,OAAO,IAAI,WAAW,CACpB,UAAU,EACV,oBAAoB,EACpB,aAAc,EACd,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,iBAAiB,CAC3B,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW;IAKrC,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,MAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3D,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,MAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IAC
pE,MAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1E,MAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ;QACR,QAAQ;QACR,cAAc;KACf,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,WAAY,SAAQ,iBAAiB;IAChD,YACE,UAAyB,EACzB,OAA6B,EACtB,aAA4B,EAC3B,iBAA4B;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAHpB,kBAAa,GAAb,aAAa,CAAe;QAC3B,sBAAiB,GAAjB,iBAAiB,CAAW;IAGtC,CAAC;IAEM,WAAW,CAAC,OAAwB;;QACzC,IACE,CAAC,OAAO,CAAC,aAAa;YACtB,CAAC,UAAU,CACT,OAAO,CAAC,GAAG,EACX,MAAA,IAAI,CAAC,iBAAiB,mCAAI,iBAAiB,EAC3C,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,iBAAiB,CACvD,EACD;YACA,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getEnvironmentValue } from \"../util/utils\";\n\n/**\n * Stores the patterns specified in NO_PROXY environment variable.\n * @internal\n */\nexport const globalNoProxyList: string[] = [];\nlet noProxyListLoaded: boolean = false;\n\n/** A cache of whether a host should bypass the proxy. */\nconst globalBypassedMap: Map = new Map();\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n/**\n * Check whether the host of a given `uri` matches any pattern in the no proxy list.\n * If there's a match, any request sent to the same host shouldn't have the proxy settings set.\n * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\n */\nfunction isBypassed(\n uri: string,\n noProxyList: string[],\n bypassedMap?: Map\n): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (bypassedMap?.has(host)) {\n return bypassedMap.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n bypassedMap?.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n noProxyListLoaded = true;\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * Converts a given URL of a proxy server into `ProxySettings` or 
attempts to retrieve `ProxySettings` from the current environment if one is not passed.\n * @param proxyUrl - URL of the proxy\n * @returns The default proxy settings, or undefined.\n */\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\n/**\n * A policy that allows one to apply proxy settings to all requests.\n * If not passed static settings, they will be retrieved from the HTTPS_PROXY\n * or HTTP_PROXY environment variables.\n * @param proxySettings - ProxySettings to use on each request.\n * @param options - additional settings, for example, custom NO_PROXY patterns\n */\nexport function proxyPolicy(\n proxySettings?: ProxySettings,\n options?: {\n /** a list of patterns to override those loaded from NO_PROXY environment variable. */\n customNoProxyList?: string[];\n }\n): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n if (!noProxyListLoaded) {\n globalNoProxyList.push(...loadNoProxy());\n }\n return {\n create: (nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions) => {\n return new ProxyPolicy(\n nextPolicy,\n requestPolicyOptions,\n proxySettings!,\n options?.customNoProxyList\n );\n },\n };\n}\n\nfunction extractAuthFromUrl(url: string): {\n username?: string;\n password?: string;\n urlWithoutAuth: string;\n} {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public proxySettings: ProxySettings,\n private customNoProxyList?: string[]\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (\n !request.proxySettings &&\n !isBypassed(\n request.url,\n this.customNoProxyList ?? globalNoProxyList,\n this.customNoProxyList ? undefined : globalBypassedMap\n )\n ) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js new file mode 100644 index 0000000000..2a8a0597f1 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js @@ -0,0 +1,64 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, } from "./requestPolicy"; +import { URLBuilder } from "../url"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +export const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +export class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map new file mode 100644 index 0000000000..a4c6c8c17a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"redirectPolicy.js","sourceRoot":"","sources":["../../../src/policies/redirectPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;GAEG;AACH,MAAM,eAAe,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AAkBxC,MAAM,CAAC,MAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,cAAc,GAAG,EAAE;IAChD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QACjE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,cAAe,SAAQ,iBAAiB;IACnD,YAAY,UAAyB,EAAE,OAA6B,EAAW,aAAa,EAAE;QAC5F,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QADkD,eAAU,GAAV,UAAU,CAAK;IAE9F,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,cAAc,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC;IAC3D,CAAC;CACF;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEtB,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;IACrC,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;QACd,CAAC,MAAM,KAAK,GAAG;YACb,CAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;QACjB,CAAC,CAAC,MAAM,CAAC,UAAU,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,EAC1D;QACA,MAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;QAEjC,+DAA+D;QAC/D,+EAA+E;QAC/E,IAAI,MAAM,KAAK,GAAG,EAAE;YAClB,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;KACnE;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nconst allowedRedirect = [\"GET\", \"HEAD\"];\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /**\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /**\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\n/**\n * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n * @param maximumRetries - Maximum number of redirects to follow.\n * @returns An instance of the {@link RedirectPolicy}\n */\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\n/**\n * Resends the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n */\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, readonly maxRetries = 20) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1));\n }\n\n return Promise.resolve(response);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js new file mode 100644 index 0000000000..639c271b25 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +/** + * The base class from which all request policies derive. + */ +export class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. 
+ */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export class RequestPolicyOptions { + constructor(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return (!!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} +//# sourceMappingURL=requestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map new file mode 100644 index 0000000000..eb09d86595 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.js","sourceRoot":"","sources":["../../../src/policies/requestPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAsB/D;;GAEG;AACH,MAAM,OAAgB,iBAAiB;IACrC;;OAEG;IACH;IACE;;OAEG;IACM,WAA0B;IACnC;;OAEG;IACM,QAAkC;QAJlC,gBAAW,GAAX,WAAW,CAAe;QAI1B,aAAQ,GAAR,QAAQ,CAA0B;IAC1C,CAAC;IAQJ;;;;OAIG;IACI,SAAS,CAAC,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;CACF;AAsBD;;GAEG;AACH,MAAM,OAAO,oBAAoB;IAC/B,YAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;IAAG,CAAC;IAEpD;;;;OAIG;IACI,SAAS,CAAC,QAA8B;QAC7C,OAAO,CACL,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAK,oBAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,CACzC,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\n/**\n * The underlying structure of a request policy.\n */\nexport interface RequestPolicy {\n /**\n * A method that retrieves an {@link 
HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made.\n * @param httpRequest - {@link WebResourceLike} describing the request to be made.\n */\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\n/**\n * The base class from which all request policies derive.\n */\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n /**\n * The main method to implement that manipulates a request/response.\n */\n protected constructor(\n /**\n * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline.\n */\n readonly _nextPolicy: RequestPolicy,\n /**\n * The options that can be passed to a given request policy.\n */\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n /**\n * Sends a network request based on the given web resource.\n * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made.\n */\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js new file mode 100644 index 0000000000..5f5e3fff87 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { delay } from "../util/delay"; +export function rpRegistrationPolicy(retryTimeout = 30) { + return { + create: (nextPolicy, options) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +export class RPRegistrationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _retryTimeout = 30) { + super(nextPolicy, options); + this._retryTimeout = _retryTimeout; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param originalRequest - The original request + * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. + * @returns A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo = false) { + const reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. 
+ * @param body - The response body received after making the original request. + * @returns The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param url - The original request url + * @returns The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} +/** + * Registers the given provider. + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param provider - The provider name to be registered. + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + */ +async function registerRP(policy, urlPrefix, provider, originalRequest) { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + const response = await policy._nextPolicy.sendRequest(reqOptions); + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param url - The request url for polling + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns True if RP Registration is successful. 
+ */ +async function getRegistrationStatus(policy, url, originalRequest) { + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + const res = await policy._nextPolicy.sendRequest(reqOptions); + const obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + await delay(policy._retryTimeout * 1000); + return getRegistrationStatus(policy, url, originalRequest); + } +} +//# sourceMappingURL=rpRegistrationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map new file mode 100644 index 0000000000..120c82cf52 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.js","sourceRoot":"","sources":["../../../src/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AACvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC,MAAM,UAAU,oBAAoB,CAAC,YAAY,GAAG,EAAE;IACpD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,OAAO,oBAAqB,SAAQ,iBAAiB;IACzD,YACE,UAAyB,EACzB,OAA6B,EACpB,gBAAgB,EAAE;QAE3B,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFlB,kBAAa,GAAb,aAAa,CAAK;IAG7B,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,CAAC,QAAQ,EAAE,EAAE,CAAC,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;IACnE,CAAC;CACF;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,MAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,OAAO,CACL,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;gBAC5C,wFAAwF;gBACxF,kFAAkF;gBAClF,uFAAuF;iBACtF,KAAK,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC;iBAClB,IAAI,CAAC,CAAC,kBAAkB,EAAE,EAAE;gBAC3B,IAAI,kBAAkB,EAAE;oBACtB,2EAA2E;oBAC3E,0EAA0E;oBAC1E,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC,CACL,CAAC;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;GAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAW,GAAG,KAAK;IAEnB,MAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;IAED,wEAAwE;IACxE,iDAAiD;IACjD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;IAEvE,uCAAuC;IACvC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;GAKG;AACH,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAQ,EAAE;YACjB,cAAc;SACf;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;YACA,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CA
AC;IAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,yDAAyD,GAAG,GAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,KAAK,UAAU,UAAU,CACvB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,MAAM,OAAO,GAAG,GAAG,SAAS,aAAa,QAAQ,kCAAkC,CAAC;IACpF,MAAM,MAAM,GAAG,GAAG,SAAS,aAAa,QAAQ,yBAAyB,CAAC;IAC1E,MAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAClE,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,IAAI,KAAK,CAAC,uBAAuB,QAAQ,2CAA2C,CAAC,CAAC;KAC7F;IACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;AAChE,CAAC;AAED;;;;;;;;GAQG;AACH,KAAK,UAAU,qBAAqB,CAClC,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,MAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;IAC7D,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,CAAC;IAC3B,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;QACrF,OAAO,IAAI,CAAC;KACb;SAAM;QACL,MAAM,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,CAAC;QACzC,OAAO,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;KAC5D;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"../util/utils\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. 
We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err: any) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n */\nasync function registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n const response 
= await policy._nextPolicy.sendRequest(reqOptions);\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n}\n\n/**\n * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nasync function getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n const res = await policy._nextPolicy.sendRequest(reqOptions);\n const obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n await delay(policy._retryTimeout * 1000);\n return getRegistrationStatus(policy, url, originalRequest);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js new file mode 100644 index 0000000000..c298812cbe --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export function signingPolicy(authenticationProvider) { + return { + create: (nextPolicy, options) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
+ */ +export class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} +//# sourceMappingURL=signingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map new file mode 100644 index 0000000000..1fbcc9842c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.js","sourceRoot":"","sources":["../../../src/policies/signingPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAKzB;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;QACxE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,iBAAiB;IAClD,YACE,UAAyB,EACzB,OAA6B,EACtB,sBAAgD;QAEvD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFpB,2BAAsB,GAAtB,sBAAsB,CAA0B;IAGzD,CAAC;IAED,WAAW,CAAC,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC1D,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,EAAE,CACpD,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAC1C,CAAC;IACJ,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n * @param authenticationProvider - The authentication provider.\n * @returns An instance of the {@link SigningPolicy}.\n */\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\n/**\n * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n */\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js new file mode 100644 index 0000000000..84c6793f4f --- /dev/null +++ 
b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_MIN_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; +import { delay } from "../util/delay"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export class SystemErrorRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? 
maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} +async function retry(policy, request, operationResponse, err, retryData) { + retryData = updateRetryData(policy, retryData, err); + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; + } + if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } + catch (nestedErr) { + return retry(policy, request, operationResponse, nestedErr, retryData); + } + } + else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map new file mode 100644 index 0000000000..ec403a8b59 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,iCAAiC,EACjC,iCAAiC,EACjC,0BAA0B,EAC1B,6BAA6B,EAG7B,QAAQ,EACR,WAAW,EACX,eAAe,GAChB,MAAM,oCAAoC,CAAC;AAG5C,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC;;;;;;;GAOG;AACH,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;;GAMG;AACH,MAAM,OAAO,sBAAuB,SAAQ,iBAAiB;IAM3D,YACE,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;QACtC,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;IACxC,CAAC;IAEM,WAAW,CAAC,OAAwB;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IACnE,CAAC;CACF;AAED,KAAK,UAAU,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;IAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;IAEpD,SAAS,iBAAiB,CAAC,SAAiC,EAAE,KAAkB;QAC9E,IACE,KAAK;YACL,KAAK,CAAC,IAAI;YACV,CAAC,KAAK,CAAC,IAAI,KAAK,WAAW;gBACzB,KAAK,CAAC,IAAI,KAAK,iBAAiB;gBAChC,KAAK,CAAC,IAAI,KAAK,cAAc;gBAC7B,KAAK,CAAC,IAAI,KAAK,YAAY;gBAC3B,KAAK,CAAC,IAAI,KAAK,QAAQ,CAAC,EAC1B;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,IAAI,WAAW,CAAC,MAA
M,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAE;QACxF,mFAAmF;QACnF,IAAI;YACF,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;SACxD;QAAC,OAAO,SAAc,EAAE;YACvB,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;SACxE;KACF;SAAM;QACL,IAAI,GAAG,EAAE;YACP,qFAAqF;YACrF,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;SACxC;QACD,OAAO,iBAAiB,CAAC;KAC1B;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n * @returns An instance of the {@link SystemErrorRetryPolicy}\n */\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n\n function shouldPolicyRetry(_response?: HttpOperationResponse, error?: RetryError): boolean {\n if (\n error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")\n ) {\n return true;\n }\n return false;\n }\n\n if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (nestedErr: any) {\n return retry(policy, request, operationResponse, nestedErr, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js new file mode 100644 index 0000000000..e20e000e32 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { AbortError } from "@azure/abort-controller"; +import { Constants } from "../util/constants"; +import { DEFAULT_CLIENT_MAX_RETRY_COUNT } from "../util/throttlingRetryStrategy"; +import { delay } from "../util/delay"; +const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export function throttlingRetryPolicy() { + return { + create: (nextPolicy, options) => { + return new ThrottlingRetryPolicy(nextPolicy, options); + }, + }; +} +const StandardAbortMessage = "The operation was aborted."; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export class ThrottlingRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _handleResponse) { + super(nextPolicy, options); + this.numberOfRetries = 0; + this._handleResponse = _handleResponse || this._defaultResponseHandler; + } + async sendRequest(httpRequest) { + const response = await this._nextPolicy.sendRequest(httpRequest.clone()); + if (response.status !== StatusCodes.TooManyRequests && + response.status !== StatusCodes.ServiceUnavailable) { + return response; + } + else { + return this._handleResponse(httpRequest, response); + } + } + async _defaultResponseHandler(httpRequest, httpResponse) { + var _a; + const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (retryAfterHeader) { + const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (delayInMs) { + this.numberOfRetries += 1; + await delay(delayInMs, undefined, { + abortSignal: httpRequest.abortSignal, + abortErrorMsg: StandardAbortMessage, + }); + if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw new AbortError(StandardAbortMessage); + } + if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { + return this.sendRequest(httpRequest); + } + else { + return this._nextPolicy.sendRequest(httpRequest); + } + } + } + return httpResponse; + } + static parseRetryAfterHeader(headerValue) { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + } + static parseDateRetryAfterHeader(headerValue) { + try { + const now = Date.now(); + const date = Date.parse(headerValue); + const diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + } +} +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map new file mode 100644 index 0000000000..35066f9f05 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.js","sourceRoot":"","sources":["../../../src/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AACrD,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,8BAA8B,EAAE,MAAM,iCAAiC,CAAC;AAGjF,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAMtC,MAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AAExD;;;;;;;;;GASG;AACH,MAAM,UAAU,qBAAqB;IACnC,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QACxD,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;GAQG;AACH,MAAM,OAAO,qBAAsB,SAAQ,iBAAiB;IAI1D,YACE,UAAyB,EACzB,OAA6B,EAC7B,eAAiC;QAEjC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAPrB,oBAAe,GAAG,CAAC,CAAC;QAQ1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,IAAI,CAAC,uBAAuB,CAAC;IACzE,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;QACnD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC;QACzE,IACE,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe;YAC/C,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,kBAAkB,EAClD;YACA,OAAO,QAAQ,CAAC;SACjB;aAAM;YACL,OAAO,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;SACpD;IACH,CAAC;IAEO,KAAK,CAAC,uBAAuB,CACnC,WAA4B,EAC5B,YAAmC;;QAEnC,MAAM,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;QAEF,IAAI,gBAAgB,EAAE;YACpB,MAAM,SAAS,GACb,qBAAqB,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,CAAC;YAChE,IAAI,SAAS,EAAE;gBACb,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;gBAE1B,MAAM,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE;oBAChC,WAAW,EAAE,WAAW,CAAC,WAAW;oBACpC,aAAa,EAAE,oBAAoB;iBACpC,CAAC,CAAC;gBAEH,IAAI,MAAA,WAAW,CAAC,WAAW,0CAAE,OAAO,EAAE;oBACpC,MAAM,IAAI,UAAU,CAAC,oBAAoB,CAAC,CAAC;iBAC5C;gBAED,IAAI,IAAI,CAAC,eAAe,GAAG,8BAA8B,EAAE;oBACzD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBACtC;qBAAM;oBACL,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBAClD;aACF;SACF;QAED,OAAO,YAAY,CAAC;IACtB,CAAC;IAEM,MAAM,CAAC,qBAAqB,CAAC,WAAmB;QACrD,MAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;IACH,CAAC;IAEM,MAAM,CAAC,yBAAyB,CAAC,WAAmB;QACzD,IAAI;YACF,MAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,MAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAU,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { Constants } from \"../util/constants\";\nimport { DEFAULT_CLIENT_MAX_RETRY_COUNT } from \"../util/throttlingRetryStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from 
\"../util/delay\";\n\ntype ResponseHandler = (\n httpRequest: WebResourceLike,\n response: HttpOperationResponse\n) => Promise;\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n * @returns\n */\nexport function throttlingRetryPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n },\n };\n}\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private _handleResponse: ResponseHandler;\n private numberOfRetries = 0;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n _handleResponse?: ResponseHandler\n ) {\n super(nextPolicy, options);\n this._handleResponse = _handleResponse || this._defaultResponseHandler;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n const response = await this._nextPolicy.sendRequest(httpRequest.clone());\n if (\n response.status !== StatusCodes.TooManyRequests &&\n response.status !== StatusCodes.ServiceUnavailable\n ) {\n return response;\n } else {\n return this._handleResponse(httpRequest, response);\n }\n }\n\n private async _defaultResponseHandler(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse\n ): Promise {\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader) {\n const delayInMs: number | undefined =\n ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n this.numberOfRetries += 1;\n\n await delay(delayInMs, undefined, {\n abortSignal: httpRequest.abortSignal,\n abortErrorMsg: StandardAbortMessage,\n });\n\n if (httpRequest.abortSignal?.aborted) {\n throw new AbortError(StandardAbortMessage);\n }\n\n if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) {\n return this.sendRequest(httpRequest);\n } else {\n return this._nextPolicy.sendRequest(httpRequest);\n }\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 
1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error: any) {\n return undefined;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js new file mode 100644 index 0000000000..2a1b6b3f61 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js @@ -0,0 +1,126 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { SpanKind, SpanStatusCode, createSpanFunction, getTraceParentHeader, isSpanContextValid, } from "@azure/core-tracing"; +import { logger } from "../log"; +const createSpan = createSpanFunction({ + packagePrefix: "", + namespace: "", +}); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export function tracingPolicy(tracingOptions = {}) { + return { + create(nextPolicy, options) { + return new TracingPolicy(nextPolicy, options, tracingOptions); + }, + }; +} +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export class TracingPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, tracingOptions) { + super(nextPolicy, options); + this.userAgent = tracingOptions.userAgent; + } + async sendRequest(request) { + if (!request.tracingContext) { + return this._nextPolicy.sendRequest(request); + } + const span = this.tryCreateSpan(request); + if (!span) { + return this._nextPolicy.sendRequest(request); + } + try { + const response = await this._nextPolicy.sendRequest(request); + this.tryProcessResponse(span, response); + return response; + } + catch (err) { + this.tryProcessError(span, err); + throw err; + } + } + tryCreateSpan(request) { + var _a; + try { + // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. + // We can pass this as a separate parameter once we upgrade to the latest core-tracing. + const { span } = createSpan(`HTTP ${request.method}`, { + tracingOptions: { + spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: SpanKind.CLIENT }), + tracingContext: request.tracingContext, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? 
void 0 : _a.getValue(Symbol.for("az.namespace")); + if (typeof namespaceFromContext === "string") { + span.setAttribute("az.namespace", namespaceFromContext); + } + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + // set headers + const spanContext = span.spanContext(); + const traceParentHeader = getTraceParentHeader(spanContext); + if (traceParentHeader && isSpanContextValid(spanContext)) { + request.headers.set("traceparent", traceParentHeader); + const traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return span; + } + catch (error) { + logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); + return undefined; + } + } + tryProcessError(span, err) { + try { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: err.message, + }); + if (err.statusCode) { + span.setAttribute("http.status_code", err.statusCode); + } + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } + tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: SpanStatusCode.OK, + }); + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } +} +//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map new file mode 100644 index 0000000000..16c3c85542 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracingPolicy.js","sourceRoot":"","sources":["../../../src/policies/tracingPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAEL,QAAQ,EACR,cAAc,EACd,kBAAkB,EAClB,oBAAoB,EACpB,kBAAkB,GACnB,MAAM,qBAAqB,CAAC;AAG7B,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC,MAAM,UAAU,GAAG,kBAAkB,CAAC;IACpC,aAAa,EAAE,EAAE;IACjB,SAAS,EAAE,EAAE;CACd,CAAC,CAAC;AAYH;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,iBAAuC,EAAE;IACrE,OAAO;QACL,MAAM,CAAC,UAAyB,EAAE,OAA6B;YAC7D,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QAChE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,iBAAiB;IAGlD,YACE,UAAyB,EACzB,OAA6B,EAC7B,cAAoC;QAEpC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,SAAS,GAAG,cAAc,CAAC,SAAS,CAAC;IAC5C,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAwB;QAC/C,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE;YAC3B,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;QAEzC,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;YAC7D,IAAI,CAAC,kBAAkB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;YACxC,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;YAChC,MAAM,GAAG,CAAC;SACX;IACH,CAAC;IAED,aAAa,CAAC,OAAwB;;QACpC,IAAI;YACF,sHAAsH;YACtH,uFAAuF;YACvF,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC,QAAQ,OAAO,CAAC,MAAM,EAAE,EAAE;gBACpD,cAAc,EAAE;oBACd,WAAW,kCACL,OAAe,CAAC,WAAW,KAC/B,IAAI,EAAE,QAAQ,CAAC,MAAM,GACtB;oBACD,cAAc,EAAE,OAAO,CAAC,cAAc;iBACvC;aACF,CAAC,CAAC;YAEH,wDAAwD;YACxD,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACvB,IAAI,CAAC,GAAG,EAAE,CAAC;gBACX,OAAO,SAAS,CAAC;aAClB;YAED,MAAM,oBAAoB,GAAG,MAAA,OAAO,CAAC,cAAc,0CAAE,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;YAE1F,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;gBAC5C,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,oBAAoB,CAAC,CAAC;aACzD;YAED,IAAI,CAAC,aAAa,CAAC;gBACjB,aAAa,EAAE,OAAO,CAAC,MAAM;gBAC7B,UAAU,EAAE,OAAO,CAAC,GAAG;gBACvB,SAAS,EAAE,OAAO,CAAC,SAAS;aAC7B,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;aACtD;YAED,cAAc;YACd,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;YACvC,MAAM,iBAAiB,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;YAC5D,IAAI,iBAAiB,IAAI,kBAAkB,CAAC,WAAW,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,iBAAiB,CAAC,CAAC;gBACtD,MAAM,UAAU,GAAG,WAAW,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC;gBAChF,0FAA0F;gBAC1F,IAAI,UAAU,EAAE;oBACd,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC/C;aACF;YACD,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;YACrF,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;IAEO,eAAe,CAAC,IAAU,EAAE,GAAQ;QAC1C,IAAI;YACF,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;aACrB,CAAC,CAAC;YAEH,IAAI,GAAG,CAAC,UAAU,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;aACvD;YACD,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;SACtF;IACH,CAAC;IAEO,kBAAkB,CAAC,IAAU,EAAE,QAA+B;QACpE,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;YACvD,MAAM,gBAAgB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;YACjE,IAAI,gBAAgB,EAAE;gBACpB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACzD;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,EAAE;aACxB,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,qDAAqD,KAAK,CAAC,OAAO,EAAE,CAAC,CAAC;SACtF;IACH,CAAC;CACF","sourcesC
ontent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n Span,\n SpanKind,\n SpanStatusCode,\n createSpanFunction,\n getTraceParentHeader,\n isSpanContextValid,\n} from \"@azure/core-tracing\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger } from \"../log\";\n\nconst createSpan = createSpanFunction({\n packagePrefix: \"\",\n namespace: \"\",\n});\n\n/**\n * Options to customize the tracing policy.\n */\nexport interface TracingPolicyOptions {\n /**\n * User agent used to better identify the outgoing requests traced by the tracing policy.\n */\n userAgent?: string;\n}\n\n/**\n * Creates a policy that wraps outgoing requests with a tracing span.\n * @param tracingOptions - Tracing options.\n * @returns An instance of the {@link TracingPolicy} class.\n */\nexport function tracingPolicy(tracingOptions: TracingPolicyOptions = {}): RequestPolicyFactory {\n return {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n },\n };\n}\n\n/**\n * A policy that wraps outgoing requests with a tracing span.\n */\nexport class TracingPolicy extends BaseRequestPolicy {\n private userAgent?: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n tracingOptions: TracingPolicyOptions\n ) {\n super(nextPolicy, options);\n this.userAgent = tracingOptions.userAgent;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n if (!request.tracingContext) {\n return this._nextPolicy.sendRequest(request);\n }\n\n const span = this.tryCreateSpan(request);\n\n if (!span) {\n return this._nextPolicy.sendRequest(request);\n }\n\n try {\n const response = await this._nextPolicy.sendRequest(request);\n this.tryProcessResponse(span, response);\n return response;\n } catch (err: any) {\n this.tryProcessError(span, err);\n throw err;\n }\n }\n\n tryCreateSpan(request: WebResourceLike): Span | undefined {\n try {\n // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier.\n // We can pass this as a separate parameter once we upgrade to the latest core-tracing.\n const { span } = createSpan(`HTTP ${request.method}`, {\n tracingOptions: {\n spanOptions: {\n ...(request as any).spanOptions,\n kind: SpanKind.CLIENT,\n },\n tracingContext: request.tracingContext,\n },\n });\n\n // If the span is not recording, don't do any more work.\n if (!span.isRecording()) {\n span.end();\n return undefined;\n }\n\n const namespaceFromContext = request.tracingContext?.getValue(Symbol.for(\"az.namespace\"));\n\n if (typeof namespaceFromContext === \"string\") {\n span.setAttribute(\"az.namespace\", namespaceFromContext);\n }\n\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId,\n });\n\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n\n // set headers\n const spanContext = span.spanContext();\n const traceParentHeader = getTraceParentHeader(spanContext);\n if (traceParentHeader && isSpanContextValid(spanContext)) {\n request.headers.set(\"traceparent\", traceParentHeader);\n const traceState = spanContext.traceState && spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be 
set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return span;\n } catch (error: any) {\n logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`);\n return undefined;\n }\n }\n\n private tryProcessError(span: Span, err: any): void {\n try {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: err.message,\n });\n\n if (err.statusCode) {\n span.setAttribute(\"http.status_code\", err.statusCode);\n }\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n\n private tryProcessResponse(span: Span, response: HttpOperationResponse): void {\n try {\n span.setAttribute(\"http.status_code\", response.status);\n const serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.setStatus({\n code: SpanStatusCode.OK,\n });\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js new file mode 100644 index 0000000000..527856c208 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, } from "./requestPolicy"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { Constants } from "../util/constants"; +import { HttpHeaders } from "../httpHeaders"; +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +export const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. 
+ */ +export class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map new file mode 100644 index 0000000000..cbc2aa1232 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.js","sourceRoot":"","sources":["../../../src/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAoB7C,SAAS,cAAc;IACrB,MAAM,aAAa,GAAG;QACpB,GAAG,EAAE,WAAW;QAChB,KAAK,EAAE,SAAS,CAAC,eAAe;KACjC,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAY,GAAG,GAAG,EAClB,cAAc,GAAG,GAAG;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,OAAO,GAAG,IAAI,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC;IAC/B,CAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,MAAM,CAAC,MAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE;;;GAGG;AACH,MAAM,UAAU,wBAAwB;IACtC,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,MAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,MAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,aAA6B;IAC3D,MAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,KAAK,IAAI;QAC7E,CAAC,CAAC,sBAAsB,EAAE;QAC1B,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC;IACxB,MAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,KAAK,SAAS,IAAI,aAAa,CAAC,KAAK,KAAK,IAAI;QACjF,CAAC,CAAC,wBAAwB,EAAE;QAC5B,CAAC,CAAC,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IACpD,YACW,WAA0B,EAC1B,QAA8B,EAC7B,SAAiB,EACjB,WAAmB;QAE7B,KAAK,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;QALpB,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAAsB;QAC7B,cAAS,GAAT,SAAS,CAAQ;QACjB,gBAAW,GAAX,WAAW,CAAQ;IAG/B,CAAC;IAED,WAAW,CAAC,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED;;OAEG;IACH,kBAAkB,CAAC,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n 
RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Telemetry information. Key/value pairs to include inside the User-Agent string.\n */\nexport type TelemetryInfo = { key?: string; value?: string };\n\n/**\n * Options for adding user agent details to outgoing requests.\n */\nexport interface UserAgentOptions {\n /**\n * String prefix to add to the user agent for outgoing requests.\n * Defaults to an empty string.\n */\n userAgentPrefix?: string;\n}\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\n/**\n * The default approach to generate user agents.\n * Uses static information from this package, plus system information available from the runtime.\n */\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\n/**\n * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n * @param userAgentData - Telemetry information.\n * @returns A new {@link UserAgentPolicy}.\n */\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\n/**\n * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n */\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptions,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n /**\n * Adds the user agent header to the outgoing request.\n */\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js new file mode 100644 index 0000000000..47469975e7 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as tunnel from "tunnel"; +import { URLBuilder } from "./url"; +export function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +export function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +export function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} +//# sourceMappingURL=proxyAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map new 
file mode 100644 index 0000000000..741f8ef9bc --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.js","sourceRoot":"","sources":["../../src/proxyAgent.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAGjC,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AAGnC,MAAM,UAAU,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY,CAAC;IACtE,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;KAClE;IACD,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,IAAI,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CAAC,6EAA6E,CAAC,CAAC;KAChG;IACD,MAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,IAAI;YACV,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,EAAE,CAAC;KAC9D;IAED,MAAM,cAAc,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;IAC9C,MAAM,YAAY,GAAG,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;IAEpD,MAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC1D,OAAO,SAAS,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;AAC7C,CAAC;AAED,MAAM,UAAU,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAA2C;IAE3C,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAO,MAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAO,MAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC;AAED,SAAS,WAAW,CAAC,IAAY;IAC/B,sFAAsF;IACtF,oFAAoF;IACpF,OAAO,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK,CAAC;AACpC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const host = URLBuilder.parse(proxySettings.host).getHost() as string;\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const isRequestHttps = isUrlHttps(requestUrl);\n const isProxyHttps = isUrlHttps(proxySettings.host);\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\nexport 
function isUrlHttps(url: string): boolean {\n const urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: tunnel.HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n\nfunction isValidPort(port: number): boolean {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js new file mode 100644 index 0000000000..ff08c3dc32 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export var QueryCollectionFormat; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. 
`?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(QueryCollectionFormat || (QueryCollectionFormat = {})); +//# sourceMappingURL=queryCollectionFormat.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map new file mode 100644 index 0000000000..2a917a84b5 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.js","sourceRoot":"","sources":["../../src/queryCollectionFormat.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAN,IAAY,qBAqBX;AArBD,WAAY,qBAAqB;IAC/B;;OAEG;IACH,kCAAS,CAAA;IACT;;OAEG;IACH,kCAAS,CAAA;IACT;;OAEG;IACH,mCAAU,CAAA;IACV;;OAEG;IACH,oCAAW,CAAA;IACX;;OAEG;IACH,wCAAe,CAAA;AACjB,CAAC,EArBW,qBAAqB,KAArB,qBAAqB,QAqBhC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n /**\n * CSV: Each pair of segments joined by a single comma.\n */\n Csv = \",\",\n /**\n * SSV: Each pair of segments joined by a single space character.\n */\n Ssv = \" \",\n /**\n * TSV: Each pair of segments joined by a single tab character.\n */\n Tsv = \"\\t\",\n /**\n * Pipes: Each pair of segments joined by a single pipe character.\n */\n Pipes = \"|\",\n /**\n * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2`\n */\n Multi = \"Multi\",\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/restError.js b/node_modules/@azure/core-http/dist-esm/src/restError.js new file mode 100644 index 0000000000..88d4a7803c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/restError.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Sanitizer } from "./util/sanitizer"; +import { custom } from "./util/inspect"; +const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ +export class RestError extends Error { + constructor(message, code, statusCode, request, response) { + super(message); + this.name = "RestError"; + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. 
+ */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/restError.js.map b/node_modules/@azure/core-http/dist-esm/src/restError.js.map new file mode 100644 index 0000000000..8270ecfe29 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/restError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.js","sourceRoot":"","sources":["../../src/restError.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAE7C,OAAO,EAAE,MAAM,EAAE,MAAM,gBAAgB,CAAC;AAExC,MAAM,cAAc,GAAG,IAAI,SAAS,EAAE,CAAC;AAEvC;;GAEG;AACH,MAAM,OAAO,SAAU,SAAQ,KAAK;IA8BlC,YACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC;QAEhC,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QAEzB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;IACnD,CAAC;IAED;;OAEG;IACH,CAAC,MAAM,CAAC;QACN,OAAO,cAAc,IAAI,CAAC,OAAO,OAAO,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC;IAC1E,CAAC;;AAnDD;;GAEG;AACa,4BAAkB,GAAW,oBAAoB,CAAC;AAClE;;GAEG;AACa,qBAAW,GAAW,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { Sanitizer } from \"./util/sanitizer\";\nimport { WebResourceLike } from \"./webResource\";\nimport { custom } from \"./util/inspect\";\n\nconst errorSanitizer = new Sanitizer();\n\n/**\n * An error resulting from an HTTP request to a service endpoint.\n */\nexport class RestError extends Error {\n /**\n * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.)\n */\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n /**\n * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete.\n */\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n /**\n * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT).\n */\n code?: string;\n /**\n * The HTTP status code of the response, if one was returned.\n */\n statusCode?: number;\n /**\n * Outgoing request.\n */\n request?: WebResourceLike;\n /**\n * Incoming response.\n */\n response?: HttpOperationResponse;\n /**\n * Any additional details. 
In the case of deserialization errors, can be the processed response.\n */\n details?: unknown;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ) {\n super(message);\n this.name = \"RestError\";\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n\n /**\n * Logging method for util.inspect in Node\n */\n [custom](): string {\n return `RestError: ${this.message} \\n ${errorSanitizer.sanitize(this)}`;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serializer.js b/node_modules/@azure/core-http/dist-esm/src/serializer.js new file mode 100644 index 0000000000..c8b476247c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serializer.js @@ -0,0 +1,906 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* eslint-disable eqeqeq */ +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; +import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export class Serializer { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value != undefined) { + const valueAsNumber = value; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + const valueAsArray = value; + if (MaxItems != undefined && valueAsArray.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && valueAsArray.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && valueAsArray.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && valueAsArray.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper, object, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper, responseBody, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xmlCharKey; + const castResponseBody = responseBody; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (castResponseBody[XML_ATTRKEY] != undefined && + castResponseBody[xmlCharKey] != undefined) { + responseBody = castResponseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = base64.encodeByteArray(value); + } + return returnValue; +} +function serializeBase64UrlType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = bufferToBase64Url(value) || ""; + } + return returnValue; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!utils.isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by `className`. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + * @param objectName - Name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && + (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? 
`xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName != undefined) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. 
The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + const tempArray = []; + for (let i = 0; 
i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export function serializeObject(toSerialize) { + const castToSerialize = toSerialize; + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + const dictionary = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(castToSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + const result = {}; + for (const key of o) { + result[key] = key; + } + return result; +} +/** + * String enum containing the string types of property mappers. 
+ */ +// eslint-disable-next-line @typescript-eslint/no-redeclare +export const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serializer.js.map b/node_modules/@azure/core-http/dist-esm/src/serializer.js.map new file mode 100644 index 0000000000..86e9af376d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serializer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../../src/serializer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,2BAA2B;AAE3B,OAAO,KAAK,MAAM,MAAM,eAAe,CAAC;AACxC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,0BAA0B,CAAC;AAEvF,qLAAqL;AAErL;;;;GAIG;AACH,MAAM,OAAO,UAAU;IACrB;IACE;;OAEG;IACa,eAAuC,EAAE;IACzD;;OAEG;IACa,KAAe;QAJf,iBAAY,GAAZ,YAAY,CAA6B;QAIzC,UAAK,GAAL,KAAK,CAAU;IAC9B,CAAC;IAEJ;;;;;OAKG;IACH,mBAAmB,CAAC,MAAc,EAAE,KAAc,EAAE,UAAkB;QACpE,MAAM,cAAc,GAAG,CACrB,cAAuC,EACvC,eAAoB,EACb,EAAE;YACT,MAAM,IAAI,KAAK,CACb,IAAI,UAAU,iBAAiB,KAAK,oCAAoC,cAAc,MAAM,eAAe,GAAG,CAC/G,CAAC;QACJ,CAAC,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YAC5C,MAAM,aAAa,GAAG,KAAe,CAAC;YACtC,MAAM,EACJ,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,SAAS,EACT,UAAU,EACV,OAAO,EACP,WAAW,GACZ,GAAG,MAAM,CAAC,WAAW,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;gBACtE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;gBACtE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;gBACrE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;gBACrE,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,MAAM,YAAY,GAAG,KAAc,CAAC;YACpC,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;gBAC3D,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;gBAC7D,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;gBAC3D,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;gBAC7D,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,aAAa,GAAG,UAAU,KAAK,CAAC,EAAE;gBAC/D,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,MAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,YAAY,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,CAAS,EAAE,EAAc,EAAE,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EACnF;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;IACH,CAAC;IAED;;;;;;;;OAQG;IACH,SAAS,CACP,MAAc,EACd,MAAe,EACf,UAAmB,EACnB,UAA6B,EAAE;;QAE/B,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;YAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;YACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YAC5C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,M
AAM,GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;QAED,mDAAmD;QACnD,sDAAsD;QACtD,mDAAmD;QACnD,wBAAwB;QACxB,iCAAiC;QACjC,0CAA0C;QAC1C,0CAA0C;QAC1C,qCAAqC;QACrC,0CAA0C;QAE1C,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,uBAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,+BAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,kBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;YACL,8BAA8B;YAC9B,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,IAAI,EAAE;gBACvC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,+CAA+C,CAAC,KAAK,IAAI,EAAE;gBACrF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBAC/C,MAAM,UAAU,GAAe,MAAoB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,sDAAsD,CAAC,KAAK,IAAI,EACjF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;aACpE;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;aACpE;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,qBAAqB,CAC7B,IAAI,EACJ,MAAwB,EACxB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAA0B,EAC1B,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAC9B,IAAI,EACJ,MAAyB,EACzB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;aACH;SACF;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,MAAc,EACd,YAAqB,EACrB,UAAkB,EAClB,UAA6B,EAAE;;QAE/B,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;YAChC,WAAW,EAAE,MAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;YACzC,UAAU,EAAE,MAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACzE,sEAAsE;gBACtE,qDAAqD;gBACrD,qEAAqE;gBACrE,YAAY,GAAG,EAAE,CAAC;aACnB;YACD,+FAA+F;YAC/F,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,wBAAwB,CAChC,IAAI,EACJ,MAAyB,EACzB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;SACH;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;gBAC7C,MAAM,gBAAgB,GAAG,YAAuC,CAAC;gBACjE;;;;mBAIG;gBACH,IACE,gBAAgB,CAAC,WAAW,CAAC,IAAI,SAAS;oBAC1C,gBAAgB,CAAC,UAAU,CAAC,IAAI,SAAS,EACzC;oBACA,YAAY,GAAG,gBAAgB,CAAC,UAAU,CAAC,CAAC;iBAC7C;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;gBAC1C,OAAO,GAAG,UAAU,CAAC,YAAsB,CAAC,CAAC;gBAC7C,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,
CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,kDAAkD,CAAC,KAAK,IAAI,EAAE;gBACxF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,oCAAoC,CAAC,KAAK,IAAI,EAAE;gBAC1E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAsB,CAAC,CAAC;aAC5C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,cAAc,CAAC,YAAsB,CAAC,CAAC;aAClD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,YAAsB,CAAC,CAAC;aACvD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,oBAAoB,CAAC,YAAsB,CAAC,CAAC;aACxD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAAwB,EACxB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;aACH;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;CACF;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,CAAC,CAAC,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;IACD,wBAAwB;IACxB,MAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;IAC3C,uBAAuB;IACvB,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;IACD,uBAAuB;IACvB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;IAChD,wBAAwB;IACxB,OAAO,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAK,MAAM,IAAI,IAAI,QAAQ,EAAE;YAC3B,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YACxC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,eAAe,KAAK,0BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YAC/C,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,gBAAgB,KAAK,2BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;YAC7C,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,gBAAgB,KAAK,4CAA4C,CAC/E,CAAC;aA
CH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,eAAe,KAAK,2BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;YAC/C,MAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,CAAC,CAAC,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,CAAC,CAAC,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,OAAO,IAAI,KAAK,QAAQ,CAAC,IAAI,KAAK,YAAY,IAAI,CAAC,EACpF;gBACA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,uGAAuG,CACrH,CAAC;aACH;SACF;KACF;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,qDAAqD,UAAU,mBAAmB,CACnF,CAAC;KACH;IACD,MAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,EAAE;QAC5C,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;IACxB,CAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACb,GAAG,KAAK,6BAA6B,UAAU,2BAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,GAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,8BAA8B,CAAC,CAAC;SAC9D;QACD,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;KAC7C;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,8BAA8B,CAAC,CAAC;SAC9D;QACD,WAAW,GAAG,iBAAiB,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;KAC9C;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;YACtC,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,4DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAAY,IAAI;oBACnB,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;oBACtC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,4DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,oBAAoB,CAAC,KAAK,IAAI,EAAE;YACxD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,6DAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,qEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,sDAAsD,KAAK,IAAI,CAC7E
,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC;IAEpC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,yBAAyB,CAAC,CAAC;KACzD;IACD,MAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,wDAAwD;YACtD,0CAA0C,UAAU,GAAG,CAC1D,CAAC;KACH;IACD,MAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI,KAAK,IAAI,WAAW,CAAC,YAAY,EAAE;YACrC,MAAM,QAAQ,GAAG,WAAW,CAAC,kBAAkB;gBAC7C,CAAC,CAAC,SAAS,WAAW,CAAC,kBAAkB,EAAE;gBAC3C,CAAC,CAAC,OAAO,CAAC;YACZ,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;gBACzC,SAAS,CAAC,CAAC,CAAC,qBAAQ,eAAe,CAAE,CAAC;gBACtC,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,YAAY,EAAE,CAAC;aACtE;iBAAM;gBACL,SAAS,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;gBAClB,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;gBACnD,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,YAAY,EAAE,CAAC;aACtE;SACF;aAAM;YACL,SAAS,CAAC,CAAC,CAAC,GAAG,eAAe,CAAC;SAChC;KACF;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC;IAEpC,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,0BAA0B,CAAC,CAAC;KAC1D;IACD,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,2DAA2D;YACzD,0CAA0C,UAAU,GAAG,CAC1D,CAAC;KACH;IACD,MAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;QACrC,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;QAC1F,gFAAgF;QAChF,cAAc,CAAC,GAAG,CAAC,GAAG,iBAAiB,CAAC,SAAS,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;KACrF;IAED,kDAAkD;IAClD,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;QAChC,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,MAAM,CAAC,kBAAkB,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;QAE5F,MAAM,MAAM,GAAG,cAAc,CAAC;QAC9B,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,YAAY,EAAE,CAAC;QAC1D,OAAO,MAAM,CAAC;KACf;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;GAKG;AACH,SAAS,2BAA2B,CAClC,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,MAAM,oBAAoB,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IAE9D,IAAI,CAAC,oBAAoB,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE;QAClD,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,OAAO,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,IAAI,CAAC,oBAAoB,CAAC;KAC/C;IAED,OAAO,oBAAoB,CAAC;AAC9B,CAAC;AAED;;;;;GAKG;AACH,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;IACxC,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACb,yBAAyB,UAAU,oCAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,IAAI,CACN,CAAC;KACH;IAED,OAAO,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;AAC5C,CAAC;AAED;;;;GAIG;AACH,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,mDAAmD,MAAM,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,CAAC;SAC/F;QACD,UAAU,GAAG,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,IAAI,CAAC,eAAe,CAAC;QAC/C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;gBACnD,WAAW,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,cACpC,MAAM,CAAC,IAAI,CAAC,SACd,iBAAiB,UAAU,IAAI,CAClC,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MA
AuB,EACvB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC;IAEpC,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IAAI,MAAM,IAAI,SAAS,EAAE;QACvB,MAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;YACzC,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAA4B,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,MAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAK,MAAM,QAAQ,IAAI,KAAK,EAAE;oBAC5B,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IACE,WAAW,IAAI,SAAS;wBACxB,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,CAAC,EACvE;wBACA,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;oBAChC,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB;wBACxC,CAAC,CAAC,SAAS,MAAM,CAAC,kBAAkB,EAAE;wBACtC,CAAC,CAAC,OAAO,CAAC;oBACZ,YAAY,CAAC,WAAW,CAAC,mCACpB,YAAY,CAAC,WAAW,CAAC,KAC5B,CAAC,QAAQ,CAAC,EAAE,MAAM,CAAC,YAAY,GAChC,CAAC;iBACH;gBACD,MAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;oBAClC,CAAC,CAAC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;oBAClD,CAAC,CAAC,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;gBAEF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,MAAM,KAAK,GAAG,iBAAiB,CAAC,cAAc,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;oBACjF,IAAI,KAAK,IAAI,cAAc,CAAC,cAAc,EAAE;wBAC1C,uEAAuE;wBACvE,2DAA2D;wBAC3D,gCAAgC;wBAChC,YAAY,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC;wBAC5D,YAAY,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBACvD;yBAAM,IAAI,KAAK,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/C,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,cAAc,CAAC,cAAe,CAAC,EAAE,KAAK,EAAE,CAAC;qBACtE;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,KAAK,CAAC;qBAChC;iBACF;aACF;SACF;QAED,MAAM,0BAA0B,GAAG,2BAA2B,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC/F,IAAI,0BAA0B,EAAE;YAC9B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAC1C,KAAK,MAAM,cAAc,IAAI,MAAM,EAAE;gBACnC,MAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,KAAK,cAAc,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,EACzC,OAAO,CACR,CAAC;iBACH;aACF;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,iBAAiB,CACxB,cAAsB,EACtB,eAAoB,EACpB,KAAc,EACd,OAAoC;IAEpC,IAAI,CAAC,KAAK,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;QAC1C,OAAO,eAAe,CAAC;KACxB;IAED,MAAM,QAAQ,GAAG,cAAc,CAAC,kBAAkB;QAChD,CAAC,CAAC,SAAS,cAAc,CAAC,kBAAkB,EAAE;QAC9C,CAAC,CAAC,OAAO,CAAC;IACZ,MAAM,YAAY,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,cAAc,CAAC,YAAY,EAAE,CAAC;IAEjE,IAAI,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACpD,IAAI,eAAe,CAAC,WAAW,CAAC,EAAE;YAChC,OAAO,eAAe,CAAC;SACxB;aAAM;YACL,MAAM,MAAM,qBAAa,eAAe,CAAE,CAAC;YAC3C,MAAM,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC;YACnC,OAAO,MAAM,CAAC;SACf;KACF;IACD,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;I
AC7C,MAAM,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC;IACnC,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB,EAAE,OAAoC;IACtF,OAAO,CAAC,WAAW,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAClE,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB,EAClB,OAAoC;;IAEpC,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,MAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;QACzC,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QACpC,MAAM,EAAE,cAAc,EAAE,OAAO,EAAE,cAAc,EAAE,GAAG,cAAc,CAAC;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,MAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,MAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAK,MAAM,SAAS,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;gBACjD,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,EAClB,OAAO,CACR,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,WAAW,CAAC,EAAE;gBAC9D,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,WAAW,CAAC,CAAC,OAAQ,CAAC,EACnC,kBAAkB,EAClB,OAAO,CACR,CAAC;aACH;iBAAM;gBACL,MAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B;;;;;;;;;;;;;sBAaE;oBACF,MAAM,OAAO,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBACvC,MAAM,WAAW,GAAG,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAG,cAAe,CAAC,mCAAI,EAAE,CAAC;oBACrD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;iBACH;qBAAM;oBACL,MAAM,QAAQ,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;oBAC7C,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,OAAO,CACR,CAAC;iBACH;aACF;SACF;aAAM;YACL,kFAAkF;YAClF,IAAI,gBAAgB,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;YACvB,sCAAsC;YACtC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;gBACxB,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,MAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;YACtE,sEAAsE;YACtE,yEAAyE;YACzE,kFAAkF;YAClF,kFAAkF;YAClF,gGAAgG;YAChG,8FAA8F;YAC9F,qFAAqF;YACrF,mFAAmF;YACnF,sFAAsF;YACtF,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,CAAC;YACpB,SAAS;YACT,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,MAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;gBACF,yFAAyF;gBACzF,6CAA6C;gBAC7C,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;oBAC7C,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;wBAC3D,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;qBACtB;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,MAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,
0BAA0B,EAAE;QAC9B,MAAM,oBAAoB,GAAG,CAAC,gBAAwB,EAAW,EAAE;YACjE,KAAK,MAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC;QAEF,KAAK,MAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,EAC3C,OAAO,CACR,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;YAC3C,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,EAAE,OAAO,CAAC,EACnC;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB,EAClB,OAAoC;IAEpC,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,2DAA2D;YACzD,0CAA0C,UAAU,EAAE,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,MAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;YAC3C,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;SAC7F;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB,EAClB,OAAoC;IAEpC,MAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,wDAAwD;YACtD,0CAA0C,UAAU,EAAE,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;YAChC,+FAA+F;YAC/F,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,MAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACnC,OAAO,EACP,YAAY,CAAC,CAAC,CAAC,EACf,GAAG,UAAU,IAAI,CAAC,GAAG,EACrB,OAAO,CACR,CAAC;SACH;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,MAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,MAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,MAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;oBAC7B,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,MAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,OAAO,CACL,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CACrE,CAAC;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,OAAO,CACL,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,CAChE,CAAC;AACJ,CAAC;AAyTD;;GAEG;AACH,MAAM,UAAU,eAAe,CAAC,WAAoB;IAClD,MAAM,eAAe,GAAG,WAAsC,CAAC;IAC/D,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,MAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GA
AG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,MAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,MAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC;SACnE;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,SAAS,OAAO,CAAmB,CAAW;IAC5C,MAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAK,MAAM,GAAG,IAAI,CAAC,EAAE;QACnB,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,2DAA2D;AAC3D,MAAM,CAAC,MAAM,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/* eslint-disable eqeqeq */\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\n\n// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications.\n\n/**\n * Used to map raw response objects to final shapes.\n * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON.\n * Also allows pulling values from headers, as well as inserting default values and constants.\n */\nexport class Serializer {\n constructor(\n /**\n * The provided model mapper.\n */\n public readonly modelMappers: { [key: string]: any } = {},\n /**\n * Whether the contents are XML or not.\n */\n public readonly isXML?: boolean\n ) {}\n\n /**\n * Validates constraints, if any. This function will throw if the provided value does not respect those constraints.\n * @param mapper - The definition of data models.\n * @param value - The value.\n * @param objectName - Name of the object. 
Used in the error messages.\n */\n validateConstraints(mapper: Mapper, value: unknown, objectName: string): void {\n const failValidation = (\n constraintName: keyof MapperConstraints,\n constraintValue: any\n ): Error => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const valueAsNumber = value as number;\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n const valueAsArray = value as any[];\n if (MaxItems != undefined && valueAsArray.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && valueAsArray.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && valueAsArray.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && valueAsArray.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n valueAsArray.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param object - A valid Javascript object to be serialized.\n * @param objectName - Name of the serialized object.\n * @param options - additional options to deserialization.\n * @returns A valid serialized Javascript object.\n */\n serialize(\n mapper: Mapper,\n object: unknown,\n objectName?: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? 
XML_CHARKEY,\n };\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/i) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/i) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/i) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = serializeByteArrayType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = serializeBase64UrlType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = serializeSequenceType(\n this,\n mapper as SequenceMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = serializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Composite$/i) !== null) {\n payload = serializeCompositeType(\n this,\n mapper as CompositeMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param responseBody - A valid Javascript entity to be deserialized.\n * @param objectName - Name of the deserialized object.\n * @param options - Controls behavior of XML parser and builder.\n * @returns A valid deserialized Javascript object.\n */\n deserialize(\n mapper: Mapper,\n responseBody: unknown,\n objectName: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/i) !== null) {\n payload = deserializeCompositeType(\n this,\n mapper as CompositeMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else {\n if (this.isXML) {\n const xmlCharKey = updatedOptions.xmlCharKey;\n const castResponseBody = responseBody as Record;\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\" i.e., XML_ATTRKEY) and body (\"#\" i.e., XML_CHARKEY) properties,\n * then just reduce the responseBody value to the body (\"#\" i.e., XML_CHARKEY) property.\n */\n if (\n castResponseBody[XML_ATTRKEY] != undefined &&\n castResponseBody[xmlCharKey] != undefined\n ) {\n responseBody = castResponseBody[xmlCharKey];\n }\n }\n\n if (mapperType.match(/^Number$/i) !== null) {\n payload = parseFloat(responseBody as string);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/i) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {\n payload = new Date(responseBody as string);\n } else if (mapperType.match(/^UnixTime$/i) !== null) {\n payload = unixTimeToDate(responseBody as number);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = base64.decodeString(responseBody as string);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = base64UrlToByteArray(responseBody as string);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = deserializeSequenceType(\n this,\n mapper as SequenceMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string): string {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n }\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // 
Base64Url to Base64.\n str = str.replace(/-/g, \"+\").replace(/_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/i) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/i) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/i) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/i) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/i) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !((typeof Blob === \"function\" || typeof Blob === \"object\") && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = base64.encodeByteArray(value);\n }\n return returnValue;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = bufferToBase64Url(value) || \"\";\n }\n return returnValue;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string): any {\n if (value != undefined) {\n if (typeName.match(/^Date$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/i) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. Instead was \"${value}\".`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any[] {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n const serializedValue = serializer.serialize(elementType, object[i], objectName, options);\n\n if (isXml && elementType.xmlNamespace) {\n const xmlnsKey = elementType.xmlNamespacePrefix\n ? 
`xmlns:${elementType.xmlNamespacePrefix}`\n : \"xmlns\";\n if (elementType.type.name === \"Composite\") {\n tempArray[i] = { ...serializedValue };\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n } else {\n tempArray[i] = {};\n tempArray[i][options.xmlCharKey] = serializedValue;\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n }\n } else {\n tempArray[i] = serializedValue;\n }\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): { [key: string]: any } {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n const serializedValue = serializer.serialize(valueType, object[key], objectName, options);\n // If the element needs an XML namespace we need to add it within the $ property\n tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);\n }\n\n // Add the namespace to the root element if needed\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : \"xmlns\";\n\n const result = tempDictionary;\n result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace };\n return result;\n }\n\n return tempDictionary;\n}\n\n/**\n * Resolves the additionalProperties property from a referenced mapper.\n * @param serializer - The serializer containing the entire set of mappers.\n * @param mapper - The composite mapper to resolve.\n * @param objectName - Name of the object being serialized.\n */\nfunction resolveAdditionalProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): SequenceMapper | BaseMapper | CompositeMapper | DictionaryMapper | EnumMapper | undefined {\n const additionalProperties = mapper.type.additionalProperties;\n\n if (!additionalProperties && mapper.type.className) {\n const modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n return modelMapper?.type.additionalProperties;\n }\n\n return additionalProperties;\n}\n\n/**\n * Finds the mapper referenced by `className`.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveReferencedMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): CompositeMapper | undefined {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n return serializer.modelMappers[className];\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const 
modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${mapper.type.className}\".`);\n }\n modelProps = modelMapper?.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(modelMapper)}\" of type \"${\n mapper.type.className\n }\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (\n childObject == undefined &&\n (object[key] != undefined || propertyMapper.defaultValue !== undefined)\n ) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix\n ? `xmlns:${mapper.xmlNamespacePrefix}`\n : \"xmlns\";\n parentObject[XML_ATTRKEY] = {\n ...parentObject[XML_ATTRKEY],\n [xmlnsKey]: mapper.xmlNamespace,\n };\n }\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? 
objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName,\n options\n );\n\n if (serializedValue !== undefined && propName != undefined) {\n const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);\n if (isXml && propertyMapper.xmlIsAttribute) {\n // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {};\n parentObject[XML_ATTRKEY][propName] = serializedValue;\n } else if (isXml && propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: value };\n } else {\n parentObject[propName] = value;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]',\n options\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction getXmlObjectValue(\n propertyMapper: Mapper,\n serializedValue: any,\n isXml: boolean,\n options: Required\n): any {\n if (!isXml || !propertyMapper.xmlNamespace) {\n return serializedValue;\n }\n\n const xmlnsKey = propertyMapper.xmlNamespacePrefix\n ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}`\n : \"xmlns\";\n const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace };\n\n if ([\"Composite\"].includes(propertyMapper.type.name)) {\n if (serializedValue[XML_ATTRKEY]) {\n return serializedValue;\n } else {\n const result: any = { ...serializedValue };\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n }\n }\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n}\n\nfunction isSpecialXmlProperty(propertyName: string, options: Required): boolean {\n return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName,\n options\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody[XML_ATTRKEY][xmlName!],\n propertyObjectName,\n options\n );\n } else {\n const propertyName = xmlElementName || xmlName || serializedName;\n if (propertyMapper.xmlIsWrapped) {\n /* a list of wrapped by \n For the xml example below\n \n ...\n ...\n \n the responseBody has\n {\n Cors: {\n CorsRule: [{...}, {...}]\n }\n }\n xmlName is \"Cors\" and xmlElementName is\"CorsRule\".\n */\n const wrapped = responseBody[xmlName!];\n const elementList = wrapped?.[xmlElementName!] ?? 
[];\n instance[key] = serializer.deserialize(\n propertyMapper,\n elementList,\n propertyObjectName,\n options\n );\n } else {\n const property = responseBody[propertyName!];\n instance[key] = serializer.deserialize(\n propertyMapper,\n property,\n propertyObjectName,\n options\n );\n }\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [k, v] of Object.entries(instance)) {\n if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) {\n arrayInstance[k] = v;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string): boolean => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]',\n options\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key, options)\n ) {\n instance[key] = responseBody[key];\n }\n }\n }\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n 
objectName: string,\n options: Required\n): { [key: string]: any } {\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any[] {\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(\n element,\n responseBody[i],\n `${objectName}[${i}]`,\n options\n );\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string): any {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\n/**\n * Description of various value constraints such as integer ranges and string regex.\n */\nexport interface MapperConstraints {\n /**\n * The value should be less than or equal to the `InclusiveMaximum` value.\n */\n InclusiveMaximum?: number;\n /**\n * The value should be less than the `ExclusiveMaximum` value.\n */\n ExclusiveMaximum?: number;\n /**\n * The value should be greater than or equal to the `InclusiveMinimum` value.\n */\n InclusiveMinimum?: number;\n /**\n * The value should be greater than the `InclusiveMinimum` value.\n */\n ExclusiveMinimum?: number;\n /**\n * The length should be smaller than the `MaxLength`.\n */\n MaxLength?: number;\n /**\n * The length should be bigger than the `MinLength`.\n */\n MinLength?: number;\n /**\n * The value must match the pattern.\n */\n Pattern?: RegExp;\n /**\n * The value must contain fewer items than the MaxItems value.\n */\n MaxItems?: number;\n /**\n * The value must contain more items than the `MinItems` value.\n */\n MinItems?: number;\n /**\n * The value must contain only unique items.\n */\n UniqueItems?: true;\n /**\n * The value should be exactly divisible by the `MultipleOf` value.\n */\n MultipleOf?: number;\n}\n\n/**\n * Type of the mapper. Includes known mappers.\n */\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\n/**\n * The type of a simple mapper.\n */\nexport interface SimpleMapperType {\n /**\n * Name of the type of the property.\n */\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\n/**\n * Helps build a mapper that describes how to map a set of properties of an object based on other mappers.\n *\n * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`.\n */\nexport interface CompositeMapperType {\n /**\n * Name of the composite mapper type.\n */\n name: \"Composite\";\n\n /**\n * Use `className` to reference another type definition.\n */\n className?: string;\n\n /**\n * Use `modelProperties` when the reference to the other type has been resolved.\n */\n modelProperties?: { [propertyName: string]: Mapper };\n\n /**\n * Used when a model has `additionalProperties: true`. 
Allows the generic processing of unnamed model properties on the response object.\n */\n additionalProperties?: Mapper;\n\n /**\n * The name of the top-most parent scheme, the one that has no parents.\n */\n uberParent?: string;\n\n /**\n * A polymorphic discriminator.\n */\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\n/**\n * Helps build a mapper that describes how to parse a sequence of mapped values.\n */\nexport interface SequenceMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Sequence\";\n /**\n * The mapper to use to map each one of the properties of the sequence.\n */\n element: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse a dictionary of mapped values.\n */\nexport interface DictionaryMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Dictionary\";\n /**\n * The mapper to use to map the value of each property in the dictionary.\n */\n value: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse an enum value.\n */\nexport interface EnumMapperType {\n /**\n * Name of the enum type mapper.\n */\n name: \"Enum\";\n /**\n * Values allowed by this mapper.\n */\n allowedValues: any[];\n}\n\n/**\n * The base definition of a mapper. Can be used for XML and plain JavaScript objects.\n */\nexport interface BaseMapper {\n /**\n * Name for the xml element\n */\n xmlName?: string;\n /**\n * Xml element namespace\n */\n xmlNamespace?: string;\n /**\n * Xml element namespace prefix\n */\n xmlNamespacePrefix?: string;\n /**\n * Determines if the current property should be serialized as an attribute of the parent xml element\n */\n xmlIsAttribute?: boolean;\n /**\n * Name for the xml elements when serializing an array\n */\n xmlElementName?: string;\n /**\n * Whether or not the current property should have a wrapping XML element\n */\n xmlIsWrapped?: boolean;\n /**\n * Whether or not the current property is readonly\n */\n readOnly?: boolean;\n /**\n * Whether or not the current property is a constant\n */\n isConstant?: boolean;\n /**\n * Whether or not the current property is required\n */\n required?: boolean;\n /**\n * Whether or not the current property allows mull as a value\n */\n nullable?: boolean;\n /**\n * The name to use when serializing\n */\n serializedName?: string;\n /**\n * Type of the mapper\n */\n type: MapperType;\n /**\n * Default value when one is not explicitly provided\n */\n defaultValue?: any;\n /**\n * Constraints to test the current value against\n */\n constraints?: MapperConstraints;\n}\n\n/**\n * Mappers are definitions of the data models used in the library.\n * These data models are part of the Operation or Client definitions in the responses or parameters.\n */\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\n/**\n * Used to disambiguate discriminated type unions.\n * For example, if response can have many shapes but also includes a 'kind' field (or similar),\n * that field can be used to determine how to deserialize the response to the correct type.\n */\nexport interface PolymorphicDiscriminator {\n /**\n * Name of the discriminant property in the original JSON payload, e.g. 
`@odata.kind`.\n */\n serializedName: string;\n /**\n * Name to use on the resulting object instead of the original property name.\n * Useful since the JSON property could be difficult to work with.\n * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`.\n */\n clientName: string;\n /**\n * It may contain any other property.\n */\n [key: string]: string;\n}\n\n/**\n * A mapper composed of other mappers.\n */\nexport interface CompositeMapper extends BaseMapper {\n /**\n * The type descriptor of the `CompositeMapper`.\n */\n type: CompositeMapperType;\n}\n\n/**\n * A mapper describing arrays.\n */\nexport interface SequenceMapper extends BaseMapper {\n /**\n * The type descriptor of the `SequenceMapper`.\n */\n type: SequenceMapperType;\n}\n\n/**\n * A mapper describing plain JavaScript objects used as key/value pairs.\n */\nexport interface DictionaryMapper extends BaseMapper {\n /**\n * The type descriptor of the `DictionaryMapper`.\n */\n type: DictionaryMapperType;\n /**\n * Optionally, a prefix to add to the header collection.\n */\n headerCollectionPrefix?: string;\n}\n\n/**\n * A mapper describing an enum value.\n */\nexport interface EnumMapper extends BaseMapper {\n /**\n * The type descriptor of the `EnumMapper`.\n */\n type: EnumMapperType;\n}\n\n/**\n * An interface representing an URL parameter value.\n */\nexport interface UrlParameterValue {\n /**\n * The URL value.\n */\n value: string;\n /**\n * Whether to keep or skip URL encoding.\n */\n skipUrlEncoding: boolean;\n}\n\n/**\n * Utility function that serializes an object that might contain binary information into a plain object, array or a string.\n */\nexport function serializeObject(toSerialize: unknown): any {\n const castToSerialize = toSerialize as Record;\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(castToSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\n/**\n * String enum containing the string types of property mappers.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js new file mode 100644 index 0000000000..b817630bf4 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js @@ -0,0 +1,622 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import * as utils from "./util/utils"; +import { MapperType } from "./serializer"; +import { DefaultDeserializationOptions, deserializationPolicy, } from "./policies/deserializationPolicy"; +import { DefaultKeepAliveOptions, keepAlivePolicy } from "./policies/keepAlivePolicy"; +import { DefaultRedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { DefaultRetryOptions, exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { logPolicy } from "./policies/logPolicy"; +import { getPathStringFromParameter, getPathStringFromParameterPath, } from "./operationParameter"; +import { getStreamResponseStatusCodes } from "./operationSpec"; +import { WebResource, isWebResourceLike, } from "./webResource"; +import { RequestPolicyOptions, } from "./policies/requestPolicy"; +import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; +import { isNode } from "./util/utils"; +import { isTokenCredential } from "@azure/core-auth"; +import { getDefaultUserAgentHeaderName, getDefaultUserAgentValue, userAgentPolicy, } from "./policies/userAgentPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { URLBuilder } from "./url"; +import { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +import { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { getCachedDefaultHttpClient } from "./httpClientCache"; +import { logger } from "./log"; +import { ndJsonPolicy } from "./policies/ndJsonPolicy"; +import { proxyPolicy } from "./policies/proxyPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { stringifyXML } from "./util/xml"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { tracingPolicy } from "./policies/tracingPolicy"; +/** + * ServiceClient sends service requests and receives responses. + */ +export class ServiceClient { + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(credentials, + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ + options) { + if (!options) { + options = {}; + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + let requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + logger.info("ServiceClient: using custom request policies"); + requestPolicyFactories = options.requestPolicyFactories; + } + else { + let authPolicyFactory = undefined; + if (isTokenCredential(credentials)) { + logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); + // Create a wrapped RequestPolicyFactory here so that we can provide the + // correct scope to the BearerTokenAuthenticationPolicy at the first time + // one is requested. 
This is needed because generated ServiceClient + // implementations do not set baseUri until after ServiceClient's constructor + // is finished, leaving baseUri empty at the time when it is needed to + // build the correct scope name. + const wrappedPolicyFactory = () => { + let bearerTokenPolicyFactory = undefined; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const serviceClient = this; + const serviceClientOptions = options; + return { + create(nextPolicy, createOptions) { + const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + if (!credentialScopes) { + throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + } + if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { + bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); + } + return bearerTokenPolicyFactory.create(nextPolicy, createOptions); + }, + }; + }; + authPolicyFactory = wrappedPolicyFactory(); + } + else if (credentials && typeof credentials.signRequest === "function") { + logger.info("ServiceClient: creating signing policy from provided credentials"); + authPolicyFactory = signingPolicy(credentials); + } + else if (credentials !== undefined && credentials !== null) { + throw new Error("The credentials argument must implement the TokenCredential interface"); + } + logger.info("ServiceClient: using default request policies"); + requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); + if (options.requestPolicyFactories) { + // options.requestPolicyFactories can also be a function that manipulates + // the default requestPolicyFactories array + const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + sendRequest(options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + let httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + let httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. 
+ */ + async sendOperationRequest(operationArguments, operationSpec, callback) { + var _a; + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const httpRequest = new WebResource(); + let result; + try { + const baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + const requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current queryParam + continue; + } + else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + if (options.spanOptions) { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
+ httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); + } + let rawResponse; + let sendRequestError; + try { + rawResponse = await this.sendRequest(httpRequest); + } + catch (error) { + sendRequestError = error; + } + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); + } + result = Promise.reject(sendRequestError); + } + else { + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); + } + } + catch (error) { + result = Promise.reject(error); + } + const cb = callback; + if (cb) { + result + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + return result; + } +} +export function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + var _a, _b, _c, _d, _e, _f; + const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + const updatedOptions = { + rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", + includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, + }; + const xmlCharKey = serializerOptions.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(utils.prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperType.String && + (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + let result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(authPolicyFactory, options) { + const factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (authPolicyFactory) { + factories.push(authPolicyFactory); + } + const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + factories.push(redirectPolicy()); + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + if (isNode) { + factories.push(proxyPolicy(options.proxySettings)); + } + factories.push(logPolicy({ logger: logger.info })); + return factories; +} +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
+ * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { + const requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } + let userAgentValue = undefined; + if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { + const userAgentInfo = []; + userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); + // Add the default user agent value if it isn't already specified + // by the userAgentPrefix option. + const defaultUserAgentInfo = getDefaultUserAgentValue(); + if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { + userAgentInfo.push(defaultUserAgentInfo); + } + userAgentValue = userAgentInfo.join(" "); + } + const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + if (isNode) { + requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); + } + const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); + if (redirectOptions.handleRedirects) { + requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); + } + if (authPolicyFactory) { + requestPolicyFactories.push(authPolicyFactory); + } + requestPolicyFactories.push(logPolicy(loggingOptions)); + if (isNode && pipelineOptions.decompressResponse === false) { + requestPolicyFactories.push(disableResponseDecompressionPolicy()); + } + return { + httpClient: pipelineOptions.httpClient, + requestPolicyFactories, + }; +} +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. 
+ */ +export function getPropertyParent(parent, propertyPath) { + if (parent && propertyPath) { + const propertyPathLength = propertyPath.length; + for (let i = 0; i < propertyPathLength - 1; ++i) { + const propertyName = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +export function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var _a; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); + if (propertyValue !== undefined && propertyValue !== null) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent !== undefined && parent !== null && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. 
+ */ +export function flattenResponse(_response, responseSpec) { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const addOperationResponse = (obj) => { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (typeName === "Sequence" || isPageableResponse) { + const arrayResponse = [...(_response.parsedBody || [])]; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); +} +function getCredentialScopes(options, baseUri) { + if (options === null || options === void 0 ? void 0 : options.credentialScopes) { + const scopes = options.credentialScopes; + return Array.isArray(scopes) + ? 
scopes.map((scope) => new URL(scope).toString()) + : new URL(scopes).toString(); + } + if (baseUri) { + return `${baseUri}/.default`; + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map new file mode 100644 index 0000000000..4c669b17eb --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.js","sourceRoot":"","sources":["../../src/serviceClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAA6C,UAAU,EAAc,MAAM,cAAc,CAAC;AACjG,OAAO,EACL,6BAA6B,EAE7B,qBAAqB,GACtB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,uBAAuB,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AACtF,OAAO,EAAE,sBAAsB,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACnF,OAAO,EAAE,mBAAmB,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAEhG,OAAO,EAAoB,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EAGL,0BAA0B,EAC1B,8BAA8B,GAC/B,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAiB,4BAA4B,EAAE,MAAM,iBAAiB,CAAC;AAC9E,OAAO,EAGL,WAAW,EAEX,iBAAiB,GAClB,MAAM,eAAe,CAAC;AACvB,OAAO,EAGL,oBAAoB,GACrB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,0BAA0B,CAAC;AACvF,OAAO,EAAmB,MAAM,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAmB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACtE,OAAO,EACL,6BAA6B,EAC7B,wBAAwB,EACxB,eAAe,GAChB,MAAM,4BAA4B,CAAC;AAMpC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACnC,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAC/D,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AACvD,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAC1C,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAmGzD;;GAEG;AACH,MAAM,OAAO,aAAa;IAsBxB;;;;OAIG;IACH,YACE,WAAwD;IACxD,iEAAiE;IACjE,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,CAAC;QACtE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;YACjD,MAAM,CAAC,IAAI,CAAC,8CAA8C,CAAC,CAAC;YAC5D,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,IAAI,iBAAiB,GAAqC,SAAS,CAAC;YACpE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;gBAClC,MAAM,CAAC,IAAI,CACT,sFAAsF,CACvF,CAAC;gBACF,wEAAwE;gBACxE,yEAAyE;gBACzE,oEAAoE;gBACpE,6EAA6E;gBAC7E,sEAAsE;gBACtE,gCAAgC;gBAChC,MAAM,oBAAoB,GAA+B,GAAG,EAAE;oBAC5D,IAAI,wBAAwB,GAAqC,SAAS,CAAC;oBAC3E,4DAA4D;oBAC5D,MAAM,aAAa,GAAG,IAAI,CAAC;oBAC3B,MAAM,oBAAoB,GAAG,OAAO,CAAC;oBACrC,OAAO;wBACL,MAAM,CAAC,UAAyB,EAAE,aAAmC;4BACnE,MAAM,gBAAgB,GAAG,mBAAmB,CAC1C,oBAAoB,EACpB,aAAa,CAAC,OAAO,CACtB,CAAC;4BAEF,IAAI,CAAC,gBAAgB,EAAE;gCACrB,MAAM,IAAI,KAAK,CACb,mKAAmK,CACpK,CAAC;6BACH;4BAED,IAAI,wBAAwB,KAAK,SAAS,IAAI,wBAAwB,KAAK,IAAI,EAAE;gCAC/E,wBAAwB,GAAG,+BAA+B,CACxD,WAAW,EACX,gBAAgB,CACjB,CAAC;6BACH;4BAED,OAAO,wBAAwB,CAAC,MAAM,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;wBACpE,CAAC;qBACF,CAAC;gBACJ,CAAC,CAAC;gBAEF,iBAAiB,GAAG,oBAAoB,EAAE,CAAC;aAC5C;iBAAM,IAAI,WAAW,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;gBACvE,MAAM,CAAC,IAAI,CAAC,kEAAkE,CAAC,CAAC;gBAChF,iBAAiB,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;aAC
hD;iBAAM,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;gBAC5D,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;aAC1F;YAED,MAAM,CAAC,IAAI,CAAC,+CAA+C,CAAC,CAAC;YAC7D,sBAAsB,GAAG,mCAAmC,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;YACzF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,yEAAyE;gBACzE,2CAA2C;gBAC3C,MAAM,yBAAyB,GAC7B,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACzD,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,KAAK,CAAC,oBAAoB,CACxB,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,MAAM,iBAAiB,GAAG,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,CAAC;QACxE,MAAM,WAAW,GAAoB,IAAI,WAAW,EAAE,CAAC;QAEvD,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,MAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,MAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAAK,MAAM,YAAY,IAAI,aAAa,CAAC,aAAa,EAAE;oBACtD,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,EACxC,iBAAiB,CAClB,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,IAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,GAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAAK,MAAM,cAAc,IAAI,aAAa,CAAC,eAAe,EAAE;oBAC1D,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,KAAK,SAAS,IAAI,mBAAmB,KAAK,IAAI,EAAE;wBACrE,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,EAC1C,iBAAiB,CAClB,CAAC;wBACF,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI,EACxC;4BACA,IAAI,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,6EAA6E;oCAC7E,SAAS;iCACV;qCAAM;oCACL,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,MAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC;4CACxB,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;qCAC9D;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,
CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI;4BACxC,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,MAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,IAAI,aAAa,CAAC,WAAW,EAAE;gBAC5C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAAK,MAAM,eAAe,IAAI,aAAa,CAAC,gBAAgB,EAAE;oBAC5D,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;wBACrD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,EAC3C,iBAAiB,CAClB,CAAC;wBACF,MAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;gCAC1C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,MAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,MAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,+HAA+H;oBAC9H,WAAmB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBACxD;gBAED,IAAI,OAAO,CAAC,cAAc,EAAE;oBAC1B,WAAW,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;iBACrD;gBAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,SAAS,IAAI,OAAO,CAAC,iBAAiB,KAAK,IAAI,EAAE;oBACjF,WAAW,CAAC,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC;iBAC3D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,yBAAyB,KAAK,SAAS,EAAE;gBACvD,WAAW,CAAC,yBAAyB,GAAG,4BAA4B,CAAC,aAAa,CAAC,CAAC;aACrF;YAED,IAAI,WAAkC,CAAC;YACvC,IAAI,gBAAgB,CAAC;YACrB,IAAI;gBACF,WAAW,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;aACnD;YAAC,OAAO,KAAU,EAAE;gBACnB,gBAAgB,GAAG,KAAK,CAAC;aAC1B;YACD,IAAI,gBAAgB,EAAE;gBACpB,IAAI,gBAAgB,CAAC,QAAQ,EAAE;oBAC7B,gBAAgB,CAAC,OAAO,GAAG,eAAe,CACxC,gBAAgB,CAAC,QAAQ,EACzB,aAAa,CAAC,SAAS,CAAC,gBAAgB,CAAC,UAAU,CAAC;wBAClD,aAAa,CAAC,SAAS,CAAC,SAAS,CAAC,CACrC,CAAC;iBACH;gBACD,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC;aAC3C;iBAAM;gBACL,MAAM,GAAG,OAAO,CAAC,OAAO,CACtB,eAAe,CAAC,WAAY,EAAE,aAAa,CAAC,SAAS,CAAC,WAAY,CAAC,MAAM,CAAC,CAAC,CAC5E,CAAC;aACH;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,GAAG,OAAO,CAAC,MA
AM,CAAC,KAAK,CAAC,CAAC;SAChC;QAED,MAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;iBACH,IAAI,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC;iBACvF,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED,MAAM,UAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;;IAE5B,MAAM,iBAAiB,GAAG,MAAA,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,mCAAI,EAAE,CAAC;IAC9E,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,iBAAiB,CAAC,QAAQ,mCAAI,EAAE;QAC1C,WAAW,EAAE,MAAA,iBAAiB,CAAC,WAAW,mCAAI,KAAK;QACnD,UAAU,EAAE,MAAA,iBAAiB,CAAC,UAAU,mCAAI,WAAW;KACxD,CAAC;IAEF,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;IAChD,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,MAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QACpD,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,GAC3F,UAAU,CAAC;QACb,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QAEtC,IAAI;YACF,IAAI,CAAC,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,QAAQ,EAAE;gBAC7E,MAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,EAC9B,cAAc,CACf,CAAC;gBAEF,MAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAEhD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,MAAM,QAAQ,GAAG,kBAAkB,CAAC,CAAC,CAAC,SAAS,kBAAkB,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC;oBAC9E,MAAM,KAAK,GAAG,wBAAwB,CACpC,YAAY,EACZ,QAAQ,EACR,QAAQ,EACR,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;oBACF,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7B,KAAK,CAAC,kBAAkB,CACtB,KAAK,EACL,cAAc,IAAI,OAAO,IAAI,cAAe,EAC5C,QAAQ,EACR,YAAY,CACb,EACD;4BACE,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;yBACX,CACF,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,KAAK,EAAE;4BACrC,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;yBACX,CAAC,CAAC;qBACJ;iBACF;qBAAM,IACL,QAAQ,KAAK,UAAU,CAAC,MAAM;oBAC9B,CAAC,CAAA,MAAA,aAAa,CAAC,WAAW,0CAAE,KAAK,CAAC,YAAY,CAAC,KAAI,aAAa,CAAC,SAAS,KAAK,MAAM,CAAC,EACtF;oBACA,oEAAoE;oBACpE,2BAA2B;oBAC3B,OAAO;iBACR;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CACb,UAAU,KAAK,CAAC,OAAO,2CAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,GAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAK,MAAM,iBAAiB,IAAI,aAAa,CAAC,kBAAkB,EAAE;YAChE,MAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,KAAK,SAAS,IAAI,sBAAsB,KAAK,IAAI,EAAE;gBAC3E,MAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,EAC7C,cAAc,CACf,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED;;GAEG;AACH,SAAS,wBAAwB,CAC/B,YAAgC,EAChC,QAAgB,EAChB,QAAgB,EAChB,eAAoB,EACpB,OAAoC;IAEpC,2FAA2F;IAC3F,sDAAsD;IACtD,IAAI,YAAY,IAAI,CAAC,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC/E,MAAM,MAAM,GAAQ,EAAE,CAAC;QACvB,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,YAAY,EAAE,CAAC;QACnD,OAAO,MAAM,CAAC;KACf;IAED,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAA
E;QAC7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,iBAAmD,EACnD,OAA6B;IAE7B,MAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,iBAAiB,EAAE;QACrB,SAAS,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;KACnC;IAED,MAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,MAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IACD,SAAS,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC;IACjC,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAI,MAAM,EAAE;QACV,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;IAEnD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,yBAAyB,CACvC,eAAwC,EACxC,iBAAwC;IAExC,MAAM,sBAAsB,GAA2B,EAAE,CAAC;IAE1D,IAAI,eAAe,CAAC,iBAAiB,EAAE;QACrC,sBAAsB,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;KAC7C;IAED,IAAI,cAAc,GAAG,SAAS,CAAC;IAC/B,IAAI,eAAe,CAAC,gBAAgB,IAAI,eAAe,CAAC,gBAAgB,CAAC,eAAe,EAAE;QACxF,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;QAErE,iEAAiE;QACjE,iCAAiC;QACjC,MAAM,oBAAoB,GAAG,wBAAwB,EAAE,CAAC;QACxD,IAAI,aAAa,CAAC,OAAO,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC,EAAE;YACtD,aAAa,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SAC1C;QAED,cAAc,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC1C;IAED,MAAM,gBAAgB,mCACjB,uBAAuB,GACvB,eAAe,CAAC,gBAAgB,CACpC,CAAC;IAEF,MAAM,YAAY,mCACb,mBAAmB,GACnB,eAAe,CAAC,YAAY,CAChC,CAAC;IAEF,MAAM,eAAe,mCAChB,sBAAsB,GACtB,eAAe,CAAC,eAAe,CACnC,CAAC;IAEF,IAAI,MAAM,EAAE;QACV,sBAAsB,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;KACxE;IAED,MAAM,sBAAsB,mCACvB,6BAA6B,GAC7B,eAAe,CAAC,sBAAsB,CAC1C,CAAC;IAEF,MAAM,cAAc,qBACf,eAAe,CAAC,cAAc,CAClC,CAAC;IAEF,sBAAsB,CAAC,IAAI,CACzB,aAAa,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,EAC5C,eAAe,CAAC,gBAAgB,CAAC,EACjC,eAAe,CAAC,EAAE,KAAK,EAAE,cAAc,EAAE,CAAC,EAC1C,6BAA6B,EAAE,EAC/B,qBAAqB,CAAC,sBAAsB,CAAC,oBAAoB,CAAC,EAClE,qBAAqB,EAAE,EACvB,sBAAsB,EAAE,EACxB,sBAAsB,CACpB,YAAY,CAAC,UAAU,EACvB,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB,CAC/B,CACF,CAAC;IAEF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,sBAAsB,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KACzE;IAED,IAAI,iBAAiB,EAAE;QACrB,sBAAsB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;KAChD;IAED,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,CAAC;IAEvD,IAAI,MAAM,IAAI,eAAe,CAAC,kBAAkB,KAAK,KAAK,EAAE;QAC1D,sBAAsB,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;KACnE;IAED,OAAO;QACL,UAAU,EAAE,eAAe,CAAC,UAAU;QACtC,sBAAsB;KACvB,CAAC;AACJ,CAAC;AAID;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,MAAsB,EAAE,YAAsB;IAC9E,IAAI,MAAM,IAAI,YAAY,EAAE;QAC1B,MAAM,kBAAkB,GAAW,YAAY,CAAC,MAAM,CAAC;QACvD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE;YAC/C,MAAM,YAAY,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;YAC7C,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE;gBACzB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;aAC3B;YACD,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,CAAC;SAC/B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;I
AEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,MAAM,iBAAiB,GAAG,MAAA,kBAAkB,CAAC,OAAO,0CAAE,iBAAiB,CAAC;IACxE,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;4BACxB,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC;aAC7F;YAED,0CAA0C;YAC1C,MAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;SACtF;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,MAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,MAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,MAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;YACF,0CAA0C;YAC1C,MAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,EAAE,iBAAiB,CAAC,CAAC;YAC3F,IAAI,aAAa,KAAK,SAAS,IAAI,aAAa,KAAK,IAAI,EAAE;gBACzD,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,MAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,MAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;QACnD,8EAA8E;QAC9E,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,iBAAiB,IAAI,MAAM,EAAE;YAC1E,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,MAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,MAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,MAAM,oBAAoB,GAAG,CAC3B,GAAM,EAGN,EAAE;QACF,OAAO,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YAC7C,KAAK,EAAE,SAAS;SACjB,CAEA,CAAC;IACJ,CAAC,CAAC;IAEF,IAAI,UAAU,EAAE;QACd,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,iCACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,MAAM,eAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC;QAC3F,MAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,IAAI,CAC1D,CAAC,CAAC,EAAE,EAAE,CAAC,eAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;YACjD,MAAM,aAAa,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,CAAyB,CAAC;YAEhF,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE;gBAC9C,IAAI,eAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE;oBAC5C,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CA
AC;YACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,iCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnC,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;QACA,yCAAyC;QACzC,OAAO,oBAAoB,iCACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,iCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,OAA8B,EAC9B,OAAgB;IAEhB,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;QAC7B,MAAM,MAAM,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACxC,OAAO,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;YAC1B,CAAC,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,CAAC;YAClD,CAAC,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC;KAChC;IAED,IAAI,OAAO,EAAE;QACX,OAAO,GAAG,OAAO,WAAW,CAAC;KAC9B;IACD,OAAO,SAAS,CAAC;AACnB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"./util/utils\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport {\n DefaultDeserializationOptions,\n DeserializationContentTypes,\n deserializationPolicy,\n} from \"./policies/deserializationPolicy\";\nimport { DefaultKeepAliveOptions, keepAlivePolicy } from \"./policies/keepAlivePolicy\";\nimport { DefaultRedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport { DefaultRetryOptions, exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nimport {\n OperationParameter,\n ParameterPath,\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n} from \"./operationParameter\";\nimport { OperationSpec, getStreamResponseStatusCodes } from \"./operationSpec\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResource,\n WebResourceLike,\n isWebResourceLike,\n} from \"./webResource\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./policies/requestPolicy\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\nimport { ServiceCallback, isNode } from \"./util/utils\";\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport {\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n userAgentPolicy,\n} from \"./policies/userAgentPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { InternalPipelineOptions } from \"./pipelineOptions\";\nimport { OperationArguments } from \"./operationArguments\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { URLBuilder } from \"./url\";\nimport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nimport { disableResponseDecompressionPolicy } from \"./policies/disableResponseDecompressionPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport { getCachedDefaultHttpClient } from \"./httpClientCache\";\nimport { logger } from \"./log\";\nimport { ndJsonPolicy } from \"./policies/ndJsonPolicy\";\nimport { proxyPolicy } from \"./policies/proxyPolicy\";\nimport { rpRegistrationPolicy } from 
\"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { stringifyXML } from \"./util/xml\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { tracingPolicy } from \"./policies/tracingPolicy\";\n\n/**\n * Options to configure a proxy for outgoing requests (Node.js only).\n */\nexport interface ProxySettings {\n /**\n * The proxy's host address.\n */\n host: string;\n\n /**\n * The proxy host's port.\n */\n port: number;\n\n /**\n * The user name to authenticate with the proxy, if required.\n */\n username?: string;\n\n /**\n * The password to authenticate with the proxy, if required.\n */\n password?: string;\n}\n\n/**\n * An alias of {@link ProxySettings} for future use.\n */\nexport type ProxyOptions = ProxySettings;\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. 
If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-useragent\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * If specified, will be used to build the BearerTokenAuthenticationPolicy.\n */\n credentialScopes?: string | string[];\n}\n\n/**\n * ServiceClient sends service requests and receives responses.\n */\nexport class ServiceClient {\n /**\n * If specified, this is the base URI that requests will be made against for this ServiceClient.\n * If it is not specified, then all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptions;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: TokenCredential | ServiceClientCredentials,\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n let authPolicyFactory: RequestPolicyFactory | undefined = undefined;\n if (isTokenCredential(credentials)) {\n logger.info(\n \"ServiceClient: creating bearer token authentication policy from provided credentials\"\n );\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. 
This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n const wrappedPolicyFactory: () => RequestPolicyFactory = () => {\n let bearerTokenPolicyFactory: RequestPolicyFactory | undefined = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const serviceClient = this;\n const serviceClientOptions = options;\n return {\n create(nextPolicy: RequestPolicy, createOptions: RequestPolicyOptions): RequestPolicy {\n const credentialScopes = getCredentialScopes(\n serviceClientOptions,\n serviceClient.baseUri\n );\n\n if (!credentialScopes) {\n throw new Error(\n `When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`\n );\n }\n\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(\n credentials,\n credentialScopes\n );\n }\n\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n },\n };\n };\n\n authPolicyFactory = wrappedPolicyFactory();\n } else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n } else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n const newRequestPolicyFactories: void | RequestPolicyFactory[] =\n options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error: any) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call 
when the response is received.\n */\n async sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const serializerOptions = operationArguments.options?.serializerOptions;\n const httpRequest: WebResourceLike = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter),\n serializerOptions\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter),\n serializerOptions\n );\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null\n ) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter),\n serializerOptions\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n\n if (options.spanOptions) {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n (httpRequest as any).spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n httpRequest.tracingContext = options.tracingContext;\n }\n\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n 
serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec);\n }\n\n let rawResponse: HttpOperationResponse;\n let sendRequestError;\n try {\n rawResponse = await this.sendRequest(httpRequest);\n } catch (error: any) {\n sendRequestError = error;\n }\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(\n sendRequestError.response,\n operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]\n );\n }\n result = Promise.reject(sendRequestError);\n } else {\n result = Promise.resolve(\n flattenResponse(rawResponse!, operationSpec.responses[rawResponse!.status])\n );\n }\n } catch (error: any) {\n result = Promise.reject(error);\n }\n\n const cb = callback;\n if (cb) {\n result\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n const serializerOptions = operationArguments.options?.serializerOptions ?? {};\n const updatedOptions: Required = {\n rootName: serializerOptions.rootName ?? \"\",\n includeRoot: serializerOptions.includeRoot ?? false,\n xmlCharKey: serializerOptions.xmlCharKey ?? XML_CHARKEY,\n };\n\n const xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } =\n bodyMapper;\n const typeName = bodyMapper.type.name;\n\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString,\n updatedOptions\n );\n\n const isStream = typeName === MapperType.Stream;\n\n if (operationSpec.isXML) {\n const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : \"xmlns\";\n const value = getXmlValueWithNamespace(\n xmlNamespace,\n xmlnsKey,\n typeName,\n httpRequest.body,\n updatedOptions\n );\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n value,\n xmlElementName || xmlName || serializedName!,\n xmlnsKey,\n xmlNamespace\n ),\n {\n rootName: xmlName || serializedName,\n xmlCharKey,\n }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey,\n });\n }\n } else if (\n typeName === MapperType.String &&\n (operationSpec.contentType?.match(\"text/plain\") || operationSpec.mediaType === \"text\")\n ) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error: any) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter),\n updatedOptions\n );\n }\n }\n }\n}\n\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(\n xmlNamespace: string | undefined,\n xmlnsKey: string,\n typeName: string,\n serializedValue: any,\n options: Required\n): any {\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace };\n return result;\n }\n\n return serializedValue;\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n authPolicyFactory: RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const 
userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n if (isNode) {\n factories.push(proxyPolicy(options.proxySettings));\n }\n\n factories.push(logPolicy({ logger: logger.info }));\n\n return factories;\n}\n\n/**\n * Creates an HTTP pipeline based on the given options.\n * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client.\n * @param authPolicyFactory - An optional authentication policy factory to use for signing requests.\n * @returns A set of options that can be passed to create a new {@link ServiceClient}.\n */\nexport function createPipelineFromOptions(\n pipelineOptions: InternalPipelineOptions,\n authPolicyFactory?: RequestPolicyFactory\n): ServiceClientOptions {\n const requestPolicyFactories: RequestPolicyFactory[] = [];\n\n if (pipelineOptions.sendStreamingJson) {\n requestPolicyFactories.push(ndJsonPolicy());\n }\n\n let userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n const userAgentInfo: string[] = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n const defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n\n userAgentValue = userAgentInfo.join(\" \");\n }\n\n const keepAliveOptions = {\n ...DefaultKeepAliveOptions,\n ...pipelineOptions.keepAliveOptions,\n };\n\n const retryOptions = {\n ...DefaultRetryOptions,\n ...pipelineOptions.retryOptions,\n };\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...pipelineOptions.redirectOptions,\n };\n\n if (isNode) {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n\n const deserializationOptions = {\n ...DefaultDeserializationOptions,\n ...pipelineOptions.deserializationOptions,\n };\n\n const loggingOptions: LogPolicyOptions = {\n ...pipelineOptions.loggingOptions,\n };\n\n requestPolicyFactories.push(\n tracingPolicy({ userAgent: userAgentValue }),\n keepAlivePolicy(keepAliveOptions),\n userAgentPolicy({ value: userAgentValue }),\n generateClientRequestIdPolicy(),\n deserializationPolicy(deserializationOptions.expectedContentTypes),\n throttlingRetryPolicy(),\n systemErrorRetryPolicy(),\n exponentialRetryPolicy(\n retryOptions.maxRetries,\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n )\n );\n\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n\n requestPolicyFactories.push(logPolicy(loggingOptions));\n\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n\n return {\n httpClient: 
pipelineOptions.httpClient,\n requestPolicyFactories,\n };\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n const serializerOptions = operationArguments.options?.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\n/**\n * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}).\n * @param _response - Wrapper object for http response.\n * @param responseSpec - Mappers for how to parse the response properties.\n * @returns - A normalized response object.\n */\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = >(\n obj: T\n ): T & {\n _response: HttpOperationResponse;\n } => {\n return Object.defineProperty(obj, \"_response\", {\n value: _response,\n }) as T & {\n _response: HttpOperationResponse;\n };\n };\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n const arrayResponse = [...(_response.parsedBody || [])] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of 
Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n\nfunction getCredentialScopes(\n options?: ServiceClientOptions,\n baseUri?: string\n): string | string[] | undefined {\n if (options?.credentialScopes) {\n const scopes = options.credentialScopes;\n return Array.isArray(scopes)\n ? scopes.map((scope) => new URL(scope).toString())\n : new URL(scopes).toString();\n }\n\n if (baseUri) {\n return `${baseUri}/.default`;\n }\n return undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/url.js b/node_modules/@azure/core-http/dist-esm/src/url.js new file mode 100644 index 0000000000..35503a507f --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/url.js @@ -0,0 +1,597 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { replaceAll } from "./util/utils"; +/** + * A class that handles the query portion of a URLBuilder. + */ +export class URLQuery { + constructor() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any() { + return Object.keys(this._rawQuery).length > 0; + } + /** + * Get the keys of the query string. + */ + keys() { + return Object.keys(this._rawQuery); + } + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName, parameterValue) { + const caseParameterValue = parameterValue; + if (parameterName) { + if (caseParameterValue !== undefined && caseParameterValue !== null) { + const newValue = Array.isArray(caseParameterValue) + ? caseParameterValue + : caseParameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + } + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString() { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } + else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + /** + * Parse a URLQuery from the provided text. 
+ */ + static parse(text) { + const result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + let currentState = "ParameterName"; + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + } +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export class URLBuilder { + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + } + /** + * Get the scheme that has been set in this URL. + */ + getScheme() { + return this._scheme; + } + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + } + /** + * Get the host that has been set in this URL. + */ + getHost() { + return this._host; + } + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port) { + if (port === undefined || port === null || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + } + /** + * Get the port that has been set in this URL. + */ + getPort() { + return this._port; + } + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path) { + if (!path) { + this._path = undefined; + } + else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + } + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path) { + if (path) { + let currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + /** + * Get the path that has been set in this URL. 
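The setPath() implementation shown above treats any path that contains "://" as an embedded absolute URL and re-parses it starting from the SCHEME state, while appendPath() joins segments with a single "/". A short sketch of the observable effect (import path assumed for illustration):

```ts
import { URLBuilder } from "./url";

const url = URLBuilder.parse("https://example.com");
url.setPath("/a/b/https://microsoft.com");
url.getScheme(); // "https"
url.getHost();   // "microsoft.com" — the embedded URL's scheme and host win
url.getPath();   // undefined — no PATH token is produced for this input

// appendPath() keeps any existing path and inserts exactly one "/" between segments.
url.appendPath("api");
url.appendPath("items/");
url.getPath();   // "api/items/" (toString() adds the leading "/")
```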
+ */ + getPath() { + return this._path; + } + /** + * Set the query in this URL. + */ + setQuery(query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + } + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + } + /** + * Get the query in this URL. + */ + getQuery() { + return this._query ? this._query.toString() : undefined; + } + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + set(text, startState) { + const tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + const token = tokenizer.current(); + let tokenPath; + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString() { + let result = ""; + if (this._scheme) { + result += `${this._scheme}://`; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += `:${this._port}`; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + return result; + } + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + /** + * Parses a given string URL into a new {@link URLBuilder}. 
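Putting the setters and toString() above together, a typical round trip looks like the following sketch (import path assumed for illustration):

```ts
import { URLBuilder } from "./url";

const url = URLBuilder.parse("https://example.blob.core.windows.net");
url.setPort(8443);
url.appendPath("container/blob.txt");
url.setQueryParameter("sv", "2021-08-06");
url.setQueryParameter("comp", undefined); // undefined removes the parameter (a no-op here)
url.toString();
// "https://example.blob.core.windows.net:8443/container/blob.txt?sv=2021-08-06"
```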
+ */ + static parse(text) { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} +export class URLToken { + constructor(text, type) { + this.text = text; + this.type = type; + } + static scheme(text) { + return new URLToken(text, "SCHEME"); + } + static host(text) { + return new URLToken(text, "HOST"); + } + static port(text) { + return new URLToken(text, "PORT"); + } + static path(text) { + return new URLToken(text, "PATH"); + } + static query(text) { + return new URLToken(text, "QUERY"); + } +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character) { + const characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +export class URLTokenizer { + constructor(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current() { + return this._currentToken; + } + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next() { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. 
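The URLTokenizer above is a hand-rolled state machine: each next() call emits one token and picks the following state from the current character. For a full URL it produces the sequence sketched below (import path assumed for illustration):

```ts
import { URLToken, URLTokenizer } from "./url";

const tokenizer = new URLTokenizer("https://host.example:8080/a/b?x=1");
const tokens: (URLToken | undefined)[] = [];
while (tokenizer.next()) {
  tokens.push(tokenizer.current());
}
// tokens: SCHEME "https", HOST "host.example", PORT "8080", PATH "/a/b", QUERY "x=1"
```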
+ */ +function peekCharacters(tokenizer, charactersToPeek) { + let endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + let result = ""; + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer, ...terminatingCharacters) { + return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +} +function nextScheme(tokenizer) { + const scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + const host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + const port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + const path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + 
tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + const query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} +//# sourceMappingURL=url.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/url.js.map b/node_modules/@azure/core-http/dist-esm/src/url.js.map new file mode 100644 index 0000000000..2be3eb9c9b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/url.js.map @@ -0,0 +1 @@ +{"version":3,"file":"url.js","sourceRoot":"","sources":["../../src/url.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C;;GAEG;AACH,MAAM,OAAO,QAAQ;IAArB;QACmB,cAAS,GAAwD,EAAE,CAAC;IAiIvF,CAAC;IA/HC;;OAEG;IACI,GAAG;QACR,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAChD,CAAC;IAED;;OAEG;IACI,IAAI;QACT,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACI,GAAG,CAAC,aAAqB,EAAE,cAAuB;QACvD,MAAM,kBAAkB,GAAG,cAE1B,CAAC;QACF,IAAI,aAAa,EAAE;YACjB,IAAI,kBAAkB,KAAK,SAAS,IAAI,kBAAkB,KAAK,IAAI,EAAE;gBACnE,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;oBAChD,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,kBAAkB,CAAC,QAAQ,EAAE,CAAC;gBAClC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;IACH,CAAC;IAED;;;OAGG;IACI,GAAG,CAAC,aAAqB;QAC9B,OAAO,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACnE,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,MAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,MAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAK,MAAM,qBAAqB,IAAI,cAAc,EAAE;oBAClD,gBAAgB,CAAC,IAAI,CAAC,GAAG,aAAa,IAAI,qBAAqB,EAAE,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAI,GAAG,aAAa,IAAI,cAAc,EAAE,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,MAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,MAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY,EAAE;oBACpB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAED;;GAEG;AACH,MAAM,OAAO,UAAU;IAOrB;;;OAGG;IACI,SAAS,CAAC,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAE
G;IACI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAiC;QAC9C,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACtD,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,OAAO,CAAC,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;gBACvD,0FAA0F;gBAC1F,2FAA2F;gBAC3F,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;IACH,CAAC;IAED;;;OAGG;IACI,UAAU,CAAC,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;IACH,CAAC;IAED;;OAEG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,QAAQ,CAAC,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;IACH,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,kBAA0B,EAAE,mBAA4B;QAC/E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;IACH,CAAC;IAED;;;OAGG;IACI,sBAAsB,CAAC,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1D,CAAC;IAED;;OAEG;IACK,GAAG,CAAC,IAAY,EAAE,UAA6B;QACrD,MAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,MAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,SAA6B,CAAC;YAClC,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI,EAAE;oBAClB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,SAAS,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACpC,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,8BAA8B,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;iBAC/D;aACF;SACF;IACH,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,KAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,IAAI,CAAC,KAAK,EAAE,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAA
C,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,EAAE,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;OAGG;IACI,UAAU,CAAC,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;IACH,CAAC;IAED;;OAEG;IACI,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAMD,MAAM,OAAO,QAAQ;IACnB,YAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;IAAG,CAAC;IAEhF,MAAM,CAAC,MAAM,CAAC,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IACtC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,IAAI,CAAC,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,IAAY;QAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrC,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,UAAU,uBAAuB,CAAC,SAAiB;IACvD,MAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,OAAO,CACL,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,CAAC,SAAS,CAClE,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,YAAY;IAMvB,YAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACtF,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED;;;OAGG;IACI,OAAO;QACZ,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;OAEG;IACI,IAAI;QACT,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa,EAAE;gBAC1B,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,mCAAmC,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;CACF;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;GAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI
,CAAC;KACjC;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,MAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC,SAAiB,EAAE,EAAE,CAAC,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACzF,CAAC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB,EAAE,GAAG,qBAA+B;IACrF,OAAO,SAAS,CACd,SAAS,EACT,CAAC,SAAiB,EAAE,EAAE,CAAC,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,MAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,MAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,MAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KA
AK,CAAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Get the keys of the query string.\n */\n public keys(): string[] {\n return Object.keys(this._rawQuery);\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: unknown): void {\n const caseParameterValue = parameterValue as {\n toString: () => string;\n };\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n const newValue = Array.isArray(caseParameterValue)\n ? caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. 
If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n let tokenPath: string | undefined;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n /**\n * Serializes the URL as a string.\n * @returns the URL as a string.\n */\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n /**\n * Parses a given string URL into a new {@link URLBuilder}.\n */\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode 
<= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js new file mode 100644 index 0000000000..1505a28cae --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export function encodeString(value) { + return btoa(value); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export function encodeByteArray(value) { + let str = ""; + for (let i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} +/** + * Decodes a base64 string into a byte array. 
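The browser base64 helpers above are thin wrappers over btoa()/atob(), converting between strings and byte values one character at a time. A quick sketch (import path assumed for illustration):

```ts
import { encodeByteArray, encodeString } from "./util/base64.browser";

encodeString("hello");                                       // "aGVsbG8="
encodeByteArray(new Uint8Array([104, 101, 108, 108, 111]));  // "aGVsbG8=" — same bytes, same encoding
```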
+ * @param value - The base64 string to decode + */ +export function decodeString(value) { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} +//# sourceMappingURL=base64.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map new file mode 100644 index 0000000000..87ae7d682c --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.js","sourceRoot":"","sources":["../../../src/util/base64.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;AACrB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACtC;IACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/B,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;KACnC;IACD,OAAO,GAAG,CAAC;AACb,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return btoa(value);\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n let str = \"\";\n for (let i = 0; i < value.length; i++) {\n str += String.fromCharCode(value[i]);\n }\n return btoa(str);\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.js new file mode 100644 index 0000000000..f5fff422f9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. 
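The Node build above takes the Buffer fast path when it can and otherwise wraps the value's underlying buffer. A sketch of both branches of encodeByteArray (import path assumed for illustration):

```ts
import { encodeByteArray } from "./util/base64";

// A Buffer is used as-is; a plain Uint8Array is wrapped via Buffer.from(value.buffer).
encodeByteArray(Buffer.from("hello"));                       // "aGVsbG8="
encodeByteArray(new Uint8Array([104, 101, 108, 108, 111]));  // "aGVsbG8="
```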
+ * @param value - The base64 string to decode + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map b/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map new file mode 100644 index 0000000000..f7ecda3cde --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/base64.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.js","sourceRoot":"","sources":["../../../src/util/base64.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,kGAAkG;IAClG,mGAAmG;IACnG,MAAM,WAAW,GAAG,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/constants.js b/node_modules/@azure/core-http/dist-esm/src/util/constants.js new file mode 100644 index 0000000000..445ed08624 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/constants.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ +export const Constants = { + /** + * The core-http version + */ + coreHttpVersion: "2.2.5", + /** + * Specifies HTTP. + */ + HTTP: "http:", + /** + * Specifies HTTPS. + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + ServiceUnavailable: 503, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + */ + USER_AGENT: "User-Agent", + }, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map b/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map new file mode 100644 index 0000000000..b4fa936750 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../src/util/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC;;GAEG;AACH,MAAM,CAAC,MAAM,SAAS,GAAG;IACvB;;OAEG;IACH,eAAe,EAAE,OAAO;IAExB;;OAEG;IACH,IAAI,EAAE,OAAO;IAEb;;OAEG;IACH,KAAK,EAAE,QAAQ;IAEf;;OAEG;IACH,UAAU,EAAE,YAAY;IAExB;;OAEG;IACH,WAAW,EAAE,aAAa;IAE1B;;OAEG;IACH,QAAQ,EAAE,UAAU;IAEpB;;OAEG;IACH,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;QACb;;WAEG;QACH,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;YACpB,kBAAkB,EAAE,GAAG;SACxB;KACF;IAED;;OAEG;IACH,eAAe,EAAE;QACf;;WAEG;QACH,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;QAE9B;;;;WAIG;QACH,WAAW,EAAE,aAAa;QAE1B;;WAEG;QACH,UAAU,EAAE,YAAY;KACzB;CACF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A set of constants used internally when processing requests.\n */\nexport const Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"2.2.5\",\n\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n ServiceUnavailable: 503,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/delay.js b/node_modules/@azure/core-http/dist-esm/src/util/delay.js new file mode 100644 index 0000000000..3e674a4538 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/delay.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
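The Constants object above is a plain lookup table; typical reads look like this sketch (import path assumed for illustration):

```ts
import { Constants } from "./util/constants";

Constants.HttpConstants.StatusCodes.TooManyRequests; // 429
Constants.HeaderConstants.RETRY_AFTER;               // "Retry-After"
Constants.HTTPS_PROXY;                               // "HTTPS_PROXY" — the conventional proxy environment variable name
```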
+import { AbortError } from "@azure/abort-controller"; +import { isDefined } from "./typeguards"; +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * @param abortSignal - The abortSignal associated with containing operation. + * @param abortErrorMsg - The abort error message associated with containing operation. + * @returns - Resolved promise + */ +export function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (isDefined(timer)) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} +//# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/delay.js.map b/node_modules/@azure/core-http/dist-esm/src/util/delay.js.map new file mode 100644 index 0000000000..a8c70f0676 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/delay.js.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.js","sourceRoot":"","sources":["../../../src/util/delay.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAmB,MAAM,yBAAyB,CAAC;AACtE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAEzC,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;GAQG;AACH,MAAM,UAAU,KAAK,CACnB,SAAiB,EACjB,KAAS,EACT,OAGC;IAED,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,IAAI,KAAK,GAA8C,SAAS,CAAC;QACjE,IAAI,SAAS,GAA6B,SAAS,CAAC;QAEpD,MAAM,aAAa,GAAG,GAAS,EAAE;YAC/B,OAAO,MAAM,CACX,IAAI,UAAU,CAAC,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,EAAC,CAAC,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,CAAC,CAAC,CAAC,oBAAoB,CAAC,CACvF,CAAC;QACJ,CAAC,CAAC;QAEF,MAAM,eAAe,GAAG,GAAS,EAAE;YACjC,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,KAAI,SAAS,EAAE;gBACrC,OAAO,CAAC,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;aAC7D;QACH,CAAC,CAAC;QAEF,SAAS,GAAG,GAAS,EAAE;YACrB,IAAI,SAAS,CAAC,KAAK,CAAC,EAAE;gBACpB,YAAY,CAAC,KAAK,CAAC,CAAC;aACrB;YACD,eAAe,EAAE,CAAC;YAClB,OAAO,aAAa,EAAE,CAAC;QACzB,CAAC,CAAC;QAEF,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,KAAI,OAAO,CAAC,WAAW,CAAC,OAAO,EAAE;YACvD,OAAO,aAAa,EAAE,CAAC;SACxB;QAED,KAAK,GAAG,UAAU,CAAC,GAAG,EAAE;YACtB,eAAe,EAAE,CAAC;YAClB,OAAO,CAAC,KAAK,CAAC,CAAC;QACjB,CAAC,EAAE,SAAS,CAAC,CAAC;QAEd,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EAAE;YACxB,OAAO,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;SAC1D;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft 
Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError, AbortSignalLike } from \"@azure/abort-controller\";\nimport { isDefined } from \"./typeguards\";\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds.\n * @param delayInMs - The number of milliseconds to be delayed.\n * @param value - The value to be resolved with after a timeout of t milliseconds.\n * @param options - The options for delay - currently abort options\n * @param abortSignal - The abortSignal associated with containing operation.\n * @param abortErrorMsg - The abort error message associated with containing operation.\n * @returns - Resolved promise\n */\nexport function delay(\n delayInMs: number,\n value?: T,\n options?: {\n abortSignal?: AbortSignalLike;\n abortErrorMsg?: string;\n }\n): Promise {\n return new Promise((resolve, reject) => {\n let timer: ReturnType | undefined = undefined;\n let onAborted: (() => void) | undefined = undefined;\n\n const rejectOnAbort = (): void => {\n return reject(\n new AbortError(options?.abortErrorMsg ? options?.abortErrorMsg : StandardAbortMessage)\n );\n };\n\n const removeListeners = (): void => {\n if (options?.abortSignal && onAborted) {\n options.abortSignal.removeEventListener(\"abort\", onAborted);\n }\n };\n\n onAborted = (): void => {\n if (isDefined(timer)) {\n clearTimeout(timer);\n }\n removeListeners();\n return rejectOnAbort();\n };\n\n if (options?.abortSignal && options.abortSignal.aborted) {\n return rejectOnAbort();\n }\n\n timer = setTimeout(() => {\n removeListeners();\n resolve(value);\n }, delayInMs);\n\n if (options?.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", onAborted);\n }\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js new file mode 100644 index 0000000000..e888845023 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const DEFAULT_CLIENT_RETRY_COUNT = 3; +// intervals are in ms +export const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +export const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +export const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +export function isNumber(n) { + return typeof n === "number"; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial chekck on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +export function shouldRetry(retryLimit, predicate, retryData, response, error) { + if (!predicate(response, error)) { + return false; + } + return retryData.retryCount < retryLimit; +} +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation"s error, if any. 
+ */ +export function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; + const boundedRandDelta = retryOptions.retryInterval * 0.8 + + Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); + return retryData; +} +//# sourceMappingURL=exponentialBackoffStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map new file mode 100644 index 0000000000..944c0b477b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialBackoffStrategy.js","sourceRoot":"","sources":["../../../src/util/exponentialBackoffStrategy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,CAAC;AAC5C,sBAAsB;AACtB,MAAM,CAAC,MAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AACvD,MAAM,CAAC,MAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AAC3D,MAAM,CAAC,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAE1D,MAAM,UAAU,QAAQ,CAAC,CAAU;IACjC,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;AAC/B,CAAC;AAaD;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CACzB,UAAkB,EAClB,SAA4E,EAC5E,SAAoB,EACpB,QAAgC,EAChC,KAAkB;IAElB,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,KAAK,CAAC,EAAE;QAC/B,OAAO,KAAK,CAAC;KACd;IAED,OAAO,SAAS,CAAC,UAAU,GAAG,UAAU,CAAC;AAC3C,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,eAAe,CAC7B,YAA2F,EAC3F,YAAuB,EAAE,UAAU,EAAE,CAAC,EAAE,aAAa,EAAE,CAAC,EAAE,EAC1D,GAAgB;IAEhB,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;IAC/D,MAAM,gBAAgB,GACpB,YAAY,CAAC,aAAa,GAAG,GAAG;QAChC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,YAAY,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACjE,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,YAAY,CAAC,gBAAgB,GAAG,cAAc,EAC9C,YAAY,CAAC,gBAAgB,CAC9B,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../coreHttp\";\n\nexport const DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nexport const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nexport const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nexport const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\nexport function isNumber(n: unknown): n is number {\n return typeof n === \"number\";\n}\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial chekck on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; 
false otherwise.\n */\nexport function shouldRetry(\n retryLimit: number,\n predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean,\n retryData: RetryData,\n response?: HttpOperationResponse,\n error?: RetryError\n): boolean {\n if (!predicate(response, error)) {\n return false;\n }\n\n return retryData.retryCount < retryLimit;\n}\n\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation\"s error, if any.\n */\nexport function updateRetryData(\n retryOptions: { retryInterval: number; minRetryInterval: number; maxRetryInterval: number },\n retryData: RetryData = { retryCount: 0, retryInterval: 0 },\n err?: RetryError\n): RetryData {\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n const boundedRandDelta =\n retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n retryOptions.minRetryInterval + incrementDelta,\n retryOptions.maxRetryInterval\n );\n\n return retryData;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js new file mode 100644 index 0000000000..65d8b5e2b7 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const custom = {}; +//# sourceMappingURL=inspect.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map new file mode 100644 index 0000000000..edb11685a6 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.browser.js","sourceRoot":"","sources":["../../../src/util/inspect.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,MAAM,GAAG,EAAE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const custom = {};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js new file mode 100644 index 0000000000..53fba75515 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { inspect } from "util"; +export const custom = inspect.custom; +//# sourceMappingURL=inspect.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map new file mode 100644 index 0000000000..5ba871ce93 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.js","sourceRoot":"","sources":["../../../src/util/inspect.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAC;AAE/B,MAAM,CAAC,MAAM,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { inspect } from \"util\";\n\nexport const custom = inspect.custom;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js new file mode 100644 index 0000000000..7683347f6a --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { URLBuilder, URLQuery } from "../url"; +import { isObject } from "./utils"; +const RedactedString = "REDACTED"; +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +export class Sanitizer { + constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { + allowedHeaderNames = Array.isArray(allowedHeaderNames) + ? defaultAllowedHeaderNames.concat(allowedHeaderNames) + : defaultAllowedHeaderNames; + allowedQueryParameters = Array.isArray(allowedQueryParameters) + ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters) + : defaultAllowedQueryParameters; + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "_headersMap") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(value) { + return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); + } + sanitizeQuery(value) { + return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); + } + sanitizeObject(value, allowedKeys, accessor) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (allowedKeys.has(k.toLowerCase())) { + sanitized[k] = accessor(value, k); + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const urlBuilder = URLBuilder.parse(value); + const queryString = urlBuilder.getQuery(); + if (!queryString) { + return value; + } + const query = URLQuery.parse(queryString); + for (const k of query.keys()) { + if (!this.allowedQueryParameters.has(k.toLowerCase())) { + query.set(k, RedactedString); + } + } + urlBuilder.setQuery(query.toString()); + return urlBuilder.toString(); + } +} +//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map new file mode 100644 index 0000000000..30d07b3a41 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"sanitizer.js","sourceRoot":"","sources":["../../../src/util/sanitizer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAC9C,OAAO,EAAiB,QAAQ,EAAE,MAAM,SAAS,CAAC;AAkBlD,MAAM,cAAc,GAAG,UAAU,CAAC;AAElC,MAAM,yBAAyB,GAAG;IAChC,wBAAwB;IACxB,+BAA+B;IAC/B,gBAAgB;IAChB,6BAA6B;IAC7B,iBAAiB;IACjB,mBAAmB;IACnB,OAAO;IACP,0BAA0B;IAC1B,aAAa;IAEb,kCAAkC;IAClC,8BAA8B;IAC9B,8BAA8B;IAC9B,6BAA6B;IAC7B,+BAA+B;IAC/B,wBAAwB;IACxB,gCAAgC;IAChC,+BAA+B;IAC/B,QAAQ;IAER,QAAQ;IACR,iBAAiB;IACjB,eAAe;IACf,YAAY;IACZ,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,MAAM;IACN,SAAS;IACT,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,eAAe;IACf,QAAQ;IACR,YAAY;IACZ,aAAa;IACb,QAAQ;IACR,mBAAmB;IACnB,YAAY;IACZ,kBAAkB;CACnB,CAAC;AAEF,MAAM,6BAA6B,GAAa,CAAC,aAAa,CAAC,CAAC;AAEhE,MAAM,OAAO,SAAS;IAIpB,YAAY,EAAE,kBAAkB,GAAG,EAAE,EAAE,sBAAsB,GAAG,EAAE,KAAuB,EAAE;QACzF,kBAAkB,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;YACpD,CAAC,CAAC,yBAAyB,CAAC,MAAM,CAAC,kBAAkB,CAAC;YACtD,CAAC,CAAC,yBAAyB,CAAC;QAE9B,sBAAsB,GAAG,KAAK,CAAC,OAAO,CAAC,sBAAsB,CAAC;YAC5D,CAAC,CAAC,6BAA6B,CAAC,MAAM,CAAC,sBAAsB,CAAC;YAC9D,CAAC,CAAC,6BAA6B,CAAC;QAElC,IAAI,CAAC,kBAAkB,GAAG,IAAI,GAAG,CAAC,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;QAClF,IAAI,CAAC,sBAAsB,GAAG,IAAI,GAAG,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;IAC5F,CAAC;IAEM,QAAQ,CAAC,GAAY;QAC1B,MAAM,IAAI,GAAG,IAAI,GAAG,EAAW,CAAC;QAChC,OAAO,IAAI,CAAC,SAAS,CACnB,GAAG,EACH,CAAC,GAAW,EAAE,KAAc,EAAE,EAAE;YAC9B,iEAAiE;YACjE,IAAI,KAAK,YAAY,KAAK,EAAE;gBAC1B,uCACK,KAAK,KACR,IAAI,EAAE,KAAK,CAAC,IAAI,EAChB,OAAO,EAAE,KAAK,CAAC,OAAO,IACtB;aACH;YAED,IAAI,GAAG,KAAK,aAAa,EAAE;gBACzB,OAAO,IAAI,CAAC,eAAe,CAAC,KAAsB,CAAC,CAAC;aACrD;iBAAM,IAAI,GAAG,KAAK,KAAK,EAAE;gBACxB,OAAO,IAAI,CAAC,WAAW,CAAC,KAAe,CAAC,CAAC;aAC1C;iBAAM,IAAI,GAAG,KAAK,OAAO,EAAE;gBAC1B,OAAO,IAAI,CAAC,aAAa,CAAC,KAAsB,CAAC,CAAC;aACnD;iBAAM,IAAI,GAAG,KAAK,MAAM,EAAE;gBACzB,6BAA6B;gBAC7B,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,GAAG,KAAK,UAAU,EAAE;gBAC7B,2BAA2B;gBAC3B,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,GAAG,KAAK,eAAe,EAAE;gBAClC,iEAAiE;gBACjE,mDAAmD;gBACnD,OAAO,SAAS,CAAC;aAClB;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE;gBAClD,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;oBACnB,OAAO,YAAY,CAAC;iBACrB;gBACD,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;aACjB;YAED,OAAO,KAAK,CAAC;QACf,CAAC,EACD,CAAC,CACF,CAAC;IACJ,CAAC;IAEO,eAAe,CAAC,KAAoB;QAC1C,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IACnF,CAAC;IAEO,aAAa,CAAC,KAAoB;QACxC,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACjF,CAAC;IAEO,cAAc,CACpB,KAAoB,EACpB,WAAwB,EACxB,QAA0C;QAE1C,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;QAED,MAAM,SAAS,GAAkB,EAAE,CAAC;QAEpC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YAClC,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACpC,SAAS,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;aACnC;iBAAM;gBACL,SAAS,CAAC,CAAC,CAAC,GAAG,cAAc,CAAC;aAC/B;SACF;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;IAEO,WAAW,CAAC,KAAa;QAC/B,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;YAC/C,OAAO,KAAK,CAAC;SACd;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAC3C,MAAM,WAAW,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;QAE1C,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,KAAK,CAAC;SACd;QAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QAC1C,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE;YAC5B,IAAI,
CAAC,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACrD,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;aAC9B;SACF;QAED,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;IAC/B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { URLBuilder, URLQuery } from \"../url\";\nimport { UnknownObject, isObject } from \"./utils\";\n\nexport interface SanitizerOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n}\n\nconst RedactedString = \"REDACTED\";\n\nconst defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n\n \"Accept\",\n \"Accept-Encoding\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"WWW-Authenticate\",\n];\n\nconst defaultAllowedQueryParameters: string[] = [\"api-version\"];\n\nexport class Sanitizer {\n public allowedHeaderNames: Set;\n public allowedQueryParameters: Set;\n\n constructor({ allowedHeaderNames = [], allowedQueryParameters = [] }: SanitizerOptions = {}) {\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n\n this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase()));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));\n }\n\n public sanitize(obj: unknown): string {\n const seen = new Set();\n return JSON.stringify(\n obj,\n (key: string, value: unknown) => {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return {\n ...value,\n name: value.name,\n message: value.message,\n };\n }\n\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(value as UnknownObject);\n } else if (key === \"url\") {\n return this.sanitizeUrl(value as string);\n } else if (key === \"query\") {\n return this.sanitizeQuery(value as UnknownObject);\n } else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n } else if (key === \"response\") {\n // Don't log response again\n return undefined;\n } else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. No need to log it.\n return undefined;\n } else if (Array.isArray(value) || isObject(value)) {\n if (seen.has(value)) {\n return \"[Circular]\";\n }\n seen.add(value);\n }\n\n return value;\n },\n 2\n );\n }\n\n private sanitizeHeaders(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value);\n }\n\n private sanitizeQuery(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]);\n }\n\n private sanitizeObject(\n value: UnknownObject,\n allowedKeys: Set,\n accessor: (value: any, key: string) => any\n ): UnknownObject {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n\n const sanitized: UnknownObject = {};\n\n for (const k of Object.keys(value)) {\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n } else {\n sanitized[k] = RedactedString;\n }\n }\n\n return sanitized;\n }\n\n private sanitizeUrl(value: string): string {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n\n const urlBuilder = URLBuilder.parse(value);\n const queryString = urlBuilder.getQuery();\n\n if (!queryString) {\n return value;\n }\n\n const query = URLQuery.parse(queryString);\n for (const k of query.keys()) {\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js new file mode 100644 index 0000000000..cc3bb61625 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=serializer.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map new file mode 100644 index 0000000000..c4c3d96bdf --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.common.js","sourceRoot":"","sources":["../../../src/util/serializer.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,GAAG,CAAC;AAC/B;;GAEG;AACH,MAAM,CAAC,MAAM,WAAW,GAAG,GAAG,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Default key used to access the XML attributes.\n */\nexport const XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nexport const XML_CHARKEY = \"_\";\n\n/**\n * Options to govern behavior of xml parser and builder.\n */\nexport interface SerializerOptions {\n /**\n * indicates the name of the root element in the resulting XML when building XML.\n */\n rootName?: string;\n /**\n * indicates whether the root element is to be included or not in the output when parsing XML.\n */\n includeRoot?: boolean;\n /**\n * key used to access the XML value content when parsing XML.\n */\n xmlCharKey?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js new file mode 100644 index 0000000000..b9e5a38449 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Maximum number of retries for the throttling retry policy + */ +export const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map new file mode 100644 index 0000000000..42c8a66a0d --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryStrategy.js","sourceRoot":"","sources":["../../../src/util/throttlingRetryStrategy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;GAEG;AACH,MAAM,CAAC,MAAM,8BAA8B,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Maximum number of retries for the throttling retry policy\n */\nexport const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js new file mode 100644 index 0000000000..d686cab8d7 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if the value is not null or undefined. 
+ * @param thing - Anything + * @internal + */ +export function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} +//# sourceMappingURL=typeguards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js.map b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js.map new file mode 100644 index 0000000000..580daf5241 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/typeguards.js.map @@ -0,0 +1 @@ +{"version":3,"file":"typeguards.js","sourceRoot":"","sources":["../../../src/util/typeguards.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,UAAU,SAAS,CAAI,KAA2B;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if the value is not null or undefined.\n * @param thing - Anything\n * @internal\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/utils.js b/node_modules/@azure/core-http/dist-esm/src/util/utils.js new file mode 100644 index 0000000000..407e56c34b --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/utils.js @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Constants } from "./constants"; +import { XML_ATTRKEY } from "./serializer.common"; +import { v4 as uuidv4 } from "uuid"; +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export const isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck) { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export function stripResponse(response) { + const strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. 
+ */ +export function stripRequest(request) { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export function generateUuid() { + return uuidv4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories, kickstart) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +// eslint-disable-next-line @typescript-eslint/ban-types +export function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + // eslint-disable-next-line @typescript-eslint/ban-types + return (cb) => { + promise + .then((data) => { + // eslint-disable-next-line promise/no-callback-in-promise + return cb(undefined, data); + }) + .catch((err) => { + // eslint-disable-next-line promise/no-callback-in-promise + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb) => { + promise + .then((data) => { + return process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }) + .catch((err) => { + process.nextTick(cb, err); + }); + }; +} +export function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. 
+ */ +export function applyMixins(targetCtorParam, sourceCtors) { + const castTargetCtorParam = targetCtorParam; + sourceCtors.forEach((sourceCtor) => { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; + }); + }); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +export function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +export function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +export function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
+ */ +export function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map b/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map new file mode 100644 index 0000000000..5849d28cf6 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/util/utils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAIxC,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAClD,OAAO,EAAE,EAAE,IAAI,MAAM,EAAE,MAAM,MAAM,CAAC;AAEpC,MAAM,cAAc,GAClB,gFAAgF,CAAC;AAEnF;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,UAAgC;IACzD,OAAO,UAAU,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,SAAS,CAAC,KAAK,CAAC;AAC/D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,QAA+B;IAC3D,MAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,YAAY,CAAC,OAAwB;IACnD,MAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,OAAO,MAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,2BAA2B,CACzC,gBAA4B,EAC5B,SAAkB;IAElB,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,CAAC,cAAc,EAAE,EAAE;QAC1C,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAqBD;;;;;GAKG;AACH,wDAAwD;AACxD,MAAM,UAAU,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,wDAAwD;IACxD,OAAO,CAAC,EAAY,EAAQ,EAAE;QAC5B,OAAO;aACJ,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE;YAClB,0DAA0D;YAC1D,OAAO,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;QAC7B,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAU,EAAE,EAAE;YACpB,0DAA0D;YAC1D,EAAE,CAAC,GAAG,CAAC,CAAC;QACV,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,wBAAwB,CACtC,OAAuC;IAEvC,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,CAAC,EAAsB,EAAQ,EAAE;QACtC,OAAO;aACJ,IAAI,CAAC,CAAC,IAA2B,EAAE,EAAE;YACpC,OAAO,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACnF,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,GAAU,EAAE,EAAE;YACpB,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;QAC5B,CAAC,CAAC,CAAC;IACP,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,kBAAkB,CAChC,GAAY,EACZ,WAAmB,EACnB,eAAwB,EACxB,YAAqB;IAErB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IAED,IAAI,CAAC,eAAe,IAAI,CAAC,YAAY,EAAE;QACrC,OAAO,EAAE,CAAC,WAAW,CAAC,EAAE,GAAG,EAAE,CAAC;KAC/B;IAED,MAAM,MAAM,GAAG,EAAE,CAAC,WAAW,CAAC,EAAE,GAAG,EAAE,CAAC;IA
CtC,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,eAAe,CAAC,EAAE,YAAY,EAAE,CAAC;IAC1D,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAAC,eAAwB,EAAE,WAAkB;IACtE,MAAM,mBAAmB,GAAG,eAE3B,CAAC;IACF,WAAW,CAAC,OAAO,CAAC,CAAC,UAAU,EAAE,EAAE;QACjC,MAAM,CAAC,mBAAmB,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,EAAE;YAChE,mBAAmB,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QACnE,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,mBAAmB,GACvB,qKAAqK,CAAC;AAExK;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,KAAc;IAC5C,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,CAAC,IAAI,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;AAED,MAAM,UAAU,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAOD;;;GAGG;AACH,MAAM,UAAU,QAAQ,CAAC,KAAc;IACrC,OAAO,CACL,OAAO,KAAK,KAAK,QAAQ;QACzB,KAAK,KAAK,IAAI;QACd,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;QACrB,CAAC,CAAC,KAAK,YAAY,MAAM,CAAC;QAC1B,CAAC,CAAC,KAAK,YAAY,IAAI,CAAC,CACzB,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Constants } from \"./constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { XML_ATTRKEY } from \"./serializer.common\";\nimport { v4 as uuidv4 } from \"uuid\";\n\nconst validUuidRegex =\n /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param urlToCheck - The url to check\n * @returns True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param uri - The URI to be encoded.\n * @returns The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns The stripped version of Http Request.\n */\nexport 
function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Generated UUID\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(\n promiseFactories: Array,\n kickstart: unknown\n): Promise {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param err - The error occurred if any, while executing the request; otherwise null.\n * @param result - The deserialized response body if an error did not occur.\n * @param request - The raw/actual request sent to the server if an error did not occur.\n * @param response - The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nexport function promiseToCallback(promise: Promise): (cb: Function) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return (cb: Function): void => {\n promise\n .then((data: any) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch((err: Error) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(\n promise: Promise\n): (cb: ServiceCallback) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise\n .then((data: HttpOperationResponse) => {\n return process.nextTick(cb, undefined, data.parsedBody as 
T, data.request, data);\n })\n .catch((err: Error) => {\n process.nextTick(cb, err);\n });\n };\n}\n\nexport function prepareXMLRootList(\n obj: unknown,\n elementName: string,\n xmlNamespaceKey?: string,\n xmlNamespace?: string\n): { [s: string]: any } {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n\n if (!xmlNamespaceKey || !xmlNamespace) {\n return { [elementName]: obj };\n }\n\n const result = { [elementName]: obj };\n result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };\n return result;\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void {\n const castTargetCtorParam = targetCtorParam as {\n prototype: Record;\n };\n sourceCtors.forEach((sourceCtor) => {\n Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\n\nconst validateISODuration =\n /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? 
value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: unknown): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n/**\n * @internal\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * @internal\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js new file mode 100644 index 0000000000..fff2bf2aa9 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; +if (!self.document || !self.DOMParser || !self.Node || !self.XMLSerializer) { + throw new Error(`This library depends on the following DOM objects: ["document", "DOMParser", "Node", "XMLSerializer"] to parse XML, but some of these are undefined. You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. `); +} +let cachedDoc; +function getDoc() { + if (!cachedDoc) { + cachedDoc = document.implementation.createDocument(null, null, null); + } + return cachedDoc; +} +let cachedParser; +function getParser() { + if (!cachedParser) { + cachedParser = new DOMParser(); + } + return cachedParser; +} +let cachedSerializer; +function getSerializer() { + if (!cachedSerializer) { + cachedSerializer = new XMLSerializer(); + } + return cachedSerializer; +} +export function parseXML(str, opts = {}) { + var _a, _b, _c; + try { + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + const dom = getParser().parseFromString(str, "application/xml"); + throwIfError(dom); + let obj; + if (updatedOptions.includeRoot) { + obj = domToObject(dom, updatedOptions); + } + else { + obj = domToObject(dom.childNodes[0], updatedOptions); + } + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } +} +let errorNS; +function getErrorNamespace() { + var _a; + if (errorNS === undefined) { + try { + errorNS = + (_a = getParser().parseFromString("INVALID", "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _a !== void 0 ? _a : ""; + } + catch (ignored) { + // Most browsers will return a document containing , but IE will throw. 
+ errorNS = ""; + } + } + return errorNS; +} +function throwIfError(dom) { + const parserErrors = dom.getElementsByTagName("parsererror"); + if (parserErrors.length > 0 && getErrorNamespace()) { + for (let i = 0; i < parserErrors.length; i++) { + if (parserErrors[i].namespaceURI === errorNS) { + throw new Error(parserErrors[i].innerHTML); + } + } + } +} +function isElement(node) { + return !!node.attributes; +} +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? node : undefined; +} +function domToObject(node, options) { + let result = {}; + const childNodeCount = node.childNodes.length; + const firstChildNode = node.childNodes[0]; + const onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + const elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result[XML_ATTRKEY] = {}; + for (let i = 0; i < elementWithAttributes.attributes.length; i++) { + const attr = elementWithAttributes.attributes[i]; + result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result[options.xmlCharKey] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (let i = 0; i < childNodeCount; i++) { + const child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + const childObject = domToObject(child, options); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; +} +export function stringifyXML(content, opts = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "root", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0]; + return ('' + + getSerializer().serializeToString(dom)); +} +function buildAttributes(attrs) { + const result = []; + for (const key of Object.keys(attrs)) { + const attr = getDoc().createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} +function buildNode(obj, elementName, options) { + if (obj === undefined || + obj === null || + typeof obj === "string" || + typeof obj === "number" || + typeof obj === "boolean") { + const elem = getDoc().createElement(elementName); + elem.textContent = obj === undefined || obj === null ? 
"" : obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + const result = []; + for (const arrayElem of obj) { + for (const child of buildNode(arrayElem, elementName, options)) { + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + const elem = getDoc().createElement(elementName); + for (const key of Object.keys(obj)) { + if (key === XML_ATTRKEY) { + for (const attr of buildAttributes(obj[key])) { + elem.attributes.setNamedItem(attr); + } + } + else if (key === options.xmlCharKey) { + elem.textContent = obj[key].toString(); + } + else { + for (const child of buildNode(obj[key], key, options)) { + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error(`Illegal value passed to buildObject: ${obj}`); + } +} +//# sourceMappingURL=xml.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map new file mode 100644 index 0000000000..8301855238 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.js","sourceRoot":"","sources":["../../../src/util/xml.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElF,IAAI,CAAC,IAAI,CAAC,QAAQ,IAAI,CAAC,IAAI,CAAC,SAAS,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;IAC1E,MAAM,IAAI,KAAK,CACb,oUAAoU,CACrU,CAAC;CACH;AAED,IAAI,SAA+B,CAAC;AACpC,SAAS,MAAM;IACb,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;KACtE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,IAAI,YAAmC,CAAC;AACxC,SAAS,SAAS;IAChB,IAAI,CAAC,YAAY,EAAE;QACjB,YAAY,GAAG,IAAI,SAAS,EAAE,CAAC;KAChC;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,IAAI,gBAA2C,CAAC;AAChD,SAAS,aAAa;IACpB,IAAI,CAAC,gBAAgB,EAAE;QACrB,gBAAgB,GAAG,IAAI,aAAa,EAAE,CAAC;KACxC;IACD,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE;;IAChE,IAAI;QACF,MAAM,cAAc,GAAgC;YAClD,QAAQ,EAAE,MAAA,IAAI,CAAC,QAAQ,mCAAI,EAAE;YAC7B,WAAW,EAAE,MAAA,IAAI,CAAC,WAAW,mCAAI,KAAK;YACtC,UAAU,EAAE,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW;SAC3C,CAAC;QACF,MAAM,GAAG,GAAG,SAAS,EAAE,CAAC,eAAe,CAAC,GAAG,EAAE,iBAAiB,CAAC,CAAC;QAChE,YAAY,CAAC,GAAG,CAAC,CAAC;QAElB,IAAI,GAAG,CAAC;QACR,IAAI,cAAc,CAAC,WAAW,EAAE;YAC9B,GAAG,GAAG,WAAW,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC;SACxC;aAAM;YACL,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;SACtD;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;KAC7B;IAAC,OAAO,GAAQ,EAAE;QACjB,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;AACH,CAAC;AAED,IAAI,OAA2B,CAAC;AAEhC,SAAS,iBAAiB;;IACxB,IAAI,OAAO,KAAK,SAAS,EAAE;QACzB,IAAI;YACF,OAAO;gBACL,MAAA,SAAS,EAAE,CAAC,eAAe,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;qBACtF,YAAa,mCAAI,EAAE,CAAC;SAC1B;QAAC,OAAO,OAAY,EAAE;YACrB,oFAAoF;YACpF,OAAO,GAAG,EAAE,CAAC;SACd;KACF;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,YAAY,CAAC,GAAa;IACjC,MAAM,YAAY,GAAG,GAAG,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC;IAC7D,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,IAAI,iBAAiB,EAAE,EAAE;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC,YAAY,KAAK,OAAO,EAAE;gBAC5C,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;aAC5C;SACF;KACF;AACH,CAAC;AAED,SAAS,SAAS,CAAC,IAAU;IAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,SAAS,uBAAuB,CAAC,IAAU;IACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,S
AAS,CAAC;AACpE,CAAC;AAED,SAAS,WAAW,CAAC,IAAU,EAAE,OAAoC;IACnE,IAAI,MAAM,GAAQ,EAAE,CAAC;IAErB,MAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAEtD,MAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChD,MAAM,kBAAkB,GACtB,CAAC,cAAc;QACb,cAAc,KAAK,CAAC;QACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;QAC1C,cAAc,CAAC,SAAS,CAAC;QAC3B,SAAS,CAAC;IAEZ,MAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;IACjF,IAAI,qBAAqB,EAAE;QACzB,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC;QAEzB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,MAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjD,MAAM,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;SACrD;QAED,IAAI,kBAAkB,EAAE;YACtB,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,kBAAkB,CAAC;SACjD;KACF;SAAM,IAAI,cAAc,KAAK,CAAC,EAAE;QAC/B,MAAM,GAAG,EAAE,CAAC;KACb;SAAM,IAAI,kBAAkB,EAAE;QAC7B,MAAM,GAAG,kBAAkB,CAAC;KAC7B;IAED,IAAI,CAAC,kBAAkB,EAAE;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,MAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjC,2CAA2C;YAC3C,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;gBACrC,MAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;gBACrD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;oBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;iBACtC;qBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;oBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;iBAC1C;qBAAM;oBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;iBAChE;aACF;SACF;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,OAAgB,EAAE,OAA0B,EAAE;;IACzE,MAAM,cAAc,GAAgC;QAClD,QAAQ,EAAE,MAAA,IAAI,CAAC,QAAQ,mCAAI,MAAM;QACjC,WAAW,EAAE,MAAA,IAAI,CAAC,WAAW,mCAAI,KAAK;QACtC,UAAU,EAAE,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW;KAC3C,CAAC;IACF,MAAM,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,cAAc,CAAC,QAAQ,EAAE,cAAc,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3E,OAAO,CACL,yDAAyD;QACzD,aAAa,EAAE,CAAC,iBAAiB,CAAC,GAAG,CAAC,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CAAC,KAAgD;IACvE,MAAM,MAAM,GAAG,EAAE,CAAC;IAClB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;QACpC,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB,EAAE,OAAoC;IACpF,IACE,GAAG,KAAK,SAAS;QACjB,GAAG,KAAK,IAAI;QACZ,OAAO,GAAG,KAAK,QAAQ;QACvB,OAAO,GAAG,KAAK,QAAQ;QACvB,OAAO,GAAG,KAAK,SAAS,EACxB;QACA,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QACjD,IAAI,CAAC,WAAW,GAAG,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;QAC3E,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QAC7B,MAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAK,MAAM,SAAS,IAAI,GAAG,EAAE;YAC3B,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,SAAS,EAAE,WAAW,EAAE,OAAO,CAAC,EAAE;gBAC9D,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACpB;SACF;QACD,OAAO,MAAM,CAAC;KACf;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QACjD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI,GAAG,KAAK,WAAW,EAAE;gBACvB,KAAK,MAAM,IAAI,IAAI,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE;oBAC5C,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;iBACpC;aACF;iBAAM,IAAI,GAAG,KAAK,OAAO,CAAC,UAAU,EAAE;gBACrC,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;aACxC;iBAAM;gBACL,KAAK,MAAM,KAAK,IAAI,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE,OAAO,CAAC,EAAE;oBACrD,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;iBACzB;aACF;SACF;QACD,OAAO,CAAC,IAAI,CAAC,CA
AC;KACf;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,wCAAwC,GAAG,EAAE,CAAC,CAAC;KAChE;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\nif (!self.document || !self.DOMParser || !self.Node || !self.XMLSerializer) {\n throw new Error(\n `This library depends on the following DOM objects: [\"document\", \"DOMParser\", \"Node\", \"XMLSerializer\"] to parse XML, but some of these are undefined. You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. `\n );\n}\n\nlet cachedDoc: Document | undefined;\nfunction getDoc(): Document {\n if (!cachedDoc) {\n cachedDoc = document.implementation.createDocument(null, null, null);\n }\n return cachedDoc;\n}\n\nlet cachedParser: DOMParser | undefined;\nfunction getParser(): DOMParser {\n if (!cachedParser) {\n cachedParser = new DOMParser();\n }\n return cachedParser;\n}\n\nlet cachedSerializer: XMLSerializer | undefined;\nfunction getSerializer(): XMLSerializer {\n if (!cachedSerializer) {\n cachedSerializer = new XMLSerializer();\n }\n return cachedSerializer;\n}\n\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n try {\n const updatedOptions: Required = {\n rootName: opts.rootName ?? \"\",\n includeRoot: opts.includeRoot ?? false,\n xmlCharKey: opts.xmlCharKey ?? XML_CHARKEY,\n };\n const dom = getParser().parseFromString(str, \"application/xml\");\n throwIfError(dom);\n\n let obj;\n if (updatedOptions.includeRoot) {\n obj = domToObject(dom, updatedOptions);\n } else {\n obj = domToObject(dom.childNodes[0], updatedOptions);\n }\n\n return Promise.resolve(obj);\n } catch (err: any) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS: string | undefined;\n\nfunction getErrorNamespace(): string {\n if (errorNS === undefined) {\n try {\n errorNS =\n getParser().parseFromString(\"INVALID\", \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI! ?? \"\";\n } catch (ignored: any) {\n // Most browsers will return a document containing , but IE will throw.\n errorNS = \"\";\n }\n }\n return errorNS;\n}\n\nfunction throwIfError(dom: Document): void {\n const parserErrors = dom.getElementsByTagName(\"parsererror\");\n if (parserErrors.length > 0 && getErrorNamespace()) {\n for (let i = 0; i < parserErrors.length; i++) {\n if (parserErrors[i].namespaceURI === errorNS) {\n throw new Error(parserErrors[i].innerHTML);\n }\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? 
node : undefined;\n}\n\nfunction domToObject(node: Node, options: Required): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[XML_ATTRKEY] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[options.xmlCharKey] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child, options);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\nexport function stringifyXML(content: unknown, opts: SerializerOptions = {}): string {\n const updatedOptions: Required = {\n rootName: opts.rootName ?? \"root\",\n includeRoot: opts.includeRoot ?? false,\n xmlCharKey: opts.xmlCharKey ?? XML_CHARKEY,\n };\n const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0];\n return (\n '' +\n getSerializer().serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = getDoc().createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string, options: Required): Node[] {\n if (\n obj === undefined ||\n obj === null ||\n typeof obj === \"string\" ||\n typeof obj === \"number\" ||\n typeof obj === \"boolean\"\n ) {\n const elem = getDoc().createElement(elementName);\n elem.textContent = obj === undefined || obj === null ? 
\"\" : obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName, options)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = getDoc().createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === XML_ATTRKEY) {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else if (key === options.xmlCharKey) {\n elem.textContent = obj[key].toString();\n } else {\n for (const child of buildNode(obj[key], key, options)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.js new file mode 100644 index 0000000000..230f0c2278 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as xml2js from "xml2js"; +import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; +// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed +// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 +// By creating a new copy of the settings each time we instantiate the parser, +// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. +const xml2jsDefaultOptionsV2 = { + explicitCharkey: false, + trim: false, + normalize: false, + normalizeTags: false, + attrkey: XML_ATTRKEY, + explicitArray: true, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: true, + validator: undefined, + xmlns: false, + explicitChildren: false, + preserveChildrenOrder: false, + childkey: "$$", + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: undefined, + attrValueProcessors: undefined, + tagNameProcessors: undefined, + valueProcessors: undefined, + rootName: "root", + xmldec: { + version: "1.0", + encoding: "UTF-8", + standalone: true, + }, + doctype: undefined, + renderOpts: { + pretty: true, + indent: " ", + newline: "\n", + }, + headless: false, + chunkSize: 10000, + emptyTag: "", + cdata: false, +}; +// The xml2js settings for general XML parsing operations. +const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsParserSettings.explicitArray = false; +// The xml2js settings for general XML building operations. +const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsBuilderSettings.explicitArray = false; +xml2jsBuilderSettings.renderOpts = { + pretty: false, +}; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export function stringifyXML(obj, opts = {}) { + var _a; + xml2jsBuilderSettings.rootName = opts.rootName; + xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + const builder = new xml2js.Builder(xml2jsBuilderSettings); + return builder.buildObject(obj); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export function parseXML(str, opts = {}) { + var _a; + xml2jsParserSettings.explicitRoot = !!opts.includeRoot; + xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + const xmlParser = new xml2js.Parser(xml2jsParserSettings); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map b/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map new file mode 100644 index 0000000000..f1aff151b0 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/util/xml.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.js","sourceRoot":"","sources":["../../../src/util/xml.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAqB,WAAW,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElF,qIAAqI;AACrI,mGAAmG;AACnG,8EAA8E;AAC9E,+GAA+G;AAC/G,MAAM,sBAAsB,GAAqB;IAC/C,eAAe,EAAE,KAAK;IACtB,IAAI,EAAE,KAAK;IACX,SAAS,EAAE,KAAK;IAChB,aAAa,EAAE,KAAK;IACpB,OAAO,EAAE,WAAW;IACpB,aAAa,EAAE,IAAI;IACnB,WAAW,EAAE,KAAK;IAClB,UAAU,EAAE,KAAK;IACjB,YAAY,EAAE,IAAI;IAClB,SAAS,EAAE,SAAS;IACpB,KAAK,EAAE,KAAK;IACZ,gBAAgB,EAAE,KAAK;IACvB,qBAAqB,EAAE,KAAK;IAC5B,QAAQ,EAAE,IAAI;IACd,eAAe,EAAE,KAAK;IACtB,iBAAiB,EAAE,KAAK;IACxB,KAAK,EAAE,KAAK;IACZ,MAAM,EAAE,IAAI;IACZ,kBAAkB,EAAE,SAAS;IAC7B,mBAAmB,EAAE,SAAS;IAC9B,iBAAiB,EAAE,SAAS;IAC5B,eAAe,EAAE,SAAS;IAC1B,QAAQ,EAAE,MAAM;IAChB,MAAM,EAAE;QACN,OAAO,EAAE,KAAK;QACd,QAAQ,EAAE,OAAO;QACjB,UAAU,EAAE,IAAI;KACjB;IACD,OAAO,EAAE,SAAS;IAClB,UAAU,EAAE;QACV,MAAM,EAAE,IAAI;QACZ,MAAM,EAAE,IAAI;QACZ,OAAO,EAAE,IAAI;KACd;IACD,QAAQ,EAAE,KAAK;IACf,SAAS,EAAE,KAAK;IAChB,QAAQ,EAAE,EAAE;IACZ,KAAK,EAAE,KAAK;CACb,CAAC;AAEF,0DAA0D;AAC1D,MAAM,oBAAoB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC5E,oBAAoB,CAAC,aAAa,GAAG,KAAK,CAAC;AAE3C,2DAA2D;AAC3D,MAAM,qBAAqB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC7E,qBAAqB,CAAC,aAAa,GAAG,KAAK,CAAC;AAC5C,qBAAqB,CAAC,UAAU,GAAG;IACjC,MAAM,EAAE,KAAK;CACd,CAAC;AAEF;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,GAAY,EAAE,OAA0B,EAAE;;IACrE,qBAAqB,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC/C,qBAAqB,CAAC,OAAO,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC/D,MAAM,OAAO,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,qBAAqB,CAAC,CAAC;IAC1D,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE;;IAChE,oBAAoB,CAAC,YAAY,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;IACvD,oBAAoB,CAAC,OAAO,GAAG,MAAA,IAAI,CAAC,UAAU,mCAAI,WAAW,CAAC;IAC9D,MAAM,SAAS,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;IAC1D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;gBACtC,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;YACH,CAAC,CAAC,CAAC;SACJ;IACH,CAAC,CAAC,CAAC;AACL,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as xml2js from 
\"xml2js\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nconst xml2jsDefaultOptionsV2: xml2js.OptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true,\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\",\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false,\n};\n\n// The xml2js settings for general XML parsing operations.\nconst xml2jsParserSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n\n// The xml2js settings for general XML building operations.\nconst xml2jsBuilderSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false,\n};\n\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nexport function stringifyXML(obj: unknown, opts: SerializerOptions = {}): string {\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err, res) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/webResource.js b/node_modules/@azure/core-http/dist-esm/src/webResource.js new file mode 100644 index 0000000000..3fc3d27e54 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/webResource.js @@ -0,0 +1,264 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { HttpHeaders, isHttpHeadersLike } from "./httpHeaders"; +import { Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +export function isWebResourceLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.url === "string" && + typeof castObject.method === "string" && + typeof castObject.headers === "object" && + isHttpHeadersLike(castObject.headers) && + typeof castObject.validateRequestProperties === "function" && + typeof castObject.prepare === "function" && + typeof castObject.clone === "function") { + return true; + } + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export class WebResource { + constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { + this.streamResponseBody = streamResponseBody; + this.streamResponseStatusCodes = streamResponseStatusCodes; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.decompressResponse = decompressResponse; + this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties() { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method === undefined || + options.method === null || + typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate === undefined || + options.pathTemplate === null || + typeof options.pathTemplate.valueOf() !== "string") && + (options.url === undefined || + options.url === null || + typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. 
+ if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = pathParameters[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); + throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in parameters: ${stringifiedPathParameters}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + } + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } + else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", this.requestId); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly + this.body = options.body; + if (options.body !== undefined && options.body !== null) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + if (options.spanOptions) { + this.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + this.tracingContext = options.tracingContext; + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + return this; + } + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. 
+ */ + clone() { + const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + } +} +//# sourceMappingURL=webResource.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/webResource.js.map b/node_modules/@azure/core-http/dist-esm/src/webResource.js.map new file mode 100644 index 0000000000..e5ed93ce92 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/webResource.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.js","sourceRoot":"","sources":["../../src/webResource.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,WAAW,EAAmB,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAChF,OAAO,EAAU,UAAU,EAAE,MAAM,cAAc,CAAC;AAOlD,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AAuJ5C,MAAM,UAAU,iBAAiB,CAAC,MAAe;IAC/C,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAOlB,CAAC;QACF,IACE,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;YAClC,OAAO,UAAU,CAAC,MAAM,KAAK,QAAQ;YACrC,OAAO,UAAU,CAAC,OAAO,KAAK,QAAQ;YACtC,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC;YACrC,OAAO,UAAU,CAAC,yBAAyB,KAAK,UAAU;YAC1D,OAAO,UAAU,CAAC,OAAO,KAAK,UAAU;YACxC,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU,EACtC;YACA,OAAO,IAAI,CAAC;SACb;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;GAKG;AACH,MAAM,OAAO,WAAW;IAsGtB,YACE,GAAY,EACZ,MAAoB,EACpB,IAAc,EACd,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,kBAA4B,EAC5B,yBAAuC;QAEvC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,yBAAyB,GAAG,yBAAyB,CAAC;QAC3D,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,YAAY,EAAE,CAAC;IAChF,CAAC;IAED;;;;OAIG;IACH,yBAAyB;QACvB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;IACH,CAAC;IAED;;;;OAIG;IACH,OAAO,CAAC,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IACE,OAAO,CAAC,MAAM,KAAK,SAAS;YAC5B,OAAO,CAAC,MAAM,KAAK,IAAI;YACvB,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAC5C;YACA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,KAAK,SAAS;YACjC,OAAO,CAAC,YAAY,KAAK,IAAI;YAC7B,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC;YACrD,CAAC,OAAO,CAAC,GAAG,KAAK,
SAAS;gBACxB,OAAO,CAAC,GAAG,KAAK,IAAI;gBACpB,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EAC5C;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,iCAAiC;QACjC,IAAI,OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;QAED,iBAAiB;QACjB,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,MAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;QAE1D,iDAAiD;QACjD,IAAI,OAAO,CAAC,YAAY,EAAE;YACxB,MAAM,EAAE,YAAY,EAAE,cAAc,EAAE,GAAG,OAAO,CAAC;YACjD,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,GAAG,GACL,OAAO;gBACP,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;gBAClC,CAAC,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,CAAC;YACxE,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;YACpD,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,cAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,0EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,MAAM,SAAS,GAAI,cAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,CAAC,CAAC,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,yBAAyB,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC;wBAC/E,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,gCAAgC,aAAa,EAAE;4BAC1E,8CAA8C,yBAAyB,GAAG;4BAC1E,0EAA0E,aAAa,6BAA6B;4BACpH,wCAAwC,aAAa,6DAA6D,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,mEAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;SAChB;QAED,iHAAiH;QACjH,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,qFAAqF;oBACrF,2IAA2I,CAC9I,CAAC;aACH;YACD,uDAAuD;YACvD,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;YACD,wBAAwB;YACxB,MAAM,WAAW,GAAG,EAAE,CAAC;YACvB,4GAA4G;YAC5G,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,MAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,2BAA2B,cAAc,mEAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK
,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF,CAAC,aAAa;YACf,yBAAyB;YACzB,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;QAED,kDAAkD;QAClD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAK,MAAM,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;gBACrD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;QACD,0CAA0C;QAC1C,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;QACD,yCAAyC;QACzC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SAC5D;QAED,UAAU;QACV,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;QAED,0HAA0H;QAC1H,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,IAAI,EAAE;YACvD,6HAA6H;YAC7H,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;SACxC;QAED,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,IAAI,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;SAC9C;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QAEjD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACH,KAAK;QACH,MAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,yBAAyB,CAC/B,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Context, SpanOptions } from \"@azure/core-tracing\";\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { SerializerOptions 
} from \"./util/serializer.common\";\nimport { generateUuid } from \"./util/utils\";\n\n/**\n * List of supported HTTP methods.\n */\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\n\n/**\n * Possible HTTP request body types\n */\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * A description of a HTTP request to be made to a remote server.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * A unique identifier for the request. Used for logging and tracing.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. 
It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: unknown): object is WebResourceLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n url: unknown;\n method: unknown;\n headers: unknown;\n validateRequestProperties: unknown;\n prepare: unknown;\n clone: unknown;\n };\n if (\n typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\"\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nexport class WebResource implements WebResourceLike {\n /**\n * URL of the outgoing request.\n */\n url: string;\n /**\n * HTTP method to use.\n */\n method: HttpMethods;\n /**\n * Request body.\n */\n body?: any;\n /**\n * HTTP headers.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * Query added to the URL.\n */\n query?: { [key: string]: any };\n /**\n * Specification of the HTTP request.\n */\n operationSpec?: OperationSpec;\n /**\n * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination.\n */\n withCredentials: boolean;\n /**\n * How long to wait in milliseconds before aborting the request.\n */\n timeout: number;\n /**\n * What proxy to use, if necessary.\n */\n proxySettings?: ProxySettings;\n /**\n * Whether to keep the HTTP connections alive throughout requests.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * Unique identifier of the outgoing request.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * Tracing: Context used when creating Spans.\n */\n tracingContext?: Context;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: unknown,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n decompressResponse?: boolean,\n streamResponseStatusCodes?: Set\n ) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (\n options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\"\n ) {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2);\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in parameters: ${stringifiedPathParameters}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. 
It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. ` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n this.tracingContext = options.tracingContext;\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.decompressResponse,\n this.streamResponseStatusCodes\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\n/**\n * Options to prepare an outgoing HTTP request.\n */\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: `{ \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }`\n * - query-parameter-value in \"string\" format: `{ \"query-parameter-name\": \"query-parameter-value\"}`.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}`\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: `{ \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }`\n * - path-parameter-value in \"string\" format: `{ \"path-parameter-name\": \"path-parameter-value\" }`.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n /**\n * Form data, used to build the request body.\n */\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: Record;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Allows keeping track of the progress of uploading the outgoing request.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Allows keeping track of the progress of downloading the incoming response.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n /**\n * Value of the parameter.\n */\n value: any;\n /**\n * Disables URL encoding if set to true.\n */\n skipUrlEncoding: boolean;\n /**\n * Parameter values may contain any other property.\n */\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * May contain other properties.\n */\n [key: string]: any;\n\n /**\n * Options to override XML parsing/building behavior.\n */\n serializerOptions?: SerializerOptions;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js new file mode 100644 index 0000000000..2e3b259f37 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "./httpHeaders"; +import { AbortError } from "@azure/abort-controller"; +import { RestError } from "./restError"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export class XhrHttpClient { + sendRequest(request) { + var _a; + const xhr = new XMLHttpRequest(); + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + const abortSignal = request.abortSignal; + if (abortSignal) { + if (abortSignal.aborted) { + return Promise.reject(new AbortError("The operation was aborted.")); + } + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + const formData = request.formData; + const requestForm = new FormData(); + const appendFormValue = (key, value) => { + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm; + request.formData = undefined; + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const header of request.headers.headersArray()) { + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) || request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? null : request.body); + if (xhr.responseType === "blob") { + return new Promise((resolve, reject) => { + handleBlobResponse(xhr, request, resolve, reject); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + })); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} +function handleBlobResponse(xhr, request, res, rej) { + xhr.addEventListener("readystatechange", () => { + var _a; + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + if (request.streamResponseBody || ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(xhr.status))) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + else { + xhr.addEventListener("load", () => { + // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" + // but the status code is not one of the stream response status codes, + // so treat it as text and convert from Blob to text + if (xhr.response) { + // Blob.text() is not supported in IE so using FileReader instead + const reader = new FileReader(); + reader.onload = function (e) { + var _a; + const text = (_a = e.target) === null || _a === void 0 ? void 0 : _a.result; + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: text, + }); + }; + reader.onerror = function (_e) { + rej(reader.error); + }; + reader.readAsText(xhr.response, "UTF-8"); + } + else { + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + }); + } + }); + } + } + }); +} +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => listener({ + loadedBytes: rawEvent.loaded, + })); + } +} +// exported locally for testing +export function parseHeaders(xhr) { + const responseHeaders = new HttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", () => reject(new RestError(`Failed to send request to ${request.url}`, RestError.REQUEST_SEND_ERROR, undefined, request))); + const abortError = new AbortError("The operation was aborted."); + xhr.addEventListener("abort", () => reject(abortError)); + xhr.addEventListener("timeout", () => reject(abortError)); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map new file mode 100644 index 0000000000..3d02017777 --- /dev/null +++ b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xhrHttpClient.js","sourceRoot":"","sources":["../../src/xhrHttpClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAE7D,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAGrD,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,OAAO,aAAa;IACjB,WAAW,CAAC,OAAwB;;QACzC,MAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;QAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,MAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACxC,IAAI,WAAW,EAAE;YACf,IAAI,WAAW,CAAC,OAAO,EAAE;gBACvB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC,CAAC;aACrE;YAED,MAAM,QAAQ,GAAG,GAAS,EAAE;gBAC1B,GAAG,CAAC,KAAK,EAAE,CAAC;YACd,CAAC,CAAC;YACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;YAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE,GAAG,EAAE;gBAC5C,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;oBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;iBACpD;YACH,CAAC,CAAC,CAAC;SACJ;QAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;QAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YAClC,MAAM,WAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,EAAQ,EAAE;gBACxD,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;oBACA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;gBAC3C,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,OAAO,CAAC,IAAI,GAAG,WAAW,CAAC;YAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC7B,MAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,kEAAkE;gBAClE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;aACxC;SACF;QAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC9C,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACjD;QAED,GAAG,CAAC,YAAY;YACd,CAAA,MAAA,OAAO,CAAC,yBAAyB,0CAAE,IAAI,KAAI,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;QAE1F,2CAA2C;QAC3C,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE3D,IAAI,GAAG,CAAC,YAAY,KAAK,MAAM,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACrC,kBAAkB,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;gBAClD,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;aAAM;YACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;gBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE,CAChC,OAAO,CAAC;oBACN,OAAO;oBACP,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;oBAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;iBAC7B,CAAC,CACH,CAAC;gBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;CACF;AAED,SAAS,kBAAkB,CACzB,GAAmB,EACnB,OAAwB,EACxB,GAAgF,EAChF,GAA2B;IAE3B,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE,GAAG,EAAE;;QAC5C,wCAAwC;QACxC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;YACtD,IAAI,OAAO,CAAC,kBAAkB,KAAI,MAAA,OAAO,CAA
C,yBAAyB,0CAAE,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA,EAAE;gBACpF,MAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;oBACrD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE;wBAChC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;oBACxB,CAAC,CAAC,CAAC;oBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;gBAC9C,CAAC,CAAC,CAAC;gBACH,GAAG,CAAC;oBACF,OAAO;oBACP,MAAM,EAAE,GAAG,CAAC,MAAM;oBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;oBAC1B,QAAQ;iBACT,CAAC,CAAC;aACJ;iBAAM;gBACL,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE,GAAG,EAAE;oBAChC,uFAAuF;oBACvF,sEAAsE;oBACtE,oDAAoD;oBACpD,IAAI,GAAG,CAAC,QAAQ,EAAE;wBAChB,iEAAiE;wBACjE,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;wBAChC,MAAM,CAAC,MAAM,GAAG,UAAU,CAAC;;4BACzB,MAAM,IAAI,GAAG,MAAA,CAAC,CAAC,MAAM,0CAAE,MAAgB,CAAC;4BACxC,GAAG,CAAC;gCACF,OAAO;gCACP,MAAM,EAAE,GAAG,CAAC,MAAM;gCAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;gCAC1B,UAAU,EAAE,IAAI;6BACjB,CAAC,CAAC;wBACL,CAAC,CAAC;wBACF,MAAM,CAAC,OAAO,GAAG,UAAU,EAAE;4BAC3B,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;wBACpB,CAAC,CAAC;wBACF,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;qBAC1C;yBAAM;wBACL,GAAG,CAAC;4BACF,OAAO;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;yBAC3B,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;aACJ;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;IAEpD,IAAI,QAAQ,EAAE;QACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,CAAC,QAAQ,EAAE,EAAE,CAC5C,QAAQ,CAAC;YACP,WAAW,EAAE,QAAQ,CAAC,MAAM;SAC7B,CAAC,CACH,CAAC;KACH;AACH,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,YAAY,CAAC,GAAmB;IAC9C,MAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;IAC1C,MAAM,WAAW,GAAG,GAAG;SACpB,qBAAqB,EAAE;SACvB,IAAI,EAAE;SACN,KAAK,CAAC,SAAS,CAAC,CAAC;IACpB,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;QAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChC,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,MAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;KAC9C;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;IAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CACjC,MAAM,CACJ,IAAI,SAAS,CACX,6BAA6B,OAAO,CAAC,GAAG,EAAE,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF,CACF,CAAC;IACF,MAAM,UAAU,GAAG,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;IAChE,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;IACxD,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE,GAAG,EAAE,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AAC5D,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n if (abortSignal.aborted) {\n return Promise.reject(new AbortError(\"The operation was aborted.\"));\n }\n\n const listener = (): void => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n 
abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n\n xhr.responseType =\n request.streamResponseStatusCodes?.size || request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? null : request.body);\n\n if (xhr.responseType === \"blob\") {\n return new Promise((resolve, reject) => {\n handleBlobResponse(xhr, request, resolve, reject);\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction handleBlobResponse(\n xhr: XMLHttpRequest,\n request: WebResourceLike,\n res: (value: HttpOperationResponse | PromiseLike) => void,\n rej: (reason?: any) => void\n): void {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n if (request.streamResponseBody || request.streamResponseStatusCodes?.has(xhr.status)) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n res({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n } else {\n xhr.addEventListener(\"load\", () => {\n // xhr.response is of Blob type if the request is sent with xhr.responseType === \"blob\"\n // but the status code is not one of the stream response status codes,\n // so treat it as text and convert from Blob to text\n if (xhr.response) {\n // Blob.text() is not supported in IE so using FileReader instead\n const reader = new FileReader();\n reader.onload = function (e) {\n const text = e.target?.result as string;\n res({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: text,\n });\n };\n reader.onerror = function (_e) {\n rej(reader.error);\n };\n reader.readAsText(xhr.response, \"UTF-8\");\n } else {\n res({\n request,\n 
status: xhr.status,\n headers: parseHeaders(xhr),\n });\n }\n });\n }\n }\n });\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n): void {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n): void {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n const abortError = new AbortError(\"The operation was aborted.\");\n xhr.addEventListener(\"abort\", () => reject(abortError));\n xhr.addEventListener(\"timeout\", () => reject(abortError));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist/index.js b/node_modules/@azure/core-http/dist/index.js new file mode 100644 index 0000000000..d2670e369b --- /dev/null +++ b/node_modules/@azure/core-http/dist/index.js @@ -0,0 +1,5536 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var uuid = require('uuid'); +var util = require('util'); +var tslib = require('tslib'); +var xml2js = require('xml2js'); +var abortController = require('@azure/abort-controller'); +var logger$1 = require('@azure/logger'); +var coreAuth = require('@azure/core-auth'); +var os = require('os'); +var http = require('http'); +var https = require('https'); +var tough = require('tough-cookie'); +var tunnel = require('tunnel'); +var stream = require('stream'); +var FormData = require('form-data'); +var node_fetch = require('node-fetch'); +var coreTracing = require('@azure/core-tracing'); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tough__namespace = /*#__PURE__*/_interopNamespace(tough); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +function isHttpHeadersLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.rawHeaders === "function" && + typeof castObject.clone === "function" && + typeof castObject.get === "function" && + typeof castObject.set === "function" && + typeof castObject.contains === "function" && + typeof castObject.remove === "function" && + typeof castObject.headersArray === "function" && + typeof castObject.headerValues === "function" && + typeof castObject.headerNames === "function" && + typeof castObject.toJson === "function") { + return true; + } + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + /** + * The core-http version + */ + coreHttpVersion: "2.2.5", + /** + * Specifies HTTP. + */ + HTTP: "http:", + /** + * Specifies HTTPS. + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + ServiceUnavailable: 503, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + */ + USER_AGENT: "User-Agent", + }, +}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +const XML_CHARKEY = "_"; + +// Copyright (c) Microsoft Corporation. 
+const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +const isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +function stripResponse(response) { + const strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +function stripRequest(request) { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +function generateUuid() { + return uuid.v4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +function executePromisesSequentially(promiseFactories, kickstart) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +// eslint-disable-next-line @typescript-eslint/ban-types +function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + // eslint-disable-next-line @typescript-eslint/ban-types + return (cb) => { + promise + .then((data) => { + // eslint-disable-next-line promise/no-callback-in-promise + return cb(undefined, data); + }) + .catch((err) => { + // eslint-disable-next-line promise/no-callback-in-promise + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. 
+ * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return (cb) => { + promise + .then((data) => { + return process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }) + .catch((err) => { + process.nextTick(cb, err); + }); + }; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +function applyMixins(targetCtorParam, sourceCtors) { + const castTargetCtorParam = targetCtorParam; + sourceCtors.forEach((sourceCtor) => { + Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { + castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; + }); + }); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. + */ +function isObject(input) { + return (typeof input === "object" && + input !== null && + !Array.isArray(input) && + !(input instanceof RegExp) && + !(input instanceof Date)); +} + +// Copyright (c) Microsoft Corporation. +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. 
+/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +class Serializer { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value != undefined) { + const valueAsNumber = value; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + const valueAsArray = value; + if (MaxItems != undefined && valueAsArray.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && valueAsArray.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && valueAsArray.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && valueAsArray.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper, object, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper, responseBody, objectName, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xmlCharKey; + const castResponseBody = responseBody; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (castResponseBody[XML_ATTRKEY] != undefined && + castResponseBody[xmlCharKey] != undefined) { + responseBody = castResponseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = encodeByteArray(value); + } + return returnValue; +} +function serializeBase64UrlType(objectName, value) { + let returnValue = ""; + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + returnValue = bufferToBase64Url(value) || ""; + } + return returnValue; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by `className`. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + * @param objectName - Name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer - The serializer containing the entire set of mappers + * @param mapper - The composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && + (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? 
`xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName != undefined) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_a = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _a !== void 0 ? _a : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. 
The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + const tempArray = []; + for (let i = 0; 
i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +function serializeObject(toSerialize) { + const castToSerialize = toSerialize; + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + const dictionary = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(castToSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + const result = {}; + for (const key of o) { + result[key] = key; + } + return result; +} +/** + * String enum containing the string types of property mappers. + */ +// eslint-disable-next-line @typescript-eslint/no-redeclare +const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); + +// Copyright (c) Microsoft Corporation. +function isWebResourceLike(object) { + if (object && typeof object === "object") { + const castObject = object; + if (typeof castObject.url === "string" && + typeof castObject.method === "string" && + typeof castObject.headers === "object" && + isHttpHeadersLike(castObject.headers) && + typeof castObject.validateRequestProperties === "function" && + typeof castObject.prepare === "function" && + typeof castObject.clone === "function") { + return true; + } + } + return false; +} +/** + * Creates a new WebResource object. 
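+ * It can be populated via the prepare() method, e.g. (illustrative sketch; the URL is a placeholder):
+ *
+ *   const request = new WebResource();
+ *   request.prepare({ method: "GET", url: "https://example.com/resource" });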
+ * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +class WebResource { + constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { + this.streamResponseBody = streamResponseBody; + this.streamResponseStatusCodes = streamResponseStatusCodes; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.decompressResponse = decompressResponse; + this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties() { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method === undefined || + options.method === null || + typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate === undefined || + options.pathTemplate === null || + typeof options.pathTemplate.valueOf() !== "string") && + (options.url === undefined || + options.url === null || + typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = pathParameters[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); + throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in parameters: ${stringifiedPathParameters}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); + } + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } + else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
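+                // (Illustrative) a queryParameters option of
+                //   { "api-version": "2020-01-01", filter: { value: "a b", skipUrlEncoding: false } }
+                // is serialized below as "?api-version=2020-01-01&filter=a%20b" and mirrored into this.query.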
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", this.requestId); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly + this.body = options.body; + if (options.body !== undefined && options.body !== null) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + if (options.spanOptions) { + this.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + this.tracingContext = options.tracingContext; + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + return this; + } + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. 
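+     * Note that the headers are deep-copied via headers.clone(), while body, query, and formData
+     * are reused by reference (see the constructor call and the assignments below).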
+ */ + clone() { + const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A class that handles the query portion of a URLBuilder. + */ +class URLQuery { + constructor() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any() { + return Object.keys(this._rawQuery).length > 0; + } + /** + * Get the keys of the query string. + */ + keys() { + return Object.keys(this._rawQuery); + } + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName, parameterValue) { + const caseParameterValue = parameterValue; + if (parameterName) { + if (caseParameterValue !== undefined && caseParameterValue !== null) { + const newValue = Array.isArray(caseParameterValue) + ? caseParameterValue + : caseParameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + } + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString() { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } + else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + /** + * Parse a URLQuery from the provided text. 
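+     * For example (illustrative), URLQuery.parse("?a=1&b=2").toString() returns "a=1&b=2";
+     * a leading "?" is stripped before parsing.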
+ */ + static parse(text) { + const result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + let currentState = "ParameterName"; + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + } +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +class URLBuilder { + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + } + /** + * Get the scheme that has been set in this URL. + */ + getScheme() { + return this._scheme; + } + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + } + /** + * Get the host that has been set in this URL. + */ + getHost() { + return this._host; + } + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port) { + if (port === undefined || port === null || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + } + /** + * Get the port that has been set in this URL. + */ + getPort() { + return this._port; + } + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path) { + if (!path) { + this._path = undefined; + } + else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + } + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path) { + if (path) { + let currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + /** + * Get the path that has been set in this URL. 
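+     * For example (illustrative), after appendPath("b") on a URL whose path is "/a",
+     * getPath() returns "/a/b".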
+ */ + getPath() { + return this._path; + } + /** + * Set the query in this URL. + */ + setQuery(query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + } + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + } + /** + * Get the query in this URL. + */ + getQuery() { + return this._query ? this._query.toString() : undefined; + } + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + set(text, startState) { + const tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + const token = tokenizer.current(); + let tokenPath; + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString() { + let result = ""; + if (this._scheme) { + result += `${this._scheme}://`; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += `:${this._port}`; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + return result; + } + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + /** + * Parses a given string URL into a new {@link URLBuilder}. 
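+     * For example (illustrative):
+     *   const url = URLBuilder.parse("https://example.com/a?b=1");
+     *   url.setQueryParameter("c", "2");
+     *   url.toString(); // "https://example.com/a?b=1&c=2"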
+ */ + static parse(text) { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} +class URLToken { + constructor(text, type) { + this.text = text; + this.type = type; + } + static scheme(text) { + return new URLToken(text, "SCHEME"); + } + static host(text) { + return new URLToken(text, "HOST"); + } + static port(text) { + return new URLToken(text, "PORT"); + } + static path(text) { + return new URLToken(text, "PATH"); + } + static query(text) { + return new URLToken(text, "QUERY"); + } +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +function isAlphaNumericCharacter(character) { + const characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +class URLTokenizer { + constructor(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current() { + return this._currentToken; + } + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next() { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. 
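+ * For example (illustrative), peeking 3 characters of "://host" from index 0 returns "://",
+ * which is how nextHost detects the scheme separator.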
+ */ +function peekCharacters(tokenizer, charactersToPeek) { + let endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + let result = ""; + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer, ...terminatingCharacters) { + return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); +} +function nextScheme(tokenizer) { + const scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + const host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + const port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + const path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + 
tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + const query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} + +// Copyright (c) Microsoft Corporation. +function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel__namespace.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpOverHttps(tunnelOptions); + } + else { + return tunnel__namespace.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} + +// Copyright (c) Microsoft Corporation. +const RedactedString = "REDACTED"; +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +class Sanitizer { + constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { + allowedHeaderNames = Array.isArray(allowedHeaderNames) + ? 
defaultAllowedHeaderNames.concat(allowedHeaderNames) + : defaultAllowedHeaderNames; + allowedQueryParameters = Array.isArray(allowedQueryParameters) + ? defaultAllowedQueryParameters.concat(allowedQueryParameters) + : defaultAllowedQueryParameters; + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "_headersMap") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(value) { + return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); + } + sanitizeQuery(value) { + return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); + } + sanitizeObject(value, allowedKeys, accessor) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (allowedKeys.has(k.toLowerCase())) { + sanitized[k] = accessor(value, k); + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const urlBuilder = URLBuilder.parse(value); + const queryString = urlBuilder.getQuery(); + if (!queryString) { + return value; + } + const query = URLQuery.parse(queryString); + for (const k of query.keys()) { + if (!this.allowedQueryParameters.has(k.toLowerCase())) { + query.set(k, RedactedString); + } + } + urlBuilder.setQuery(query.toString()); + return urlBuilder.toString(); + } +} + +// Copyright (c) Microsoft Corporation. +const custom = util.inspect.custom; + +// Copyright (c) Microsoft Corporation. +const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ +class RestError extends Error { + constructor(message, code, statusCode, request, response) { + super(message); + this.name = "RestError"; + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. 
Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; + +// Copyright (c) Microsoft Corporation. +const logger = logger$1.createClientLogger("core-http"); + +// Copyright (c) Microsoft Corporation. +function getCachedAgent(isHttps, agentCache) { + return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; +} +class ReportTransform extends stream.Transform { + constructor(progressCallback) { + super(); + this.progressCallback = progressCallback; + this.loadedBytes = 0; + } + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(undefined); + } +} +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough__namespace.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. 
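+     *
+     * A minimal usage sketch (illustrative; the URL is a placeholder):
+     *   const client = new NodeFetchHttpClient();
+     *   const response = await client.sendRequest(new WebResource("https://example.com", "GET"));
+     *   // response.status, response.headers, response.bodyAsText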
+ */ + async sendRequest(httpRequest) { + var _a; + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); + } + const abortController$1 = new abortController.AbortController(); + let abortListener; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new abortController.AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController$1.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(() => { + abortController$1.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + const formData = httpRequest.formData; + const requestForm = new FormData__default["default"](); + const appendFormValue = (key, value) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && + Object.prototype.hasOwnProperty.call(value, "value") && + Object.prototype.hasOwnProperty.call(value, "options")) { + requestForm.append(key, value.value, value.options); + } + else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + const onUploadProgress = httpRequest.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const platformSpecificRequestInit = await this.prepareRequest(httpRequest); + const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); + let operationResponse; + try { + const response = await this.fetch(httpRequest.url, requestInit); + const headers = parseHeaders(response.headers); + const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || + httpRequest.streamResponseBody; + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: streaming + ? response.body + : undefined, + bodyAsText: !streaming ? 
await response.text() : undefined, + }; + const onDownloadProgress = httpRequest.onDownloadProgress; + if (onDownloadProgress) { + const responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + const length = parseInt(headers.get("Content-Length")) || undefined; + if (length) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress({ loadedBytes: length }); + } + } + } + await this.processRequest(operationResponse); + return operationResponse; + } + catch (error) { + const fetchError = error; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new abortController.AbortError("The operation was aborted."); + } + throw fetchError; + } + finally { + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + getOrCreateAgent(httpRequest) { + var _a; + const isHttps = isUrlHttps(httpRequest.url); + // At the moment, proxy settings and keepAlive are mutually + // exclusive because the 'tunnel' library currently lacks the + // ability to create a proxy with keepAlive turned on. + if (httpRequest.proxySettings) { + const { host, port, username, password } = httpRequest.proxySettings; + const key = `${host}:${port}:${username}:${password}`; + const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; + let agent = getCachedAgent(isHttps, proxyAgents); + if (agent) { + return agent; + } + const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + agent = tunnel.agent; + if (tunnel.isHttps) { + proxyAgents.httpsAgent = tunnel.agent; + } + else { + proxyAgents.httpAgent = tunnel.agent; + } + this.proxyAgentMap.set(key, proxyAgents); + return agent; + } + else if (httpRequest.keepAlive) { + let agent = getCachedAgent(isHttps, this.keepAliveAgents); + if (agent) { + return agent; + } + const agentOptions = { + keepAlive: httpRequest.keepAlive, + }; + if (isHttps) { + agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); + } + else { + agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); + } + return agent; + } + else { + return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; + } + } + /** + * Uses `node-fetch` to perform the request. 
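+     * Subclasses may override this method to supply a different fetch implementation
+     * (for example, a stubbed fetch in tests) without changing the surrounding request handling.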
+ */ + // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs + async fetch(input, init) { + return node_fetch__default["default"](input, init); + } + /** + * Prepares a request based on the provided web resource. + */ + async prepareRequest(httpRequest) { + const requestInit = {}; + if (this.cookieJar && !httpRequest.headers.get("Cookie")) { + const cookieString = await new Promise((resolve, reject) => { + this.cookieJar.getCookieString(httpRequest.url, (err, cookie) => { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + }); + httpRequest.headers.set("Cookie", cookieString); + } + // Set the http(s) agent + requestInit.agent = this.getOrCreateAgent(httpRequest); + requestInit.compress = httpRequest.decompressResponse; + return requestInit; + } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + async processRequest(operationResponse) { + if (this.cookieJar) { + const setCookieHeader = operationResponse.headers.get("Set-Cookie"); + if (setCookieHeader !== undefined) { + await new Promise((resolve, reject) => { + this.cookieJar.setCookie(setCookieHeader, operationResponse.request.url, { ignoreError: true }, (err) => { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + }); + } + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +exports.HttpPipelineLogLevel = void 0; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + +// Copyright (c) Microsoft Corporation. +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +function operationOptionsToRequestOptionsBase(opts) { + const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); + let result = additionalOptions; + if (requestOptions) { + result = Object.assign(Object.assign({}, result), requestOptions); + } + if (tracingOptions) { + result.tracingContext = tracingOptions.tracingContext; + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? void 0 : tracingOptions.spanOptions; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * The base class from which all request policies derive. + */ +class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. 
+ */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +class RequestPolicyOptions { + constructor(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed +// by the xml2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 +// By creating a new copy of the settings each time we instantiate the parser, +// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. +const xml2jsDefaultOptionsV2 = { + explicitCharkey: false, + trim: false, + normalize: false, + normalizeTags: false, + attrkey: XML_ATTRKEY, + explicitArray: true, + ignoreAttrs: false, + mergeAttrs: false, + explicitRoot: true, + validator: undefined, + xmlns: false, + explicitChildren: false, + preserveChildrenOrder: false, + childkey: "$$", + charsAsChildren: false, + includeWhiteChars: false, + async: false, + strict: true, + attrNameProcessors: undefined, + attrValueProcessors: undefined, + tagNameProcessors: undefined, + valueProcessors: undefined, + rootName: "root", + xmldec: { + version: "1.0", + encoding: "UTF-8", + standalone: true, + }, + doctype: undefined, + renderOpts: { + pretty: true, + indent: " ", + newline: "\n", + }, + headless: false, + chunkSize: 10000, + emptyTag: "", + cdata: false, +}; +// The xml2js settings for general XML parsing operations. +const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsParserSettings.explicitArray = false; +// The xml2js settings for general XML building operations.
+const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); +xml2jsBuilderSettings.explicitArray = false; +xml2jsBuilderSettings.renderOpts = { + pretty: false, +}; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +function stringifyXML(obj, opts = {}) { + var _a; + xml2jsBuilderSettings.rootName = opts.rootName; + xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); + return builder.buildObject(obj); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +function parseXML(str, opts = {}) { + var _a; + xml2jsParserSettings.explicitRoot = !!opts.includeRoot; + xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; + const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, (err, res) => { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} + +// Copyright (c) Microsoft Corporation. +/** + * Create a new deserialization RequestPolicyCreator that will deserialize HTTP response bodies as they + * pass through the HTTP pipeline. + */ +function deserializationPolicy(deserializationContentTypes, parsingOptions) { + return { + create: (nextPolicy, options) => { + return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); + }, + }; +} +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +const DefaultDeserializationOptions = { + expectedContentTypes: { + json: defaultJsonContentTypes, + xml: defaultXmlContentTypes, + }, +}; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +class DeserializationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { + var _a; + super(nextPolicy, requestPolicyOptions); + this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ?
_a : XML_CHARKEY; + } + async sendRequest(request) { + return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { + xmlCharKey: this.xmlCharKey, + })); + } +} +function getOperationResponse(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationSpec = request.operationSpec; + if (operationSpec) { + const operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const shouldDeserialize = parsedResponse.request.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { + var _a, _b, _c; + const updatedOptions = { + rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }; + return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationSpec = parsedResponse.request.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponse(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (innerError) { + const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders", options); + } + } + return parsedResponse; + }); +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; + const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) || + parsedResponse.request.streamResponseBody; + const initialErrorMessage = streaming + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let parsedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedBody === "object" ? 
parsedBody[defaultBodyMapper.xmlElementName] : []; + } + parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); + } + const internalError = parsedBody.error || parsedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = parsedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { + var _a; + const errorHandler = (err) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); + return Promise.reject(e); + }; + const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || + operationResponse.request.streamResponseBody; + if (!streaming && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text, opts) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} + +// Copyright (c) Microsoft Corporation. +/** + * By default, HTTP connections are maintained for future requests. + */ +const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. 
+ * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const DEFAULT_CLIENT_RETRY_COUNT = 3; +// intervals are in ms +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +function isNumber(n) { + return typeof n === "number"; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial check on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(retryLimit, predicate, retryData, response, error) { + if (!predicate(response, error)) { + return false; + } + return retryData.retryCount < retryLimit; +} +/** + * @internal + * Updates the retry data for the next attempt.
+ * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation"s error, if any. + */ +function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; + const boundedRandDelta = retryOptions.retryInterval * 0.8 + + Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); + return retryData; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helper TypeGuard that checks if the value is not null or undefined. + * @param thing - Anything + * @internal + */ +function isDefined(thing) { + return typeof thing !== "undefined" && thing !== null; +} + +// Copyright (c) Microsoft Corporation. +const StandardAbortMessage$1 = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * @param abortSignal - The abortSignal associated with containing operation. + * @param abortErrorMsg - The abort error message associated with containing operation. + * @returns - Resolved promise + */ +function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage$1)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (isDefined(timer)) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} + +// Copyright (c) Microsoft Corporation. +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); + }, + }; +} +/** + * Describes the Retry Mode type. 
Currently supporting only Exponential. + */ +exports.RetryMode = void 0; +(function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; +})(exports.RetryMode || (exports.RetryMode = {})); +const DefaultRetryOptions = { + maxRetries: DEFAULT_CLIENT_RETRY_COUNT, + retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, +}; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry$1(this, request, response)) + .catch((error) => retry$1(this, request, error.response, undefined, error)); + } +} +async function retry$1(policy, request, response, retryData, requestError) { + function shouldPolicyRetry(responseParam) { + const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; + if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { + return false; + } + if (statusCode === undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + return true; + } + retryData = updateRetryData({ + retryInterval: policy.retryInterval, + minRetryInterval: 0, + maxRetryInterval: policy.maxRetryInterval, + }, retryData, requestError); + const isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { + logger.info(`Retrying request in ${retryData.retryInterval}`); + try { + await delay(retryData.retryInterval); + const res = await policy._nextPolicy.sendRequest(request.clone()); + return retry$1(policy, request, res, retryData); + } + catch (err) { + return retry$1(policy, request, response, retryData, err); + } + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + throw err; + } + else { + return response; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. 
+ * @returns An instance of the {@link LogPolicy} + */ +function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger$1; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * Gets the list of status codes for streaming responses. 
+ * @internal + */ +function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +exports.QueryCollectionFormat = void 0; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. 
+ */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter at a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizing refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress.
+ */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
+ */ +function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ +function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + create: (nextPolicy, options) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _requestIdHeaderName) { + super(nextPolicy, options); + this._requestIdHeaderName = _requestIdHeaderName; + } + sendRequest(request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, request.requestId); + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new NodeFetchHttpClient(); + } + return cachedHttpClient; +} + +// Copyright (c) Microsoft Corporation. 
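+// Usage sketch (illustrative only, not part of the vendored bundle): the policy factories in this
+// file are typically supplied to an @azure/core-http ServiceClient through its
+// `requestPolicyFactories` option. The pipeline composition and the `credential` name below are
+// assumptions chosen for illustration.
+//
+//   const client = new ServiceClient(credential, {
+//     requestPolicyFactories: [
+//       logPolicy(),
+//       exponentialRetryPolicy(),
+//       ndJsonPolicy(),
+//     ],
+//   });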
+function ndJsonPolicy() { + return { + create: (nextPolicy, options) => { + return new NdJsonPolicy(nextPolicy, options); + }, + }; +} +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of NdJsonPolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends a request. + */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either the domain itself or any subdomain or host + // .foo.com will match foo.com itself or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ?
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; + } + sendRequest(request) { + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +function rpRegistrationPolicy(retryTimeout = 30) { + return { + create: (nextPolicy, options) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +class RPRegistrationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _retryTimeout = 30) { + super(nextPolicy, options); + this._retryTimeout = _retryTimeout; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. 
+ request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param originalRequest - The original request + * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. + * @returns A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo = false) { + const reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param body - The response body received after making the original request. + * @returns The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param url - The original request url + * @returns The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} +/** + * Registers the given provider. + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param provider - The provider name to be registered. + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + */ +async function registerRP(policy, urlPrefix, provider, originalRequest) { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + const response = await policy._nextPolicy.sendRequest(reqOptions); + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); +} +/** + * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param policy - The RPRegistrationPolicy this function is being called against. + * @param url - The request url for polling + * @param originalRequest - The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns True if RP Registration is successful. + */ +async function getRegistrationStatus(policy, url, originalRequest) { + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + const res = await policy._nextPolicy.sendRequest(reqOptions); + const obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + await delay(policy._retryTimeout * 1000); + return getRegistrationStatus(policy, url, originalRequest); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +function signingPolicy(authenticationProvider) { + return { + create: (nextPolicy, options) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: (nextPolicy, options) => { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +class SystemErrorRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + super(nextPolicy, options); + this.retryCount = isNumber(retryCount) ? 
retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} +async function retry(policy, request, operationResponse, err, retryData) { + retryData = updateRetryData(policy, retryData, err); + function shouldPolicyRetry(_response, error) { + if (error && + error.code && + (error.code === "ETIMEDOUT" || + error.code === "ESOCKETTIMEDOUT" || + error.code === "ECONNREFUSED" || + error.code === "ECONNRESET" || + error.code === "ENOENT")) { + return true; + } + return false; + } + if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } + catch (nestedErr) { + return retry(policy, request, operationResponse, nestedErr, retryData); + } + } + else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Maximum number of retries for the throttling retry policy + */ +const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; + +// Copyright (c) Microsoft Corporation. +const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +function throttlingRetryPolicy() { + return { + create: (nextPolicy, options) => { + return new ThrottlingRetryPolicy(nextPolicy, options); + }, + }; +} +const StandardAbortMessage = "The operation was aborted."; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +class ThrottlingRetryPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, _handleResponse) { + super(nextPolicy, options); + this.numberOfRetries = 0; + this._handleResponse = _handleResponse || this._defaultResponseHandler; + } + async sendRequest(httpRequest) { + const response = await this._nextPolicy.sendRequest(httpRequest.clone()); + if (response.status !== StatusCodes.TooManyRequests && + response.status !== StatusCodes.ServiceUnavailable) { + return response; + } + else { + return this._handleResponse(httpRequest, response); + } + } + async _defaultResponseHandler(httpRequest, httpResponse) { + var _a; + const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (retryAfterHeader) { + const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (delayInMs) { + this.numberOfRetries += 1; + await delay(delayInMs, undefined, { + abortSignal: httpRequest.abortSignal, + abortErrorMsg: StandardAbortMessage, + }); + if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw new abortController.AbortError(StandardAbortMessage); + } + if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { + return this.sendRequest(httpRequest); + } + else { + return this._nextPolicy.sendRequest(httpRequest); + } + } + } + return httpResponse; + } + static parseRetryAfterHeader(headerValue) { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + } + static parseDateRetryAfterHeader(headerValue) { + try { + const now = Date.now(); + const date = Date.parse(headerValue); + const diff = date - now; + return Number.isNaN(diff) ? undefined : diff; + } + catch (error) { + return undefined; + } + } +} + +// Copyright (c) Microsoft Corporation. +const createSpan = coreTracing.createSpanFunction({ + packagePrefix: "", + namespace: "", +}); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +function tracingPolicy(tracingOptions = {}) { + return { + create(nextPolicy, options) { + return new TracingPolicy(nextPolicy, options, tracingOptions); + }, + }; +} +/** + * A policy that wraps outgoing requests with a tracing span. 
+ */ +class TracingPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, tracingOptions) { + super(nextPolicy, options); + this.userAgent = tracingOptions.userAgent; + } + async sendRequest(request) { + if (!request.tracingContext) { + return this._nextPolicy.sendRequest(request); + } + const span = this.tryCreateSpan(request); + if (!span) { + return this._nextPolicy.sendRequest(request); + } + try { + const response = await this._nextPolicy.sendRequest(request); + this.tryProcessResponse(span, response); + return response; + } + catch (err) { + this.tryProcessError(span, err); + throw err; + } + } + tryCreateSpan(request) { + var _a; + try { + // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. + // We can pass this as a separate parameter once we upgrade to the latest core-tracing. + const { span } = createSpan(`HTTP ${request.method}`, { + tracingOptions: { + spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), + tracingContext: request.tracingContext, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? void 0 : _a.getValue(Symbol.for("az.namespace")); + if (typeof namespaceFromContext === "string") { + span.setAttribute("az.namespace", namespaceFromContext); + } + span.setAttributes({ + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }); + if (this.userAgent) { + span.setAttribute("http.user_agent", this.userAgent); + } + // set headers + const spanContext = span.spanContext(); + const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); + if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { + request.headers.set("traceparent", traceParentHeader); + const traceState = spanContext.traceState && spanContext.traceState.serialize(); + // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent + if (traceState) { + request.headers.set("tracestate", traceState); + } + } + return span; + } + catch (error) { + logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); + return undefined; + } + } + tryProcessError(span, err) { + try { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: err.message, + }); + if (err.statusCode) { + span.setAttribute("http.status_code", err.statusCode); + } + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } + tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.OK, + }); + span.end(); + } + catch (error) { + logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ServiceClient sends service requests and receives responses. + */ +class ServiceClient { + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. 
+ */ + constructor(credentials, + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ + options) { + if (!options) { + options = {}; + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + let requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + logger.info("ServiceClient: using custom request policies"); + requestPolicyFactories = options.requestPolicyFactories; + } + else { + let authPolicyFactory = undefined; + if (coreAuth.isTokenCredential(credentials)) { + logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); + // Create a wrapped RequestPolicyFactory here so that we can provide the + // correct scope to the BearerTokenAuthenticationPolicy at the first time + // one is requested. This is needed because generated ServiceClient + // implementations do not set baseUri until after ServiceClient's constructor + // is finished, leaving baseUri empty at the time when it is needed to + // build the correct scope name. + const wrappedPolicyFactory = () => { + let bearerTokenPolicyFactory = undefined; + // eslint-disable-next-line @typescript-eslint/no-this-alias + const serviceClient = this; + const serviceClientOptions = options; + return { + create(nextPolicy, createOptions) { + const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); + if (!credentialScopes) { + throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`); + } + if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { + bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); + } + return bearerTokenPolicyFactory.create(nextPolicy, createOptions); + }, + }; + }; + authPolicyFactory = wrappedPolicyFactory(); + } + else if (credentials && typeof credentials.signRequest === "function") { + logger.info("ServiceClient: creating signing policy from provided credentials"); + authPolicyFactory = signingPolicy(credentials); + } + else if (credentials !== undefined && credentials !== null) { + throw new Error("The credentials argument must implement the TokenCredential interface"); + } + logger.info("ServiceClient: using default request policies"); + requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); + if (options.requestPolicyFactories) { + // options.requestPolicyFactories can also be a function that manipulates + // the default requestPolicyFactories array + const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. 
+ */ + sendRequest(options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + let httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + let httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + async sendOperationRequest(operationArguments, operationSpec, callback) { + var _a; + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const httpRequest = new WebResource(); + let result; + try { + const baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + const requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue !== undefined && queryParameterValue !== null) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + // The collection is empty, no need to try serializing the current 
queryParam + continue; + } + else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = + item === undefined || item === null ? "" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat !== undefined && + queryParameter.collectionFormat !== null && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType && operationSpec.requestBody) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue !== undefined && headerValue !== null) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + if (options.spanOptions) { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
+ httpRequest.spanOptions = options.spanOptions; + } + if (options.tracingContext) { + httpRequest.tracingContext = options.tracingContext; + } + if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { + httpRequest.shouldDeserialize = options.shouldDeserialize; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseStatusCodes === undefined) { + httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); + } + let rawResponse; + let sendRequestError; + try { + rawResponse = await this.sendRequest(httpRequest); + } + catch (error) { + sendRequestError = error; + } + if (sendRequestError) { + if (sendRequestError.response) { + sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || + operationSpec.responses["default"]); + } + result = Promise.reject(sendRequestError); + } + else { + result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); + } + } + catch (error) { + result = Promise.reject(error); + } + const cb = callback; + if (cb) { + result + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + return result; + } +} +function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + var _a, _b, _c, _d, _e, _f; + const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; + const updatedOptions = { + rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", + includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, + }; + const xmlCharKey = serializerOptions.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperType.String && + (((_f = operationSpec.contentType) === null || _f === void 0 ? 
void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + let result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(authPolicyFactory, options) { + const factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (authPolicyFactory) { + factories.push(authPolicyFactory); + } + const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + factories.push(redirectPolicy()); + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + if (isNode) { + factories.push(proxyPolicy(options.proxySettings)); + } + factories.push(logPolicy({ logger: logger.info })); + return factories; +} +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. 
+ * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { + const requestPolicyFactories = []; + if (pipelineOptions.sendStreamingJson) { + requestPolicyFactories.push(ndJsonPolicy()); + } + let userAgentValue = undefined; + if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { + const userAgentInfo = []; + userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); + // Add the default user agent value if it isn't already specified + // by the userAgentPrefix option. + const defaultUserAgentInfo = getDefaultUserAgentValue(); + if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { + userAgentInfo.push(defaultUserAgentInfo); + } + userAgentValue = userAgentInfo.join(" "); + } + const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); + const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); + const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); + if (isNode) { + requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); + } + const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); + const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); + requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); + if (redirectOptions.handleRedirects) { + requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); + } + if (authPolicyFactory) { + requestPolicyFactories.push(authPolicyFactory); + } + requestPolicyFactories.push(logPolicy(loggingOptions)); + if (isNode && pipelineOptions.decompressResponse === false) { + requestPolicyFactories.push(disableResponseDecompressionPolicy()); + } + return { + httpClient: pipelineOptions.httpClient, + requestPolicyFactories, + }; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var _a; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); + if (propertyValue !== undefined && propertyValue !== null) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent !== undefined && parent !== null && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. 
+ */ +function flattenResponse(_response, responseSpec) { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const addOperationResponse = (obj) => { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (typeName === "Sequence" || isPageableResponse) { + const arrayResponse = [...(_response.parsedBody || [])]; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); +} +function getCredentialScopes(options, baseUri) { + if (options === null || options === void 0 ? void 0 : options.credentialScopes) { + const scopes = options.credentialScopes; + return Array.isArray(scopes) + ? scopes.map((scope) => new URL(scope).toString()) + : new URL(scopes).toString(); + } + if (baseUri) { + return `${baseUri}/.default`; + } + return undefined; +} + +// Copyright (c) Microsoft Corporation. +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +function createSpanFunction(args) { + return coreTracing.createSpanFunction(args); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +class ExpiringAccessTokenCache { + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { + this.cachedToken = undefined; + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. 
+ */ + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. + */ + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +class AccessTokenRefresher { + constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { + this.credential = credential; + this.scopes = scopes; + this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; + this.lastCalled = 0; + } + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady() { + // We're only ready for a new refresh if the required milliseconds have passed. + return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); + } + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + async getToken(options) { + this.lastCalled = Date.now(); + const token = await this.credential.getToken(this.scopes, options); + this.promise = undefined; + return token || undefined; + } + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options) { + if (!this.promise) { + this.promise = this.getToken(options); + } + return this.promise; + } +} + +// Copyright (c) Microsoft Corporation. +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +class BasicAuthenticationCredentials { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Authenticates to a service using an API key. + */ +class ApiKeyCredentials { + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. + */ + signRequest(webResource) { + if (!webResource) { + return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + return Promise.resolve(webResource); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} + +Object.defineProperty(exports, 'isTokenCredential', { + enumerable: true, + get: function () { return coreAuth.isTokenCredential; } +}); +exports.AccessTokenRefresher = AccessTokenRefresher; +exports.ApiKeyCredentials = ApiKeyCredentials; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; +exports.Constants = Constants; +exports.DefaultHttpClient = NodeFetchHttpClient; +exports.ExpiringAccessTokenCache = ExpiringAccessTokenCache; +exports.HttpHeaders = HttpHeaders; +exports.MapperType = MapperType; +exports.RequestPolicyOptions = RequestPolicyOptions; +exports.RestError = RestError; +exports.Serializer = Serializer; +exports.ServiceClient = ServiceClient; +exports.TopicCredentials = TopicCredentials; +exports.URLBuilder = URLBuilder; +exports.URLQuery = URLQuery; +exports.WebResource = WebResource; +exports.XML_ATTRKEY = XML_ATTRKEY; +exports.XML_CHARKEY = XML_CHARKEY; +exports.applyMixins = applyMixins; +exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; +exports.createPipelineFromOptions = createPipelineFromOptions; +exports.createSpanFunction = createSpanFunction; +exports.delay = delay; +exports.deserializationPolicy = deserializationPolicy; +exports.deserializeResponseBody = deserializeResponseBody; +exports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy; +exports.encodeUri = encodeUri; +exports.executePromisesSequentially = executePromisesSequentially; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +exports.flattenResponse = flattenResponse; +exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; +exports.generateUuid = generateUuid; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.getDefaultUserAgentValue = getDefaultUserAgentValue; +exports.isDuration = isDuration; +exports.isNode = isNode; +exports.isValidUuid = isValidUuid; +exports.keepAlivePolicy = keepAlivePolicy; +exports.logPolicy = logPolicy; +exports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase; +exports.parseXML = parseXML; +exports.promiseToCallback = promiseToCallback; +exports.promiseToServiceCallback = promiseToServiceCallback; +exports.proxyPolicy = proxyPolicy; +exports.redirectPolicy = redirectPolicy; +exports.serializeObject = serializeObject; +exports.signingPolicy = signingPolicy; +exports.stringifyXML = stringifyXML; +exports.stripRequest = stripRequest; +exports.stripResponse = stripResponse; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +exports.tracingPolicy = tracingPolicy; +exports.userAgentPolicy = userAgentPolicy; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-http/dist/index.js.map b/node_modules/@azure/core-http/dist/index.js.map new file mode 100644 index 0000000000..18cee14c6e --- /dev/null +++ b/node_modules/@azure/core-http/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sources":["../src/httpHeaders.ts","../src/util/base64.ts","../src/util/constants.ts","../src/util/serializer.common.ts","../src/util/utils.ts","../src/serializer.ts","../src/webResource.ts","../src/url.ts","../src/proxyAgent.ts","../src/util/sanitizer.ts","../src/util/inspect.ts","../src/restError.ts","../src/log.ts","../src/nodeFetchHttpClient.ts","../src/httpPipelineLogLevel.ts","../src/operationOptions.ts","../src/policies/requestPolicy.ts","../src/util/xml.ts","../src/policies/deserializationPolicy.ts","../src/policies/keepAlivePolicy.ts","../src/policies/redirectPolicy.ts","../src/util/exponentialBackoffStrategy.ts","../src/util/typeguards.ts","../src/util/delay.ts","../src/policies/exponentialRetryPolicy.ts","../src/policies/logPolicy.ts","../src/operationParameter.ts","../src/operationSpec.ts","../src/policies/msRestUserAgentPolicy.ts","../src/policies/userAgentPolicy.ts","../src/queryCollectionFormat.ts","../src/policies/bearerTokenAuthenticationPolicy.ts","../src/policies/disableResponseDecompressionPolicy.ts","../src/policies/generateClientRequestIdPolicy.ts","../src/httpClientCache.ts","../src/policies/ndJsonPolicy.ts","../src/policies/proxyPolicy.ts","../src/policies/rpRegistrationPolicy.ts","../src/policies/signingPolicy.ts","../src/policies/systemErrorRetryPolicy.ts","../src/util/throttlingRetryStrategy.ts","../src/policies/throttlingRetryPolicy.ts","../src/policies/tracingPolicy.ts","../src/serviceClient.ts","../src/createSpanLegacy.ts","../src/credentials/accessTokenCache.ts","../src/credentials/accessTokenRefresher.ts","../src/credentials/basicAuthenticationCredentials.ts","../src/credentials/apiKeyCredentials.ts","../src/credentials/topicCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string): string {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(options?: { preserveCase?: boolean }): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n rawHeaders: unknown;\n clone: unknown;\n get: unknown;\n set: unknown;\n contains: unknown;\n remove: unknown;\n headersArray: unknown;\n headerValues: unknown;\n headerNames: unknown;\n toJson: unknown;\n };\n if (\n typeof castObject.rawHeaders === \"function\" &&\n typeof castObject.clone === \"function\" &&\n typeof castObject.get === \"function\" &&\n typeof castObject.set === \"function\" &&\n typeof castObject.contains === \"function\" &&\n typeof castObject.remove === \"function\" &&\n typeof castObject.headersArray === \"function\" &&\n typeof castObject.headerValues === \"function\" &&\n typeof castObject.headerNames === \"function\" &&\n typeof castObject.toJson === \"function\"\n ) {\n return true;\n }\n }\n\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders implements HttpHeadersLike {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName - The name of the header to set. This value is case-insensitive.\n * @param headerValue - The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName - The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName - The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n return this.toJson({ preserveCase: true });\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header values that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(options: { preserveCase?: boolean } = {}): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n if (options.preserveCase) {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name] = header.value;\n }\n } else {\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[getHeaderKey(header.name)] = header.value;\n }\n }\n return result;\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson({ preserveCase: true }));\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n const resultPreservingCasing: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n resultPreservingCasing[header.name] = header.value;\n }\n return new HttpHeaders(resultPreservingCasing);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Encodes a string in base64 format.\n * @param value - The string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value - The Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? 
value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value - The base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/**\n * A set of constants used internally when processing requests.\n */\nexport const Constants = {\n /**\n * The core-http version\n */\n coreHttpVersion: \"2.2.5\",\n\n /**\n * Specifies HTTP.\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n ServiceUnavailable: 503,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Default key used to access the XML attributes.\n */\nexport const XML_ATTRKEY = \"$\";\n/**\n * Default key used to access the XML value content.\n */\nexport const XML_CHARKEY = \"_\";\n\n/**\n * Options to govern behavior of xml parser and builder.\n */\nexport interface SerializerOptions {\n /**\n * indicates the name of the root element in the resulting XML when building XML.\n */\n rootName?: string;\n /**\n * indicates whether the root element is to be included or not in the output when parsing XML.\n */\n includeRoot?: boolean;\n /**\n * key used to access the XML value content when parsing XML.\n */\n xmlCharKey?: string;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Constants } from \"./constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { XML_ATTRKEY } from \"./serializer.common\";\nimport { v4 as uuidv4 } from \"uuid\";\n\nconst validUuidRegex =\n /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param urlToCheck - The url to check\n * @returns True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param uri - The URI 
to be encoded.\n * @returns The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param response - The Http Response\n * @returns The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization header.\n *\n * @param request - The Http Request object\n * @returns The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param uuid - The uuid as a string that needs to be validated\n * @returns True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Generated UUID\n *\n * @returns RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param promiseFactories - An array of promise factories(A function that return a promise)\n * @param kickstart - Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n * @returns A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(\n promiseFactories: Array,\n kickstart: unknown\n): Promise {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param err - The error occurred if any, while executing the request; otherwise null.\n * @param result - The deserialized response body if an error did not occur.\n * @param request - The raw/actual request sent to the server if an error did not occur.\n * @param response - The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param promise - The Promise to be converted to a callback\n * @returns A function that takes the callback `(cb: Function) => void`\n * @deprecated generated code should instead depend on responseToBody\n */\n// eslint-disable-next-line @typescript-eslint/ban-types\nexport function promiseToCallback(promise: Promise): (cb: Function) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n // eslint-disable-next-line @typescript-eslint/ban-types\n return (cb: Function): void => {\n promise\n .then((data: any) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n return cb(undefined, data);\n })\n .catch((err: Error) => {\n // eslint-disable-next-line promise/no-callback-in-promise\n cb(err);\n });\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(\n promise: Promise\n): (cb: ServiceCallback) => void {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise\n .then((data: HttpOperationResponse) => {\n return process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n })\n .catch((err: Error) => {\n process.nextTick(cb, err);\n });\n };\n}\n\nexport function prepareXMLRootList(\n obj: unknown,\n elementName: string,\n xmlNamespaceKey?: string,\n xmlNamespace?: string\n): { [s: string]: any } {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n\n if (!xmlNamespaceKey || !xmlNamespace) {\n return { [elementName]: obj };\n }\n\n const result = { [elementName]: obj };\n result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace };\n return result;\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param targetCtor - The target object on which the properties need to be applied.\n * @param sourceCtors - An array of source objects from which the properties need to be taken.\n 
*/\nexport function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void {\n const castTargetCtorParam = targetCtorParam as {\n prototype: Record;\n };\n sourceCtors.forEach((sourceCtor) => {\n Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => {\n castTargetCtorParam.prototype[name] = sourceCtor.prototype[name];\n });\n });\n}\n\nconst validateISODuration =\n /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param value - The value to be validated for ISO 8601 duration format.\n * @returns `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param value - The value to search and replace in.\n * @param searchValue - The value to search for in the value argument.\n * @param replaceValue - The value to replace searchValue with in the value argument.\n * @returns The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given entity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value - Any entity\n * @returns true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: unknown): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\n/**\n * @internal\n */\nexport type UnknownObject = { [s: string]: unknown };\n\n/**\n * @internal\n * @returns true when input is an object type that is not null, Array, RegExp, or Date.\n */\nexport function isObject(input: unknown): input is UnknownObject {\n return (\n typeof input === \"object\" &&\n input !== null &&\n !Array.isArray(input) &&\n !(input instanceof RegExp) &&\n !(input instanceof Date)\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n/* eslint-disable eqeqeq */\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\n\n// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications.\n\n/**\n * Used to map raw response objects to final shapes.\n * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON.\n * Also allows pulling values from headers, as well as inserting default values and constants.\n */\nexport class Serializer {\n constructor(\n /**\n * The provided model mapper.\n */\n public readonly modelMappers: { [key: string]: any } = {},\n /**\n * Whether the contents are XML or not.\n */\n public readonly isXML?: boolean\n ) {}\n\n /**\n * Validates constraints, if any. 
This function will throw if the provided value does not respect those constraints.\n * @param mapper - The definition of data models.\n * @param value - The value.\n * @param objectName - Name of the object. Used in the error messages.\n */\n validateConstraints(mapper: Mapper, value: unknown, objectName: string): void {\n const failValidation = (\n constraintName: keyof MapperConstraints,\n constraintValue: any\n ): Error => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const valueAsNumber = value as number;\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n const valueAsArray = value as any[];\n if (MaxItems != undefined && valueAsArray.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && valueAsArray.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && valueAsArray.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && valueAsArray.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n valueAsArray.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param object - A valid Javascript object to be serialized.\n * @param objectName - Name of the serialized object.\n * @param options - additional options to deserialization.\n * @returns A valid serialized Javascript object.\n */\n serialize(\n mapper: Mapper,\n object: unknown,\n objectName?: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? 
XML_CHARKEY,\n };\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/i) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/i) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/i) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = serializeByteArrayType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = serializeBase64UrlType(objectName, object as Uint8Array);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = serializeSequenceType(\n this,\n mapper as SequenceMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = serializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n } else if (mapperType.match(/^Composite$/i) !== null) {\n payload = serializeCompositeType(\n this,\n mapper as CompositeMapper,\n object,\n objectName,\n Boolean(this.isXML),\n updatedOptions\n );\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper.\n *\n * @param mapper - The mapper which defines the metadata of the serializable object.\n * @param responseBody - A valid Javascript entity to be deserialized.\n * @param objectName - Name of the deserialized object.\n * @param options - Controls behavior of XML parser and builder.\n * @returns A valid deserialized Javascript object.\n */\n deserialize(\n mapper: Mapper,\n responseBody: unknown,\n objectName: string,\n options: SerializerOptions = {}\n ): any {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. 
xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/i) !== null) {\n payload = deserializeCompositeType(\n this,\n mapper as CompositeMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else {\n if (this.isXML) {\n const xmlCharKey = updatedOptions.xmlCharKey;\n const castResponseBody = responseBody as Record;\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\" i.e., XML_ATTRKEY) and body (\"#\" i.e., XML_CHARKEY) properties,\n * then just reduce the responseBody value to the body (\"#\" i.e., XML_CHARKEY) property.\n */\n if (\n castResponseBody[XML_ATTRKEY] != undefined &&\n castResponseBody[xmlCharKey] != undefined\n ) {\n responseBody = castResponseBody[xmlCharKey];\n }\n }\n\n if (mapperType.match(/^Number$/i) !== null) {\n payload = parseFloat(responseBody as string);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/i) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) {\n payload = new Date(responseBody as string);\n } else if (mapperType.match(/^UnixTime$/i) !== null) {\n payload = unixTimeToDate(responseBody as number);\n } else if (mapperType.match(/^ByteArray$/i) !== null) {\n payload = base64.decodeString(responseBody as string);\n } else if (mapperType.match(/^Base64Url$/i) !== null) {\n payload = base64UrlToByteArray(responseBody as string);\n } else if (mapperType.match(/^Sequence$/i) !== null) {\n payload = deserializeSequenceType(\n this,\n mapper as SequenceMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n } else if (mapperType.match(/^Dictionary$/i) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName,\n updatedOptions\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string): string {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n }\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // 
Base64Url to Base64.\n str = str.replace(/-/g, \"+\").replace(/_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/i) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/i) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/i) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/i) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/i) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !((typeof Blob === \"function\" || typeof Blob === \"object\") && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = base64.encodeByteArray(value);\n }\n return returnValue;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: Uint8Array): string {\n let returnValue: string = \"\";\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n returnValue = bufferToBase64Url(value) || \"\";\n }\n return returnValue;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string): any {\n if (value != undefined) {\n if (typeName.match(/^Date$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/i) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/i) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. Instead was \"${value}\".`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any[] {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n const serializedValue = serializer.serialize(elementType, object[i], objectName, options);\n\n if (isXml && elementType.xmlNamespace) {\n const xmlnsKey = elementType.xmlNamespacePrefix\n ? 
`xmlns:${elementType.xmlNamespacePrefix}`\n : \"xmlns\";\n if (elementType.type.name === \"Composite\") {\n tempArray[i] = { ...serializedValue };\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n } else {\n tempArray[i] = {};\n tempArray[i][options.xmlCharKey] = serializedValue;\n tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace };\n }\n } else {\n tempArray[i] = serializedValue;\n }\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): { [key: string]: any } {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n const serializedValue = serializer.serialize(valueType, object[key], objectName, options);\n // If the element needs an XML namespace we need to add it within the $ property\n tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options);\n }\n\n // Add the namespace to the root element if needed\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : \"xmlns\";\n\n const result = tempDictionary;\n result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace };\n return result;\n }\n\n return tempDictionary;\n}\n\n/**\n * Resolves the additionalProperties property from a referenced mapper.\n * @param serializer - The serializer containing the entire set of mappers.\n * @param mapper - The composite mapper to resolve.\n * @param objectName - Name of the object being serialized.\n */\nfunction resolveAdditionalProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): SequenceMapper | BaseMapper | CompositeMapper | DictionaryMapper | EnumMapper | undefined {\n const additionalProperties = mapper.type.additionalProperties;\n\n if (!additionalProperties && mapper.type.className) {\n const modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n return modelMapper?.type.additionalProperties;\n }\n\n return additionalProperties;\n}\n\n/**\n * Finds the mapper referenced by `className`.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n * @param objectName - Name of the object being serialized\n */\nfunction resolveReferencedMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): CompositeMapper | undefined {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n return serializer.modelMappers[className];\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer - The serializer containing the entire set of mappers\n * @param mapper - The composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const 
modelMapper = resolveReferencedMapper(serializer, mapper, objectName);\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${mapper.type.className}\".`);\n }\n modelProps = modelMapper?.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(modelMapper)}\" of type \"${\n mapper.type.className\n }\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string,\n isXml: boolean,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (\n childObject == undefined &&\n (object[key] != undefined || propertyMapper.defaultValue !== undefined)\n ) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n if (isXml && mapper.xmlNamespace) {\n const xmlnsKey = mapper.xmlNamespacePrefix\n ? `xmlns:${mapper.xmlNamespacePrefix}`\n : \"xmlns\";\n parentObject[XML_ATTRKEY] = {\n ...parentObject[XML_ATTRKEY],\n [xmlnsKey]: mapper.xmlNamespace,\n };\n }\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? 
objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName,\n options\n );\n\n if (serializedValue !== undefined && propName != undefined) {\n const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options);\n if (isXml && propertyMapper.xmlIsAttribute) {\n // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {};\n parentObject[XML_ATTRKEY][propName] = serializedValue;\n } else if (isXml && propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: value };\n } else {\n parentObject[propName] = value;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName);\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]',\n options\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction getXmlObjectValue(\n propertyMapper: Mapper,\n serializedValue: any,\n isXml: boolean,\n options: Required\n): any {\n if (!isXml || !propertyMapper.xmlNamespace) {\n return serializedValue;\n }\n\n const xmlnsKey = propertyMapper.xmlNamespacePrefix\n ? 
`xmlns:${propertyMapper.xmlNamespacePrefix}`\n : \"xmlns\";\n const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace };\n\n if ([\"Composite\"].includes(propertyMapper.type.name)) {\n if (serializedValue[XML_ATTRKEY]) {\n return serializedValue;\n } else {\n const result: any = { ...serializedValue };\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n }\n }\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = xmlNamespace;\n return result;\n}\n\nfunction isSpecialXmlProperty(propertyName: string, options: Required): boolean {\n return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName,\n options\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody[XML_ATTRKEY][xmlName!],\n propertyObjectName,\n options\n );\n } else {\n const propertyName = xmlElementName || xmlName || serializedName;\n if (propertyMapper.xmlIsWrapped) {\n /* a list of wrapped by \n For the xml example below\n \n ...\n ...\n \n the responseBody has\n {\n Cors: {\n CorsRule: [{...}, {...}]\n }\n }\n xmlName is \"Cors\" and xmlElementName is\"CorsRule\".\n */\n const wrapped = responseBody[xmlName!];\n const elementList = wrapped?.[xmlElementName!] ?? 
[];\n instance[key] = serializer.deserialize(\n propertyMapper,\n elementList,\n propertyObjectName,\n options\n );\n } else {\n const property = responseBody[propertyName!];\n instance[key] = serializer.deserialize(\n propertyMapper,\n property,\n propertyObjectName,\n options\n );\n }\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [k, v] of Object.entries(instance)) {\n if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) {\n arrayInstance[k] = v;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName,\n options\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string): boolean => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]',\n options\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key, options)\n ) {\n instance[key] = responseBody[key];\n }\n }\n }\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n 
objectName: string,\n options: Required\n): { [key: string]: any } {\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string,\n options: Required\n): any[] {\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(\n element,\n responseBody[i],\n `${objectName}[${i}]`,\n options\n );\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string): any {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\n/**\n * Description of various value constraints such as integer ranges and string regex.\n */\nexport interface MapperConstraints {\n /**\n * The value should be less than or equal to the `InclusiveMaximum` value.\n */\n InclusiveMaximum?: number;\n /**\n * The value should be less than the `ExclusiveMaximum` value.\n */\n ExclusiveMaximum?: number;\n /**\n * The value should be greater than or equal to the `InclusiveMinimum` value.\n */\n InclusiveMinimum?: number;\n /**\n * The value should be greater than the `InclusiveMinimum` value.\n */\n ExclusiveMinimum?: number;\n /**\n * The length should be smaller than the `MaxLength`.\n */\n MaxLength?: number;\n /**\n * The length should be bigger than the `MinLength`.\n */\n MinLength?: number;\n /**\n * The value must match the pattern.\n */\n Pattern?: RegExp;\n /**\n * The value must contain fewer items than the MaxItems value.\n */\n MaxItems?: number;\n /**\n * The value must contain more items than the `MinItems` value.\n */\n MinItems?: number;\n /**\n * The value must contain only unique items.\n */\n UniqueItems?: true;\n /**\n * The value should be exactly divisible by the `MultipleOf` value.\n */\n MultipleOf?: number;\n}\n\n/**\n * Type of the mapper. Includes known mappers.\n */\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\n/**\n * The type of a simple mapper.\n */\nexport interface SimpleMapperType {\n /**\n * Name of the type of the property.\n */\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\n/**\n * Helps build a mapper that describes how to map a set of properties of an object based on other mappers.\n *\n * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`.\n */\nexport interface CompositeMapperType {\n /**\n * Name of the composite mapper type.\n */\n name: \"Composite\";\n\n /**\n * Use `className` to reference another type definition.\n */\n className?: string;\n\n /**\n * Use `modelProperties` when the reference to the other type has been resolved.\n */\n modelProperties?: { [propertyName: string]: Mapper };\n\n /**\n * Used when a model has `additionalProperties: true`. 
Allows the generic processing of unnamed model properties on the response object.\n */\n additionalProperties?: Mapper;\n\n /**\n * The name of the top-most parent scheme, the one that has no parents.\n */\n uberParent?: string;\n\n /**\n * A polymorphic discriminator.\n */\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\n/**\n * Helps build a mapper that describes how to parse a sequence of mapped values.\n */\nexport interface SequenceMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Sequence\";\n /**\n * The mapper to use to map each one of the properties of the sequence.\n */\n element: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse a dictionary of mapped values.\n */\nexport interface DictionaryMapperType {\n /**\n * Name of the sequence type mapper.\n */\n name: \"Dictionary\";\n /**\n * The mapper to use to map the value of each property in the dictionary.\n */\n value: Mapper;\n}\n\n/**\n * Helps build a mapper that describes how to parse an enum value.\n */\nexport interface EnumMapperType {\n /**\n * Name of the enum type mapper.\n */\n name: \"Enum\";\n /**\n * Values allowed by this mapper.\n */\n allowedValues: any[];\n}\n\n/**\n * The base definition of a mapper. Can be used for XML and plain JavaScript objects.\n */\nexport interface BaseMapper {\n /**\n * Name for the xml element\n */\n xmlName?: string;\n /**\n * Xml element namespace\n */\n xmlNamespace?: string;\n /**\n * Xml element namespace prefix\n */\n xmlNamespacePrefix?: string;\n /**\n * Determines if the current property should be serialized as an attribute of the parent xml element\n */\n xmlIsAttribute?: boolean;\n /**\n * Name for the xml elements when serializing an array\n */\n xmlElementName?: string;\n /**\n * Whether or not the current property should have a wrapping XML element\n */\n xmlIsWrapped?: boolean;\n /**\n * Whether or not the current property is readonly\n */\n readOnly?: boolean;\n /**\n * Whether or not the current property is a constant\n */\n isConstant?: boolean;\n /**\n * Whether or not the current property is required\n */\n required?: boolean;\n /**\n * Whether or not the current property allows mull as a value\n */\n nullable?: boolean;\n /**\n * The name to use when serializing\n */\n serializedName?: string;\n /**\n * Type of the mapper\n */\n type: MapperType;\n /**\n * Default value when one is not explicitly provided\n */\n defaultValue?: any;\n /**\n * Constraints to test the current value against\n */\n constraints?: MapperConstraints;\n}\n\n/**\n * Mappers are definitions of the data models used in the library.\n * These data models are part of the Operation or Client definitions in the responses or parameters.\n */\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\n/**\n * Used to disambiguate discriminated type unions.\n * For example, if response can have many shapes but also includes a 'kind' field (or similar),\n * that field can be used to determine how to deserialize the response to the correct type.\n */\nexport interface PolymorphicDiscriminator {\n /**\n * Name of the discriminant property in the original JSON payload, e.g. 
`@odata.kind`.\n */\n serializedName: string;\n /**\n * Name to use on the resulting object instead of the original property name.\n * Useful since the JSON property could be difficult to work with.\n * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`.\n */\n clientName: string;\n /**\n * It may contain any other property.\n */\n [key: string]: string;\n}\n\n/**\n * A mapper composed of other mappers.\n */\nexport interface CompositeMapper extends BaseMapper {\n /**\n * The type descriptor of the `CompositeMapper`.\n */\n type: CompositeMapperType;\n}\n\n/**\n * A mapper describing arrays.\n */\nexport interface SequenceMapper extends BaseMapper {\n /**\n * The type descriptor of the `SequenceMapper`.\n */\n type: SequenceMapperType;\n}\n\n/**\n * A mapper describing plain JavaScript objects used as key/value pairs.\n */\nexport interface DictionaryMapper extends BaseMapper {\n /**\n * The type descriptor of the `DictionaryMapper`.\n */\n type: DictionaryMapperType;\n /**\n * Optionally, a prefix to add to the header collection.\n */\n headerCollectionPrefix?: string;\n}\n\n/**\n * A mapper describing an enum value.\n */\nexport interface EnumMapper extends BaseMapper {\n /**\n * The type descriptor of the `EnumMapper`.\n */\n type: EnumMapperType;\n}\n\n/**\n * An interface representing an URL parameter value.\n */\nexport interface UrlParameterValue {\n /**\n * The URL value.\n */\n value: string;\n /**\n * Whether to keep or skip URL encoding.\n */\n skipUrlEncoding: boolean;\n}\n\n/**\n * Utility function that serializes an object that might contain binary information into a plain object, array or a string.\n */\nexport function serializeObject(toSerialize: unknown): any {\n const castToSerialize = toSerialize as Record;\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(castToSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\n/**\n * String enum containing the string types of property mappers.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Context, SpanOptions } from \"@azure/core-tracing\";\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from 
\"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { SerializerOptions } from \"./util/serializer.common\";\nimport { generateUuid } from \"./util/utils\";\n\n/**\n * List of supported HTTP methods.\n */\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\n\n/**\n * Possible HTTP request body types\n */\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * A description of a HTTP request to be made to a remote server.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * A unique identifier for the request. Used for logging and tracing.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. 
*/\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: unknown): object is WebResourceLike {\n if (object && typeof object === \"object\") {\n const castObject = object as {\n url: unknown;\n method: unknown;\n headers: unknown;\n validateRequestProperties: unknown;\n prepare: unknown;\n clone: unknown;\n };\n if (\n typeof castObject.url === \"string\" &&\n typeof castObject.method === \"string\" &&\n typeof castObject.headers === \"object\" &&\n isHttpHeadersLike(castObject.headers) &&\n typeof castObject.validateRequestProperties === \"function\" &&\n typeof castObject.prepare === \"function\" &&\n typeof castObject.clone === \"function\"\n ) {\n return true;\n }\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n */\nexport class WebResource implements WebResourceLike {\n /**\n * URL of the outgoing request.\n */\n url: string;\n /**\n * HTTP method to use.\n */\n method: HttpMethods;\n /**\n * Request body.\n */\n body?: any;\n /**\n * HTTP headers.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n * @deprecated Use streamResponseStatusCodes property instead.\n */\n streamResponseBody?: boolean;\n /**\n * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream.\n */\n streamResponseStatusCodes?: Set;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. 
If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n /**\n * Form data, used to build the request body.\n */\n formData?: any;\n /**\n * Query added to the URL.\n */\n query?: { [key: string]: any };\n /**\n * Specification of the HTTP request.\n */\n operationSpec?: OperationSpec;\n /**\n * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination.\n */\n withCredentials: boolean;\n /**\n * How long to wait in milliseconds before aborting the request.\n */\n timeout: number;\n /**\n * What proxy to use, if necessary.\n */\n proxySettings?: ProxySettings;\n /**\n * Whether to keep the HTTP connections alive throughout requests.\n */\n keepAlive?: boolean;\n /**\n * Whether or not to decompress response according to Accept-Encoding header (node-fetch only)\n */\n decompressResponse?: boolean;\n /**\n * Unique identifier of the outgoing request.\n */\n requestId: string;\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * Tracing: Context used when creating Spans.\n */\n tracingContext?: Context;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: unknown,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n decompressResponse?: boolean,\n streamResponseStatusCodes?: Set\n ) {\n this.streamResponseBody = streamResponseBody;\n this.streamResponseStatusCodes = streamResponseStatusCodes;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.decompressResponse = decompressResponse;\n this.requestId = this.headers.get(\"x-ms-client-request-id\") || generateUuid();\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. 
It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param options - Options to provide for preparing the request.\n * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (\n options.method === undefined ||\n options.method === null ||\n typeof options.method.valueOf() !== \"string\"\n ) {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate === undefined ||\n options.pathTemplate === null ||\n typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url === undefined ||\n options.url === null ||\n typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({[\\w-]*\\s*[\\w-]*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. 
Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2);\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in parameters: ${stringifiedPathParameters}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", this.requestId);\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly\n this.body = options.body;\n if (options.body !== undefined && options.body !== null) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n if (options.spanOptions) {\n this.spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n this.tracingContext = options.tracingContext;\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.decompressResponse,\n this.streamResponseStatusCodes\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\n/**\n * Options to prepare an outgoing HTTP request.\n */\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: `{ \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }`\n * - query-parameter-value in \"string\" format: `{ \"query-parameter-name\": \"query-parameter-value\"}`.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}`\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: `{ \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }`\n * - path-parameter-value in \"string\" format: `{ \"path-parameter-name\": \"path-parameter-value\" }`.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n /**\n * Form data, used to build the request body.\n */\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: Record;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n /**\n * Signal of an abort controller. 
Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Allows keeping track of the progress of uploading the outgoing request.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Allows keeping track of the progress of downloading the incoming response.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Tracing: Options used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n /**\n * Value of the parameter.\n */\n value: any;\n /**\n * Disables URL encoding if set to true.\n */\n skipUrlEncoding: boolean;\n /**\n * Parameter values may contain any other property.\n */\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n\n /**\n * Tracing: Context used when creating spans.\n */\n tracingContext?: Context;\n\n /**\n * May contain other properties.\n */\n [key: string]: any;\n\n /**\n * Options to override XML parsing/building behavior.\n */\n serializerOptions?: SerializerOptions;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Get the keys of the query string.\n */\n public keys(): string[] {\n return Object.keys(this._rawQuery);\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: unknown): void {\n const caseParameterValue = parameterValue as {\n toString: () => string;\n };\n if (parameterName) {\n if (caseParameterValue !== undefined && caseParameterValue !== null) {\n const newValue = Array.isArray(caseParameterValue)\n ? 
caseParameterValue\n : caseParameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. 
If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port === undefined || port === null || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n let tokenPath: string | undefined;\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n tokenPath = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n /**\n * Serializes the URL as a string.\n * @returns the URL as a string.\n */\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n /**\n * Parses a given string URL into a new {@link URLBuilder}.\n */\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode 
<= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state !== undefined && state !== null ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const host = URLBuilder.parse(proxySettings.host).getHost() as string;\n if (!host) {\n throw new Error(\"Expecting a non-empty host in proxy settings.\");\n }\n if (!isValidPort(proxySettings.port)) {\n throw new Error(\"Expecting a valid port number in the range of [0, 65535] in proxy settings.\");\n }\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: host,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const isRequestHttps = isUrlHttps(requestUrl);\n const isProxyHttps = isUrlHttps(proxySettings.host);\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\nexport function isUrlHttps(url: string): boolean {\n const urlScheme = URLBuilder.parse(url).getScheme() || \"\";\n return urlScheme.toLowerCase() === \"https\";\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: tunnel.HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n\nfunction isValidPort(port: number): boolean {\n // any port in 0-65535 range is valid (RFC 793) even though almost all implementations\n // will reserve 0 for a specific purpose, 
and a range of numbers for ephemeral ports\n return 0 <= port && port <= 65535;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { URLBuilder, URLQuery } from \"../url\";\nimport { UnknownObject, isObject } from \"./utils\";\n\nexport interface SanitizerOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n}\n\nconst RedactedString = \"REDACTED\";\n\nconst defaultAllowedHeaderNames = [\n \"x-ms-client-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-useragent\",\n \"x-ms-correlation-request-id\",\n \"x-ms-request-id\",\n \"client-request-id\",\n \"ms-cv\",\n \"return-client-request-id\",\n \"traceparent\",\n\n \"Access-Control-Allow-Credentials\",\n \"Access-Control-Allow-Headers\",\n \"Access-Control-Allow-Methods\",\n \"Access-Control-Allow-Origin\",\n \"Access-Control-Expose-Headers\",\n \"Access-Control-Max-Age\",\n \"Access-Control-Request-Headers\",\n \"Access-Control-Request-Method\",\n \"Origin\",\n\n \"Accept\",\n \"Accept-Encoding\",\n \"Cache-Control\",\n \"Connection\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"ETag\",\n \"Expires\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"Last-Modified\",\n \"Pragma\",\n \"Request-Id\",\n \"Retry-After\",\n \"Server\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"WWW-Authenticate\",\n];\n\nconst defaultAllowedQueryParameters: string[] = [\"api-version\"];\n\nexport class Sanitizer {\n public allowedHeaderNames: Set;\n public allowedQueryParameters: Set;\n\n constructor({ allowedHeaderNames = [], allowedQueryParameters = [] }: SanitizerOptions = {}) {\n allowedHeaderNames = Array.isArray(allowedHeaderNames)\n ? defaultAllowedHeaderNames.concat(allowedHeaderNames)\n : defaultAllowedHeaderNames;\n\n allowedQueryParameters = Array.isArray(allowedQueryParameters)\n ? defaultAllowedQueryParameters.concat(allowedQueryParameters)\n : defaultAllowedQueryParameters;\n\n this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase()));\n this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase()));\n }\n\n public sanitize(obj: unknown): string {\n const seen = new Set();\n return JSON.stringify(\n obj,\n (key: string, value: unknown) => {\n // Ensure Errors include their interesting non-enumerable members\n if (value instanceof Error) {\n return {\n ...value,\n name: value.name,\n message: value.message,\n };\n }\n\n if (key === \"_headersMap\") {\n return this.sanitizeHeaders(value as UnknownObject);\n } else if (key === \"url\") {\n return this.sanitizeUrl(value as string);\n } else if (key === \"query\") {\n return this.sanitizeQuery(value as UnknownObject);\n } else if (key === \"body\") {\n // Don't log the request body\n return undefined;\n } else if (key === \"response\") {\n // Don't log response again\n return undefined;\n } else if (key === \"operationSpec\") {\n // When using sendOperationRequest, the request carries a massive\n // field with the autorest spec. 
No need to log it.\n return undefined;\n } else if (Array.isArray(value) || isObject(value)) {\n if (seen.has(value)) {\n return \"[Circular]\";\n }\n seen.add(value);\n }\n\n return value;\n },\n 2\n );\n }\n\n private sanitizeHeaders(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value);\n }\n\n private sanitizeQuery(value: UnknownObject): UnknownObject {\n return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]);\n }\n\n private sanitizeObject(\n value: UnknownObject,\n allowedKeys: Set,\n accessor: (value: any, key: string) => any\n ): UnknownObject {\n if (typeof value !== \"object\" || value === null) {\n return value;\n }\n\n const sanitized: UnknownObject = {};\n\n for (const k of Object.keys(value)) {\n if (allowedKeys.has(k.toLowerCase())) {\n sanitized[k] = accessor(value, k);\n } else {\n sanitized[k] = RedactedString;\n }\n }\n\n return sanitized;\n }\n\n private sanitizeUrl(value: string): string {\n if (typeof value !== \"string\" || value === null) {\n return value;\n }\n\n const urlBuilder = URLBuilder.parse(value);\n const queryString = urlBuilder.getQuery();\n\n if (!queryString) {\n return value;\n }\n\n const query = URLQuery.parse(queryString);\n for (const k of query.keys()) {\n if (!this.allowedQueryParameters.has(k.toLowerCase())) {\n query.set(k, RedactedString);\n }\n }\n\n urlBuilder.setQuery(query.toString());\n return urlBuilder.toString();\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { inspect } from \"util\";\n\nexport const custom = inspect.custom;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { Sanitizer } from \"./util/sanitizer\";\nimport { WebResourceLike } from \"./webResource\";\nimport { custom } from \"./util/inspect\";\n\nconst errorSanitizer = new Sanitizer();\n\n/**\n * An error resulting from an HTTP request to a service endpoint.\n */\nexport class RestError extends Error {\n /**\n * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.)\n */\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n /**\n * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete.\n */\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n /**\n * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT).\n */\n code?: string;\n /**\n * The HTTP status code of the response, if one was returned.\n */\n statusCode?: number;\n /**\n * Outgoing request.\n */\n request?: WebResourceLike;\n /**\n * Incoming response.\n */\n response?: HttpOperationResponse;\n /**\n * Any additional details. 
In the case of deserialization errors, can be the processed response.\n */\n details?: unknown;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ) {\n super(message);\n this.name = \"RestError\";\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n\n /**\n * Logging method for util.inspect in Node\n */\n [custom](): string {\n return `RestError: ${this.message} \\n ${errorSanitizer.sanitize(this)}`;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { createClientLogger } from \"@azure/logger\";\nexport const logger = createClientLogger(\"core-http\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tough from \"tough-cookie\";\nimport { AbortController, AbortError } from \"@azure/abort-controller\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { ProxyAgent, createProxyAgent, isUrlHttps } from \"./proxyAgent\";\nimport { Readable, Transform } from \"stream\";\nimport { TransferProgressEvent, WebResourceLike } from \"./webResource\";\nimport FormData from \"form-data\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\nimport { logger } from \"./log\";\nimport node_fetch from \"node-fetch\";\n\ninterface AgentCache {\n httpAgent?: http.Agent;\n httpsAgent?: https.Agent;\n}\n\nfunction getCachedAgent(\n isHttps: boolean,\n agentCache: AgentCache\n): http.Agent | https.Agent | undefined {\n return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent;\n}\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\n/**\n * String URLs used when calling to `fetch()`.\n */\nexport type CommonRequestInfo = string;\n\n/**\n * An object containing information about the outgoing HTTP request.\n */\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\n/**\n * An object containing information about the incoming HTTP response.\n */\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport class ReportTransform extends Transform {\n private loadedBytes: number = 0;\n _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void {\n this.push(chunk);\n this.loadedBytes += chunk.length;\n this.progressCallback!({ loadedBytes: this.loadedBytes });\n callback(undefined);\n }\n\n constructor(private progressCallback: (progress: TransferProgressEvent) => void) {\n super();\n }\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable, aborter?: AbortController): Promise {\n return new Promise((resolve) => {\n stream.once(\"close\", () => {\n aborter?.abort();\n resolve();\n });\n stream.once(\"end\", resolve);\n stream.once(\"error\", resolve);\n });\n}\n\n/**\n * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike}\n */\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n\n/**\n * An HTTP client that uses `node-fetch`.\n */\nexport class NodeFetchHttpClient implements HttpClient {\n /**\n * Provides minimum viable error handling and the logic that executes the abstract methods.\n * @param httpRequest - Object representing the outgoing HTTP request.\n * @returns An object representing the incoming HTTP response.\n */\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any): void => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (\n value &&\n Object.prototype.hasOwnProperty.call(value, \"value\") &&\n Object.prototype.hasOwnProperty.call(value, \"options\")\n ) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if 
(Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n const onUploadProgress = httpRequest.onUploadProgress;\n const uploadReportStream = new ReportTransform(onUploadProgress);\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n signal: abortController.signal,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n\n const streaming =\n httpRequest.streamResponseStatusCodes?.has(response.status) ||\n httpRequest.streamResponseBody;\n\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: streaming\n ? (response.body as unknown as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !streaming ? await response.text() : undefined,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n const downloadReportStream = new ReportTransform(onDownloadProgress);\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) 
|| undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error: any) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new AbortError(\"The operation was aborted.\");\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(\n operationResponse!.readableStreamBody,\n abortController\n );\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((e) => {\n logger.warning(\"Error when cleaning up abortListener on httpRequest\", e);\n });\n }\n }\n }\n\n // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent\n private proxyAgentMap: Map = new Map();\n private keepAliveAgents: AgentCache = {};\n\n private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true });\n\n private getOrCreateAgent(httpRequest: WebResourceLike): http.Agent | https.Agent {\n const isHttps = isUrlHttps(httpRequest.url);\n\n // At the moment, proxy settings and keepAlive are mutually\n // exclusive because the 'tunnel' library currently lacks the\n // ability to create a proxy with keepAlive turned on.\n if (httpRequest.proxySettings) {\n const { host, port, username, password } = httpRequest.proxySettings;\n const key = `${host}:${port}:${username}:${password}`;\n const proxyAgents = this.proxyAgentMap.get(key) ?? {};\n\n let agent = getCachedAgent(isHttps, proxyAgents);\n if (agent) {\n return agent;\n }\n\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n\n agent = tunnel.agent;\n if (tunnel.isHttps) {\n proxyAgents.httpsAgent = tunnel.agent as https.Agent;\n } else {\n proxyAgents.httpAgent = tunnel.agent;\n }\n this.proxyAgentMap.set(key, proxyAgents);\n\n return agent;\n } else if (httpRequest.keepAlive) {\n let agent = getCachedAgent(isHttps, this.keepAliveAgents);\n if (agent) {\n return agent;\n }\n\n const agentOptions: http.AgentOptions | https.AgentOptions = {\n keepAlive: httpRequest.keepAlive,\n };\n\n if (isHttps) {\n agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions);\n } else {\n agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions);\n }\n\n return agent;\n } else {\n return isHttps ? 
https.globalAgent : http.globalAgent;\n }\n }\n\n /**\n * Uses `node-fetch` to perform the request.\n */\n // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return node_fetch(input, init) as unknown as Promise;\n }\n\n /**\n * Prepares a request based on the provided web resource.\n */\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n if (this.cookieJar && !httpRequest.headers.get(\"Cookie\")) {\n const cookieString = await new Promise((resolve, reject) => {\n this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => {\n if (err) {\n reject(err);\n } else {\n resolve(cookie);\n }\n });\n });\n\n httpRequest.headers.set(\"Cookie\", cookieString);\n }\n\n // Set the http(s) agent\n requestInit.agent = this.getOrCreateAgent(httpRequest);\n\n requestInit.compress = httpRequest.decompressResponse;\n\n return requestInit;\n }\n\n /**\n * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a \"Set-Cookie\" header.\n */\n async processRequest(operationResponse: HttpOperationResponse): Promise {\n if (this.cookieJar) {\n const setCookieHeader = operationResponse.headers.get(\"Set-Cookie\");\n if (setCookieHeader !== undefined) {\n await new Promise((resolve, reject) => {\n this.cookieJar!.setCookie(\n setCookieHeader,\n operationResponse.request.url,\n { ignoreError: true },\n (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n }\n );\n });\n }\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestOptionsBase, TransferProgressEvent } from \"./webResource\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * The base options type for all operations.\n */\nexport interface OperationOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: OperationRequestOptions;\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * Options that allow configuring the handling of HTTP requests made by an SDK operation.\n */\nexport interface OperationRequestOptions {\n /**\n * User defined custom request headers that will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n}\n\n/**\n * Converts an OperationOptions to a RequestOptionsBase\n *\n * @param opts - OperationOptions object to convert to RequestOptionsBase\n */\nexport function operationOptionsToRequestOptionsBase(\n opts: T\n): RequestOptionsBase {\n const { requestOptions, tracingOptions, ...additionalOptions } = opts;\n\n let result: RequestOptionsBase = additionalOptions;\n\n if (requestOptions) {\n result = { ...result, ...requestOptions };\n }\n\n if (tracingOptions) {\n result.tracingContext = tracingOptions.tracingContext;\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n result.spanOptions = (tracingOptions as any)?.spanOptions;\n }\n\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\n/**\n * The underlying structure of a request policy.\n */\nexport interface RequestPolicy {\n /**\n * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made.\n * @param httpRequest - {@link WebResourceLike} describing the request to be made.\n */\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\n/**\n * The base class from which all request policies derive.\n */\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n /**\n * The main method to implement that manipulates a request/response.\n */\n protected constructor(\n /**\n * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline.\n */\n readonly _nextPolicy: RequestPolicy,\n /**\n * The options that can be passed to a given request policy.\n */\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n /**\n * Sends a network request based on the given web resource.\n * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made.\n */\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. 
If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel - The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel - The log level of this log.\n * @param message - The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as xml2js from \"xml2js\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./serializer.common\";\n\n// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed\n// by the xm2js library is mutable. 
See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536\n// By creating a new copy of the settings each time we instantiate the parser,\n// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally.\nconst xml2jsDefaultOptionsV2: xml2js.OptionsV2 = {\n explicitCharkey: false,\n trim: false,\n normalize: false,\n normalizeTags: false,\n attrkey: XML_ATTRKEY,\n explicitArray: true,\n ignoreAttrs: false,\n mergeAttrs: false,\n explicitRoot: true,\n validator: undefined,\n xmlns: false,\n explicitChildren: false,\n preserveChildrenOrder: false,\n childkey: \"$$\",\n charsAsChildren: false,\n includeWhiteChars: false,\n async: false,\n strict: true,\n attrNameProcessors: undefined,\n attrValueProcessors: undefined,\n tagNameProcessors: undefined,\n valueProcessors: undefined,\n rootName: \"root\",\n xmldec: {\n version: \"1.0\",\n encoding: \"UTF-8\",\n standalone: true,\n },\n doctype: undefined,\n renderOpts: {\n pretty: true,\n indent: \" \",\n newline: \"\\n\",\n },\n headless: false,\n chunkSize: 10000,\n emptyTag: \"\",\n cdata: false,\n};\n\n// The xml2js settings for general XML parsing operations.\nconst xml2jsParserSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsParserSettings.explicitArray = false;\n\n// The xml2js settings for general XML building operations.\nconst xml2jsBuilderSettings: any = Object.assign({}, xml2jsDefaultOptionsV2);\nxml2jsBuilderSettings.explicitArray = false;\nxml2jsBuilderSettings.renderOpts = {\n pretty: false,\n};\n\n/**\n * Converts given JSON object to XML string\n * @param obj - JSON object to be converted into XML string\n * @param opts - Options that govern the parsing of given JSON object\n */\nexport function stringifyXML(obj: unknown, opts: SerializerOptions = {}): string {\n xml2jsBuilderSettings.rootName = opts.rootName;\n xml2jsBuilderSettings.charkey = opts.xmlCharKey ?? XML_CHARKEY;\n const builder = new xml2js.Builder(xml2jsBuilderSettings);\n return builder.buildObject(obj);\n}\n\n/**\n * Converts given XML string into JSON\n * @param str - String containing the XML content to be parsed into JSON\n * @param opts - Options that govern the parsing of given xml string\n */\nexport function parseXML(str: string, opts: SerializerOptions = {}): Promise {\n xml2jsParserSettings.explicitRoot = !!opts.includeRoot;\n xml2jsParserSettings.charkey = opts.xmlCharKey ?? 
XML_CHARKEY;\n const xmlParser = new xml2js.Parser(xml2jsParserSettings);\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err, res) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { SerializerOptions, XML_CHARKEY } from \"../util/serializer.common\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { MapperType } from \"../serializer\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { parseXML } from \"../util/xml\";\n\n/**\n * Options to configure API response deserialization.\n */\nexport interface DeserializationOptions {\n /**\n * Configures the expected content types for the deserialization of\n * JSON and XML response bodies.\n */\n expectedContentTypes: DeserializationContentTypes;\n}\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions?: SerializerOptions\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DeserializationPolicy(\n nextPolicy,\n options,\n deserializationContentTypes,\n parsingOptions\n );\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\nexport const DefaultDeserializationOptions: DeserializationOptions = {\n expectedContentTypes: {\n json: defaultJsonContentTypes,\n xml: defaultXmlContentTypes,\n },\n};\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n public readonly xmlCharKey: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n requestPolicyOptions: RequestPolicyOptions,\n deserializationContentTypes?: DeserializationContentTypes,\n parsingOptions: SerializerOptions = {}\n ) {\n super(nextPolicy, requestPolicyOptions);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n this.xmlCharKey = parsingOptions.xmlCharKey ?? 
XML_CHARKEY;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, {\n xmlCharKey: this.xmlCharKey,\n })\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\n/**\n * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}.\n * @param jsonContentTypes - Response content types to parse the body as JSON.\n * @param xmlContentTypes - Response content types to parse the body as XML.\n * @param response - HTTP Response from the pipeline.\n * @param options - Options to the serializer, mostly for configuring the XML parser if needed.\n * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}.\n */\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse,\n options: SerializerOptions = {}\n): Promise {\n const updatedOptions: Required = {\n rootName: options.rootName ?? \"\",\n includeRoot: options.includeRoot ?? false,\n xmlCharKey: options.xmlCharKey ?? XML_CHARKEY,\n };\n return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then(\n (parsedResponse) => {\n if (!shouldDeserializeResponse(parsedResponse)) {\n return parsedResponse;\n }\n\n const operationSpec = parsedResponse.request.operationSpec;\n if (!operationSpec || !operationSpec.responses) {\n return parsedResponse;\n }\n\n const responseSpec = getOperationResponse(parsedResponse);\n\n const { error, shouldReturnResponse } = handleErrorResponse(\n parsedResponse,\n operationSpec,\n responseSpec\n );\n if (error) {\n throw error;\n } else if (shouldReturnResponse) {\n return parsedResponse;\n }\n\n // An operation response spec does exist for current status code, so\n // use it to deserialize the response.\n if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\",\n options\n );\n } catch (innerError: any) {\n const restError = new RestError(\n `Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n throw restError;\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\",\n options\n );\n }\n }\n\n return parsedResponse;\n }\n );\n}\n\nfunction isOperationSpecEmpty(operationSpec: OperationSpec): boolean {\n const expectedStatusCodes = Object.keys(operationSpec.responses);\n return (\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\")\n );\n}\n\nfunction handleErrorResponse(\n parsedResponse: HttpOperationResponse,\n operationSpec: OperationSpec,\n responseSpec: OperationResponse | undefined\n): { error: RestError | null; shouldReturnResponse: boolean } {\n const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300;\n const isExpectedStatusCode: boolean = isOperationSpecEmpty(operationSpec)\n ? isSuccessByStatus\n : !!responseSpec;\n\n if (isExpectedStatusCode) {\n if (responseSpec) {\n if (!responseSpec.isError) {\n return { error: null, shouldReturnResponse: false };\n }\n } else {\n return { error: null, shouldReturnResponse: false };\n }\n }\n\n const errorResponseSpec = responseSpec ?? operationSpec.responses.default;\n const streaming =\n parsedResponse.request.streamResponseStatusCodes?.has(parsedResponse.status) ||\n parsedResponse.request.streamResponseBody;\n const initialErrorMessage = streaming\n ? `Unexpected status code: ${parsedResponse.status}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(\n initialErrorMessage,\n undefined,\n parsedResponse.status,\n parsedResponse.request,\n parsedResponse\n );\n\n // If the item failed but there's no error spec or default spec to deserialize the error,\n // we should fail so we just throw the parsed response\n if (!errorResponseSpec) {\n throw error;\n }\n\n const defaultBodyMapper = errorResponseSpec.bodyMapper;\n const defaultHeadersMapper = errorResponseSpec.headersMapper;\n\n try {\n // If error response has a body, try to deserialize it using default body mapper.\n // Then try to extract error code & message from it\n if (parsedResponse.parsedBody) {\n const parsedBody = parsedResponse.parsedBody;\n let parsedError;\n if (defaultBodyMapper) {\n let valueToDeserialize: any = parsedBody;\n if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof parsedBody === \"object\" ? parsedBody[defaultBodyMapper.xmlElementName!] 
: [];\n }\n parsedError = operationSpec.serializer.deserialize(\n defaultBodyMapper,\n valueToDeserialize,\n \"error.response.parsedBody\"\n );\n }\n\n const internalError: any = parsedBody.error || parsedError || parsedBody;\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n\n if (defaultBodyMapper) {\n error.response!.parsedBody = parsedError;\n }\n }\n\n // If error response has headers, try to deserialize it using default header mapper\n if (parsedResponse.headers && defaultHeadersMapper) {\n error.response!.parsedHeaders = operationSpec.serializer.deserialize(\n defaultHeadersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n } catch (defaultError: any) {\n error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`;\n }\n\n return { error, shouldReturnResponse: false };\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse,\n opts: Required\n): Promise {\n const errorHandler = (err: Error & { code: string }): Promise => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse\n );\n return Promise.reject(e);\n };\n\n const streaming =\n operationResponse.request.streamResponseStatusCodes?.has(operationResponse.status) ||\n operationResponse.request.streamResponseBody;\n if (!streaming && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text, opts)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Options for how HTTP connections should be maintained for future\n * requests.\n */\nexport interface KeepAliveOptions {\n /**\n * When true, connections will be kept alive for multiple requests.\n * Defaults to true.\n */\n enable: boolean;\n}\n\n/**\n * By default, HTTP connections are maintained for future requests.\n */\nexport const DefaultKeepAliveOptions: KeepAliveOptions = {\n enable: true,\n};\n\n/**\n * Creates a policy that controls whether HTTP connections are maintained on future requests.\n * @param keepAliveOptions - Keep alive options. 
By default, HTTP connections are maintained for future requests.\n * @returns An instance of the {@link KeepAlivePolicy}\n */\nexport function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions);\n },\n };\n}\n\n/**\n * KeepAlivePolicy is a policy used to control keep alive settings for every request.\n */\nexport class KeepAlivePolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param keepAliveOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private readonly keepAliveOptions: KeepAliveOptions\n ) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResourceLike): Promise {\n request.keepAlive = this.keepAliveOptions.enable;\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Methods that are allowed to follow redirects 301 and 302\n */\nconst allowedRedirect = [\"GET\", \"HEAD\"];\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /**\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /**\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\n/**\n * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n * @param maximumRetries - Maximum number of redirects to follow.\n * @returns An instance of the {@link RedirectPolicy}\n */\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\n/**\n * Resends the request to a new destination if a response arrives with a \"location\" header, and a status code between 300 and 307.\n */\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, readonly maxRetries = 20) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && allowedRedirect.includes(request.method)) ||\n (status === 302 && allowedRedirect.includes(request.method)) ||\n (status === 303 && request.method === \"POST\") ||\n status === 307) &&\n (!policy.maxRetries || currentRetries < policy.maxRetries)\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n if (status === 303) {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1));\n }\n\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpOperationResponse } from \"../coreHttp\";\n\nexport const DEFAULT_CLIENT_RETRY_COUNT = 3;\n// intervals are in ms\nexport const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nexport const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nexport const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\nexport function isNumber(n: unknown): n is number {\n return typeof n === \"number\";\n}\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\n/**\n * @internal\n * Determines if the operation should be retried.\n *\n * @param retryLimit - Specifies the max number of retries.\n * @param predicate - Initial chekck on whether to retry based on given responses or errors\n * @param retryData - The retry data.\n * @returns True if the operation qualifies for a retry; false otherwise.\n */\nexport function shouldRetry(\n retryLimit: number,\n predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean,\n retryData: RetryData,\n response?: HttpOperationResponse,\n error?: RetryError\n): boolean {\n 
if (!predicate(response, error)) {\n return false;\n }\n\n return retryData.retryCount < retryLimit;\n}\n\n/**\n * @internal\n * Updates the retry data for the next attempt.\n *\n * @param retryOptions - specifies retry interval, and its lower bound and upper bound.\n * @param retryData - The retry data.\n * @param err - The operation\"s error, if any.\n */\nexport function updateRetryData(\n retryOptions: { retryInterval: number; minRetryInterval: number; maxRetryInterval: number },\n retryData: RetryData = { retryCount: 0, retryInterval: 0 },\n err?: RetryError\n): RetryData {\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1;\n const boundedRandDelta =\n retryOptions.retryInterval * 0.8 +\n Math.floor(Math.random() * (retryOptions.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n retryOptions.minRetryInterval + incrementDelta,\n retryOptions.maxRetryInterval\n );\n\n return retryData;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Helper TypeGuard that checks if the value is not null or undefined.\n * @param thing - Anything\n * @internal\n */\nexport function isDefined(thing: T | undefined | null): thing is T {\n return typeof thing !== \"undefined\" && thing !== null;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError, AbortSignalLike } from \"@azure/abort-controller\";\nimport { isDefined } from \"./typeguards\";\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds.\n * @param delayInMs - The number of milliseconds to be delayed.\n * @param value - The value to be resolved with after a timeout of t milliseconds.\n * @param options - The options for delay - currently abort options\n * @param abortSignal - The abortSignal associated with containing operation.\n * @param abortErrorMsg - The abort error message associated with containing operation.\n * @returns - Resolved promise\n */\nexport function delay(\n delayInMs: number,\n value?: T,\n options?: {\n abortSignal?: AbortSignalLike;\n abortErrorMsg?: string;\n }\n): Promise {\n return new Promise((resolve, reject) => {\n let timer: ReturnType | undefined = undefined;\n let onAborted: (() => void) | undefined = undefined;\n\n const rejectOnAbort = (): void => {\n return reject(\n new AbortError(options?.abortErrorMsg ? 
options?.abortErrorMsg : StandardAbortMessage)\n );\n };\n\n const removeListeners = (): void => {\n if (options?.abortSignal && onAborted) {\n options.abortSignal.removeEventListener(\"abort\", onAborted);\n }\n };\n\n onAborted = (): void => {\n if (isDefined(timer)) {\n clearTimeout(timer);\n }\n removeListeners();\n return rejectOnAbort();\n };\n\n if (options?.abortSignal && options.abortSignal.aborted) {\n return rejectOnAbort();\n }\n\n timer = setTimeout(() => {\n removeListeners();\n resolve(value);\n }, delayInMs);\n\n if (options?.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", onAborted);\n }\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\nimport { logger } from \"../log\";\n\n/**\n * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time.\n * @param retryCount - Maximum number of retries.\n * @param retryInterval - Base time between retries.\n * @param maxRetryInterval - Maximum time to wait between retries.\n */\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * Describes the Retry Mode type. Currently supporting only Exponential.\n */\nexport enum RetryMode {\n /**\n * Currently supported retry mode.\n * Each time a retry happens, it will take exponentially more time than the last time.\n */\n Exponential,\n}\n\n/**\n * Options that control how to retry failed requests.\n */\nexport interface RetryOptions {\n /**\n * The maximum number of retry attempts. Defaults to 3.\n */\n maxRetries?: number;\n\n /**\n * The amount of delay in milliseconds between retry attempts. Defaults to 30000\n * (30 seconds). The delay increases exponentially with each retry up to a maximum\n * specified by maxRetryDelayInMs.\n */\n retryDelayInMs?: number;\n\n /**\n * The maximum delay in milliseconds allowed before retrying an operation. 
Defaults\n * to 90000 (90 seconds).\n */\n maxRetryDelayInMs?: number;\n\n /**\n * Currently supporting only Exponential mode.\n */\n mode?: RetryMode;\n}\n\nexport const DefaultRetryOptions: RetryOptions = {\n maxRetries: DEFAULT_CLIENT_RETRY_COUNT,\n retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL,\n maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n};\n\n/**\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @param nextPolicy - The next RequestPolicy in the pipeline chain.\n * @param options - The options for this RequestPolicy.\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\nasync function retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n function shouldPolicyRetry(responseParam?: HttpOperationResponse): boolean {\n const statusCode = responseParam?.status;\n if (statusCode === 503 && response?.headers.get(Constants.HeaderConstants.RETRY_AFTER)) {\n return false;\n }\n\n if (\n statusCode === undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n return true;\n }\n\n retryData = updateRetryData(\n {\n retryInterval: policy.retryInterval,\n minRetryInterval: 0,\n maxRetryInterval: policy.maxRetryInterval,\n },\n retryData,\n requestError\n );\n\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) {\n logger.info(`Retrying request in ${retryData.retryInterval}`);\n try {\n await delay(retryData.retryInterval);\n const res = await policy._nextPolicy.sendRequest(request.clone());\n return retry(policy, request, res, retryData);\n } catch (err: any) {\n return retry(policy, request, response, retryData, err);\n }\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n throw err;\n } else {\n return 
response;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Debugger } from \"@azure/logger\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Sanitizer } from \"../util/sanitizer\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger as coreLogger } from \"../log\";\n\n/**\n * Options to pass to the {@link logPolicy}.\n * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged.\n */\nexport interface LogPolicyOptions {\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to:\n * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id,\n * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials,\n * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers,\n * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding,\n * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match,\n * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent.\n *\n * Any headers specified in this field will be added to that list.\n * Any other values will be written to logs as \"REDACTED\".\n */\n allowedHeaderNames?: string[];\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n */\n allowedQueryParameters?: string[];\n\n /**\n * The Debugger (logger) instance to use for writing pipeline logs.\n */\n logger?: Debugger;\n}\n\n/**\n * Creates a policy that logs information about the outgoing request and the incoming responses.\n * @param loggingOptions - Logging options.\n * @returns An instance of the {@link LogPolicy}\n */\nexport function logPolicy(loggingOptions: LogPolicyOptions = {}): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new LogPolicy(nextPolicy, options, loggingOptions);\n },\n };\n}\n\n/**\n * A policy that logs information about the outgoing request and the incoming responses.\n */\nexport class LogPolicy extends BaseRequestPolicy {\n logger: Debugger;\n sanitizer: Sanitizer;\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedHeaderNames(): Set {\n return this.sanitizer.allowedHeaderNames;\n }\n\n /**\n * Header names whose values will be logged when logging is enabled. Defaults to\n * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers\n * specified in this field will be added to that list. 
Any other values will\n * be written to logs as \"REDACTED\".\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedHeaderNames(allowedHeaderNames: Set) {\n this.sanitizer.allowedHeaderNames = allowedHeaderNames;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public get allowedQueryParameters(): Set {\n return this.sanitizer.allowedQueryParameters;\n }\n\n /**\n * Query string names whose values will be logged when logging is enabled. By default no\n * query string values are logged.\n * @deprecated Pass these into the constructor instead.\n */\n public set allowedQueryParameters(allowedQueryParameters: Set) {\n this.sanitizer.allowedQueryParameters = allowedQueryParameters;\n }\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n {\n logger = coreLogger.info,\n allowedHeaderNames = [],\n allowedQueryParameters = [],\n }: LogPolicyOptions = {}\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters });\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!this.logger.enabled) return this._nextPolicy.sendRequest(request);\n\n this.logRequest(request);\n return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response));\n }\n\n private logRequest(request: WebResourceLike): void {\n this.logger(`Request: ${this.sanitizer.sanitize(request)}`);\n }\n\n private logResponse(response: HttpOperationResponse): HttpOperationResponse {\n this.logger(`Response status code: ${response.status}`);\n this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`);\n return response;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Mapper } from \"./serializer\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\n\n/**\n * A path which describes how to access a particular property in a given object data source. 
May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values.\n */\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter - The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { MapperType, Serializer } from \"./serializer\";\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { HttpMethods } from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\n\n/**\n * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. 
This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The media type of the request body.\n * This value can be used to aide in serialization if it is provided.\n */\n readonly mediaType?:\n | \"json\"\n | \"xml\"\n | \"form\"\n | \"binary\"\n | \"multipart\"\n | \"text\"\n | \"unknown\"\n | string;\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\n/**\n * Gets the list of status codes for streaming responses.\n * @internal\n */\nexport function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set {\n const result = new Set();\n for (const statusCode in operationSpec.responses) {\n const operationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result.add(Number(statusCode));\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as os from \"os\";\nimport { Constants } from \"../util/constants\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Telemetry information. 
Key/value pairs to include inside the User-Agent string.\n */\nexport type TelemetryInfo = { key?: string; value?: string };\n\n/**\n * Options for adding user agent details to outgoing requests.\n */\nexport interface UserAgentOptions {\n /**\n * String prefix to add to the user agent for outgoing requests.\n * Defaults to an empty string.\n */\n userAgentPrefix?: string;\n}\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"core-http\",\n value: Constants.coreHttpVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\n/**\n * The default approach to generate user agents.\n * Uses static information from this package, plus system information available from the runtime.\n */\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\n/**\n * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n * @param userAgentData - Telemetry information.\n * @returns A new {@link UserAgentPolicy}.\n */\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key === undefined || userAgentData.key === null\n ? getDefaultUserAgentKey()\n : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value === undefined || userAgentData.value === null\n ? 
getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\n/**\n * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}.\n */\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptions,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n /**\n * Adds the user agent header to the outgoing request.\n */\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n /**\n * CSV: Each pair of segments joined by a single comma.\n */\n Csv = \",\",\n /**\n * SSV: Each pair of segments joined by a single space character.\n */\n Ssv = \" \",\n /**\n * TSV: Each pair of segments joined by a single tab character.\n */\n Tsv = \"\\t\",\n /**\n * Pipes: Each pair of segments joined by a single pipe character.\n */\n Pipes = \"|\",\n /**\n * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2`\n */\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"../policies/requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nexport const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 
* 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\nexport function bearerTokenAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n const getToken = createTokenCycler(credential, scopes /* , options */);\n\n class BearerTokenAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const { token } = await getToken({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n });\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new BearerTokenAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n 
RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResource } from \"../webResource\";\n\n/**\n * Returns a request policy factory that can be used to create an instance of\n * {@link DisableResponseDecompressionPolicy}.\n */\nexport function disableResponseDecompressionPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new DisableResponseDecompressionPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * A policy to disable response decompression according to Accept-Encoding header\n * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding\n */\nexport class DisableResponseDecompressionPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of DisableResponseDecompressionPolicy.\n *\n * @param nextPolicy -\n * @param options -\n */\n // The parent constructor is protected.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n * @returns\n */\n public async sendRequest(request: WebResource): Promise {\n request.decompressResponse = false;\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that assigns a unique request id to outgoing requests.\n * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request.\n */\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, request.requestId);\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\n\nlet cachedHttpClient: HttpClient | undefined;\n\nexport function getCachedDefaultHttpClient(): HttpClient {\n if (!cachedHttpClient) {\n cachedHttpClient = new DefaultHttpClient();\n }\n\n return cachedHttpClient;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// BaseRequestPolicy has a protected constructor.\n/* eslint-disable @typescript-eslint/no-useless-constructor */\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from 
\"../webResource\";\n\nexport function ndJsonPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new NdJsonPolicy(nextPolicy, options);\n },\n };\n}\n\n/**\n * NdJsonPolicy that formats a JSON array as newline-delimited JSON\n */\nclass NdJsonPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of KeepAlivePolicy.\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends a request.\n */\n public async sendRequest(request: WebResourceLike): Promise {\n // There currently isn't a good way to bypass the serializer\n if (typeof request.body === \"string\" && request.body.startsWith(\"[\")) {\n const body = JSON.parse(request.body);\n if (Array.isArray(body)) {\n request.body = body.map((item) => JSON.stringify(item) + \"\\n\").join(\"\");\n }\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { Constants } from \"../util/constants\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getEnvironmentValue } from \"../util/utils\";\n\n/**\n * Stores the patterns specified in NO_PROXY environment variable.\n * @internal\n */\nexport const globalNoProxyList: string[] = [];\nlet noProxyListLoaded: boolean = false;\n\n/** A cache of whether a host should bypass the proxy. */\nconst globalBypassedMap: Map = new Map();\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n/**\n * Check whether the host of a given `uri` matches any pattern in the no proxy list.\n * If there's a match, any request sent to the same host shouldn't have the proxy settings set.\n * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\n */\nfunction isBypassed(\n uri: string,\n noProxyList: string[],\n bypassedMap?: Map\n): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (bypassedMap?.has(host)) {\n return bypassedMap.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n bypassedMap?.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n noProxyListLoaded = true;\n if (noProxy) {\n return noProxy\n .split(\",\")\n 
.map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed.\n * @param proxyUrl - URL of the proxy\n * @returns The default proxy settings, or undefined.\n */\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\n/**\n * A policy that allows one to apply proxy settings to all requests.\n * If not passed static settings, they will be retrieved from the HTTPS_PROXY\n * or HTTP_PROXY environment variables.\n * @param proxySettings - ProxySettings to use on each request.\n * @param options - additional settings, for example, custom NO_PROXY patterns\n */\nexport function proxyPolicy(\n proxySettings?: ProxySettings,\n options?: {\n /** a list of patterns to override those loaded from NO_PROXY environment variable. */\n customNoProxyList?: string[];\n }\n): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n if (!noProxyListLoaded) {\n globalNoProxyList.push(...loadNoProxy());\n }\n return {\n create: (nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions) => {\n return new ProxyPolicy(\n nextPolicy,\n requestPolicyOptions,\n proxySettings!,\n options?.customNoProxyList\n );\n },\n };\n}\n\nfunction extractAuthFromUrl(url: string): {\n username?: string;\n password?: string;\n urlWithoutAuth: string;\n} {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public proxySettings: ProxySettings,\n private customNoProxyList?: string[]\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (\n !request.proxySettings &&\n !isBypassed(\n request.url,\n this.customNoProxyList ?? globalNoProxyList,\n this.customNoProxyList ? 
undefined : globalBypassedMap\n )\n ) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"../util/utils\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param originalRequest - The original request\n * @param reuseUrlToo - Should the url from the original request be reused as well. Default false.\n * @returns A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param body - The response body received after making the original request.\n * @returns The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err: any) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param url - The original request url\n * @returns The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param provider - The provider name to be registered.\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n */\nasync function registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n const response = await policy._nextPolicy.sendRequest(reqOptions);\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param policy - The RPRegistrationPolicy this function is being called against.\n * @param url - The request url for polling\n * @param originalRequest - The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns True if RP Registration is successful.\n */\nasync function getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n const res = await policy._nextPolicy.sendRequest(reqOptions);\n const obj = res.parsedBody;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n await delay(policy._retryTimeout * 1000);\n return getRegistrationStatus(policy, url, originalRequest);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n * @param authenticationProvider - The authentication provider.\n * @returns An instance of the {@link SigningPolicy}.\n */\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\n/**\n * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method.\n */\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL,\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL,\n DEFAULT_CLIENT_RETRY_COUNT,\n DEFAULT_CLIENT_RETRY_INTERVAL,\n RetryData,\n RetryError,\n isNumber,\n shouldRetry,\n updateRetryData,\n} from \"../util/exponentialBackoffStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - Maximum number of retries.\n * @param 
retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n * @returns An instance of the {@link SystemErrorRetryPolicy}\n */\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * A policy that retries when there's a system error, identified by the codes \"ETIMEDOUT\", \"ESOCKETTIMEDOUT\", \"ECONNREFUSED\", \"ECONNRESET\" or \"ENOENT\".\n * @param retryCount - The client retry count.\n * @param retryInterval - The client retry interval, in milliseconds.\n * @param minRetryInterval - The minimum retry interval, in milliseconds.\n * @param maxRetryInterval - The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n\n function shouldPolicyRetry(_response?: HttpOperationResponse, error?: RetryError): boolean {\n if (\n error &&\n error.code &&\n (error.code === \"ETIMEDOUT\" ||\n error.code === \"ESOCKETTIMEDOUT\" ||\n error.code === \"ECONNREFUSED\" ||\n error.code === \"ECONNRESET\" ||\n error.code === \"ENOENT\")\n ) {\n return true;\n }\n return false;\n }\n\n if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (nestedErr: any) {\n return retry(policy, request, operationResponse, nestedErr, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Maximum number of retries for the throttling retry policy\n */\nexport const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport { AbortError } from \"@azure/abort-controller\";\nimport { Constants } from \"../util/constants\";\nimport { DEFAULT_CLIENT_MAX_RETRY_COUNT } from \"../util/throttlingRetryStrategy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { delay } from \"../util/delay\";\n\ntype ResponseHandler = (\n httpRequest: WebResourceLike,\n response: HttpOperationResponse\n) => Promise;\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n * @returns\n */\nexport function throttlingRetryPolicy(): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new ThrottlingRetryPolicy(nextPolicy, options);\n },\n };\n}\n\nconst StandardAbortMessage = \"The operation was aborted.\";\n\n/**\n * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons.\n * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header.\n *\n * To learn more, 
please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private _handleResponse: ResponseHandler;\n private numberOfRetries = 0;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n _handleResponse?: ResponseHandler\n ) {\n super(nextPolicy, options);\n this._handleResponse = _handleResponse || this._defaultResponseHandler;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n const response = await this._nextPolicy.sendRequest(httpRequest.clone());\n if (\n response.status !== StatusCodes.TooManyRequests &&\n response.status !== StatusCodes.ServiceUnavailable\n ) {\n return response;\n } else {\n return this._handleResponse(httpRequest, response);\n }\n }\n\n private async _defaultResponseHandler(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse\n ): Promise {\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader) {\n const delayInMs: number | undefined =\n ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader);\n if (delayInMs) {\n this.numberOfRetries += 1;\n\n await delay(delayInMs, undefined, {\n abortSignal: httpRequest.abortSignal,\n abortErrorMsg: StandardAbortMessage,\n });\n\n if (httpRequest.abortSignal?.aborted) {\n throw new AbortError(StandardAbortMessage);\n }\n\n if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) {\n return this.sendRequest(httpRequest);\n } else {\n return this._nextPolicy.sendRequest(httpRequest);\n }\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? 
undefined : diff;\n } catch (error: any) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./requestPolicy\";\nimport {\n Span,\n SpanKind,\n SpanStatusCode,\n createSpanFunction,\n getTraceParentHeader,\n isSpanContextValid,\n} from \"@azure/core-tracing\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport { logger } from \"../log\";\n\nconst createSpan = createSpanFunction({\n packagePrefix: \"\",\n namespace: \"\",\n});\n\n/**\n * Options to customize the tracing policy.\n */\nexport interface TracingPolicyOptions {\n /**\n * User agent used to better identify the outgoing requests traced by the tracing policy.\n */\n userAgent?: string;\n}\n\n/**\n * Creates a policy that wraps outgoing requests with a tracing span.\n * @param tracingOptions - Tracing options.\n * @returns An instance of the {@link TracingPolicy} class.\n */\nexport function tracingPolicy(tracingOptions: TracingPolicyOptions = {}): RequestPolicyFactory {\n return {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n return new TracingPolicy(nextPolicy, options, tracingOptions);\n },\n };\n}\n\n/**\n * A policy that wraps outgoing requests with a tracing span.\n */\nexport class TracingPolicy extends BaseRequestPolicy {\n private userAgent?: string;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n tracingOptions: TracingPolicyOptions\n ) {\n super(nextPolicy, options);\n this.userAgent = tracingOptions.userAgent;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n if (!request.tracingContext) {\n return this._nextPolicy.sendRequest(request);\n }\n\n const span = this.tryCreateSpan(request);\n\n if (!span) {\n return this._nextPolicy.sendRequest(request);\n }\n\n try {\n const response = await this._nextPolicy.sendRequest(request);\n this.tryProcessResponse(span, response);\n return response;\n } catch (err: any) {\n this.tryProcessError(span, err);\n throw err;\n }\n }\n\n tryCreateSpan(request: WebResourceLike): Span | undefined {\n try {\n // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier.\n // We can pass this as a separate parameter once we upgrade to the latest core-tracing.\n const { span } = createSpan(`HTTP ${request.method}`, {\n tracingOptions: {\n spanOptions: {\n ...(request as any).spanOptions,\n kind: SpanKind.CLIENT,\n },\n tracingContext: request.tracingContext,\n },\n });\n\n // If the span is not recording, don't do any more work.\n if (!span.isRecording()) {\n span.end();\n return undefined;\n }\n\n const namespaceFromContext = request.tracingContext?.getValue(Symbol.for(\"az.namespace\"));\n\n if (typeof namespaceFromContext === \"string\") {\n span.setAttribute(\"az.namespace\", namespaceFromContext);\n }\n\n span.setAttributes({\n \"http.method\": request.method,\n \"http.url\": request.url,\n requestId: request.requestId,\n });\n\n if (this.userAgent) {\n span.setAttribute(\"http.user_agent\", this.userAgent);\n }\n\n // set headers\n const spanContext = span.spanContext();\n const traceParentHeader = getTraceParentHeader(spanContext);\n if (traceParentHeader && isSpanContextValid(spanContext)) {\n request.headers.set(\"traceparent\", traceParentHeader);\n const traceState = spanContext.traceState && 
spanContext.traceState.serialize();\n // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent\n if (traceState) {\n request.headers.set(\"tracestate\", traceState);\n }\n }\n return span;\n } catch (error: any) {\n logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`);\n return undefined;\n }\n }\n\n private tryProcessError(span: Span, err: any): void {\n try {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: err.message,\n });\n\n if (err.statusCode) {\n span.setAttribute(\"http.status_code\", err.statusCode);\n }\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n\n private tryProcessResponse(span: Span, response: HttpOperationResponse): void {\n try {\n span.setAttribute(\"http.status_code\", response.status);\n const serviceRequestId = response.headers.get(\"x-ms-request-id\");\n if (serviceRequestId) {\n span.setAttribute(\"serviceRequestId\", serviceRequestId);\n }\n span.setStatus({\n code: SpanStatusCode.OK,\n });\n span.end();\n } catch (error: any) {\n logger.warning(`Skipping tracing span processing due to an error: ${error.message}`);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as utils from \"./util/utils\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport {\n DefaultDeserializationOptions,\n DeserializationContentTypes,\n deserializationPolicy,\n} from \"./policies/deserializationPolicy\";\nimport { DefaultKeepAliveOptions, keepAlivePolicy } from \"./policies/keepAlivePolicy\";\nimport { DefaultRedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport { DefaultRetryOptions, exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { LogPolicyOptions, logPolicy } from \"./policies/logPolicy\";\nimport {\n OperationParameter,\n ParameterPath,\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n} from \"./operationParameter\";\nimport { OperationSpec, getStreamResponseStatusCodes } from \"./operationSpec\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResource,\n WebResourceLike,\n isWebResourceLike,\n} from \"./webResource\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"./policies/requestPolicy\";\nimport { SerializerOptions, XML_ATTRKEY, XML_CHARKEY } from \"./util/serializer.common\";\nimport { ServiceCallback, isNode } from \"./util/utils\";\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport {\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n userAgentPolicy,\n} from \"./policies/userAgentPolicy\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { InternalPipelineOptions } from \"./pipelineOptions\";\nimport { OperationArguments } from \"./operationArguments\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { URLBuilder } from \"./url\";\nimport { bearerTokenAuthenticationPolicy } from \"./policies/bearerTokenAuthenticationPolicy\";\nimport { disableResponseDecompressionPolicy } from 
\"./policies/disableResponseDecompressionPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport { getCachedDefaultHttpClient } from \"./httpClientCache\";\nimport { logger } from \"./log\";\nimport { ndJsonPolicy } from \"./policies/ndJsonPolicy\";\nimport { proxyPolicy } from \"./policies/proxyPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { stringifyXML } from \"./util/xml\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { tracingPolicy } from \"./policies/tracingPolicy\";\n\n/**\n * Options to configure a proxy for outgoing requests (Node.js only).\n */\nexport interface ProxySettings {\n /**\n * The proxy's host address.\n */\n host: string;\n\n /**\n * The proxy host's port.\n */\n port: number;\n\n /**\n * The user name to authenticate with the proxy, if required.\n */\n username?: string;\n\n /**\n * The password to authenticate with the proxy, if required.\n */\n password?: string;\n}\n\n/**\n * An alias of {@link ProxySettings} for future use.\n */\nexport type ProxyOptions = ProxySettings;\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. 
If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-useragent\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * If specified, will be used to build the BearerTokenAuthenticationPolicy.\n */\n credentialScopes?: string | string[];\n}\n\n/**\n * ServiceClient sends service requests and receives responses.\n */\nexport class ServiceClient {\n /**\n * If specified, this is the base URI that requests will be made against for this ServiceClient.\n * If it is not specified, then all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptions;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @param credentials - The credentials used for authentication with the service.\n * @param options - The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: TokenCredential | ServiceClientCredentials,\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || getCachedDefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n logger.info(\"ServiceClient: using custom request policies\");\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n let authPolicyFactory: RequestPolicyFactory | undefined = undefined;\n if (isTokenCredential(credentials)) {\n logger.info(\n \"ServiceClient: creating bearer token authentication policy from provided credentials\"\n );\n // Create a wrapped RequestPolicyFactory here so that we can provide the\n // correct scope to the BearerTokenAuthenticationPolicy at the first time\n // one is requested. 
This is needed because generated ServiceClient\n // implementations do not set baseUri until after ServiceClient's constructor\n // is finished, leaving baseUri empty at the time when it is needed to\n // build the correct scope name.\n const wrappedPolicyFactory: () => RequestPolicyFactory = () => {\n let bearerTokenPolicyFactory: RequestPolicyFactory | undefined = undefined;\n // eslint-disable-next-line @typescript-eslint/no-this-alias\n const serviceClient = this;\n const serviceClientOptions = options;\n return {\n create(nextPolicy: RequestPolicy, createOptions: RequestPolicyOptions): RequestPolicy {\n const credentialScopes = getCredentialScopes(\n serviceClientOptions,\n serviceClient.baseUri\n );\n\n if (!credentialScopes) {\n throw new Error(\n `When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. Unable to create a bearerTokenAuthenticationPolicy`\n );\n }\n\n if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) {\n bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(\n credentials,\n credentialScopes\n );\n }\n\n return bearerTokenPolicyFactory.create(nextPolicy, createOptions);\n },\n };\n };\n\n authPolicyFactory = wrappedPolicyFactory();\n } else if (credentials && typeof credentials.signRequest === \"function\") {\n logger.info(\"ServiceClient: creating signing policy from provided credentials\");\n authPolicyFactory = signingPolicy(credentials);\n } else if (credentials !== undefined && credentials !== null) {\n throw new Error(\"The credentials argument must implement the TokenCredential interface\");\n }\n\n logger.info(\"ServiceClient: using default request policies\");\n requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options);\n if (options.requestPolicyFactories) {\n // options.requestPolicyFactories can also be a function that manipulates\n // the default requestPolicyFactories array\n const newRequestPolicyFactories: void | RequestPolicyFactory[] =\n options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error: any) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param operationArguments - The arguments that the HTTP request's templated values will be populated from.\n * @param operationSpec - The OperationSpec to use to populate the httpRequest.\n * @param callback - The callback to call 
when the response is received.\n */\n async sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const serializerOptions = operationArguments.options?.serializerOptions;\n const httpRequest: WebResourceLike = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter),\n serializerOptions\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue !== undefined && queryParameterValue !== null) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter),\n serializerOptions\n );\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null\n ) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if (queryParameterValue.length === 0) {\n // The collection is empty, no need to try serializing the current queryParam\n continue;\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] =\n item === undefined || item === null ? 
\"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat !== undefined &&\n queryParameter.collectionFormat !== null &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType && operationSpec.requestBody) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue !== undefined && headerValue !== null) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter),\n serializerOptions\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n\n if (options.spanOptions) {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n (httpRequest as any).spanOptions = options.spanOptions;\n }\n\n if (options.tracingContext) {\n httpRequest.tracingContext = options.tracingContext;\n }\n\n if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) {\n httpRequest.shouldDeserialize = options.shouldDeserialize;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n 
serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseStatusCodes === undefined) {\n httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec);\n }\n\n let rawResponse: HttpOperationResponse;\n let sendRequestError;\n try {\n rawResponse = await this.sendRequest(httpRequest);\n } catch (error: any) {\n sendRequestError = error;\n }\n if (sendRequestError) {\n if (sendRequestError.response) {\n sendRequestError.details = flattenResponse(\n sendRequestError.response,\n operationSpec.responses[sendRequestError.statusCode] ||\n operationSpec.responses[\"default\"]\n );\n }\n result = Promise.reject(sendRequestError);\n } else {\n result = Promise.resolve(\n flattenResponse(rawResponse!, operationSpec.responses[rawResponse!.status])\n );\n }\n } catch (error: any) {\n result = Promise.reject(error);\n }\n\n const cb = callback;\n if (cb) {\n result\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n const serializerOptions = operationArguments.options?.serializerOptions ?? {};\n const updatedOptions: Required = {\n rootName: serializerOptions.rootName ?? \"\",\n includeRoot: serializerOptions.includeRoot ?? false,\n xmlCharKey: serializerOptions.xmlCharKey ?? XML_CHARKEY,\n };\n\n const xmlCharKey = serializerOptions.xmlCharKey;\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } =\n bodyMapper;\n const typeName = bodyMapper.type.name;\n\n try {\n if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString,\n updatedOptions\n );\n\n const isStream = typeName === MapperType.Stream;\n\n if (operationSpec.isXML) {\n const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : \"xmlns\";\n const value = getXmlValueWithNamespace(\n xmlNamespace,\n xmlnsKey,\n typeName,\n httpRequest.body,\n updatedOptions\n );\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n value,\n xmlElementName || xmlName || serializedName!,\n xmlnsKey,\n xmlNamespace\n ),\n {\n rootName: xmlName || serializedName,\n xmlCharKey,\n }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(value, {\n rootName: xmlName || serializedName,\n xmlCharKey,\n });\n }\n } else if (\n typeName === MapperType.String &&\n (operationSpec.contentType?.match(\"text/plain\") || operationSpec.mediaType === \"text\")\n ) {\n // the String serializer has validated that request body is a string\n // so just send the string.\n return;\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error: any) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue !== undefined && formDataParameterValue !== null) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter),\n updatedOptions\n );\n }\n }\n }\n}\n\n/**\n * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself\n */\nfunction getXmlValueWithNamespace(\n xmlNamespace: string | undefined,\n xmlnsKey: string,\n typeName: string,\n serializedValue: any,\n options: Required\n): any {\n // Composite and Sequence schemas already got their root namespace set during serialization\n // We just need to add xmlns to the other schema types\n if (xmlNamespace && ![\"Composite\", \"Sequence\", \"Dictionary\"].includes(typeName)) {\n const result: any = {};\n result[options.xmlCharKey] = serializedValue;\n result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace };\n return result;\n }\n\n return serializedValue;\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n authPolicyFactory: RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (authPolicyFactory) {\n factories.push(authPolicyFactory);\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const 
userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n factories.push(redirectPolicy());\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n if (isNode) {\n factories.push(proxyPolicy(options.proxySettings));\n }\n\n factories.push(logPolicy({ logger: logger.info }));\n\n return factories;\n}\n\n/**\n * Creates an HTTP pipeline based on the given options.\n * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client.\n * @param authPolicyFactory - An optional authentication policy factory to use for signing requests.\n * @returns A set of options that can be passed to create a new {@link ServiceClient}.\n */\nexport function createPipelineFromOptions(\n pipelineOptions: InternalPipelineOptions,\n authPolicyFactory?: RequestPolicyFactory\n): ServiceClientOptions {\n const requestPolicyFactories: RequestPolicyFactory[] = [];\n\n if (pipelineOptions.sendStreamingJson) {\n requestPolicyFactories.push(ndJsonPolicy());\n }\n\n let userAgentValue = undefined;\n if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) {\n const userAgentInfo: string[] = [];\n userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix);\n\n // Add the default user agent value if it isn't already specified\n // by the userAgentPrefix option.\n const defaultUserAgentInfo = getDefaultUserAgentValue();\n if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) {\n userAgentInfo.push(defaultUserAgentInfo);\n }\n\n userAgentValue = userAgentInfo.join(\" \");\n }\n\n const keepAliveOptions = {\n ...DefaultKeepAliveOptions,\n ...pipelineOptions.keepAliveOptions,\n };\n\n const retryOptions = {\n ...DefaultRetryOptions,\n ...pipelineOptions.retryOptions,\n };\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...pipelineOptions.redirectOptions,\n };\n\n if (isNode) {\n requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));\n }\n\n const deserializationOptions = {\n ...DefaultDeserializationOptions,\n ...pipelineOptions.deserializationOptions,\n };\n\n const loggingOptions: LogPolicyOptions = {\n ...pipelineOptions.loggingOptions,\n };\n\n requestPolicyFactories.push(\n tracingPolicy({ userAgent: userAgentValue }),\n keepAlivePolicy(keepAliveOptions),\n userAgentPolicy({ value: userAgentValue }),\n generateClientRequestIdPolicy(),\n deserializationPolicy(deserializationOptions.expectedContentTypes),\n throttlingRetryPolicy(),\n systemErrorRetryPolicy(),\n exponentialRetryPolicy(\n retryOptions.maxRetries,\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n )\n );\n\n if (redirectOptions.handleRedirects) {\n requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n if (authPolicyFactory) {\n requestPolicyFactories.push(authPolicyFactory);\n }\n\n requestPolicyFactories.push(logPolicy(loggingOptions));\n\n if (isNode && pipelineOptions.decompressResponse === false) {\n requestPolicyFactories.push(disableResponseDecompressionPolicy());\n }\n\n return {\n httpClient: 
pipelineOptions.httpClient,\n requestPolicyFactories,\n };\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n const serializerOptions = operationArguments.options?.serializerOptions;\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions);\n if (propertyValue !== undefined && propertyValue !== null) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent !== undefined && parent !== null && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\n/**\n * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}).\n * @param _response - Wrapper object for http response.\n * @param responseSpec - Mappers for how to parse the response properties.\n * @returns - A normalized response object.\n */\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = >(\n obj: T\n ): T & {\n _response: HttpOperationResponse;\n } => {\n return Object.defineProperty(obj, \"_response\", {\n value: _response,\n }) as T & {\n _response: HttpOperationResponse;\n };\n };\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n const arrayResponse = [...(_response.parsedBody || [])] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of 
Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n\nfunction getCredentialScopes(\n options?: ServiceClientOptions,\n baseUri?: string\n): string | string[] | undefined {\n if (options?.credentialScopes) {\n const scopes = options.credentialScopes;\n return Array.isArray(scopes)\n ? scopes.map((scope) => new URL(scope).toString())\n : new URL(scopes).toString();\n }\n\n if (baseUri) {\n return `${baseUri}/.default`;\n }\n return undefined;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// NOTE: we've moved this code into core-tracing but these functions\n// were a part of the GA'd library and can't be removed until the next major\n// release. They currently get called always, even if tracing is not enabled.\n\nimport { Span, createSpanFunction as coreTracingCreateSpanFunction } from \"@azure/core-tracing\";\nimport { OperationOptions } from \"./operationOptions\";\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. Use core-tracing instead.\n * @hidden\n */\nexport interface SpanConfig {\n /**\n * Package name prefix\n */\n packagePrefix: string;\n /**\n * Service namespace\n */\n namespace: string;\n}\n\n/**\n * This function is only here for compatibility. Use createSpanFunction in core-tracing.\n *\n * @deprecated This function is only here for compatibility. 
Use createSpanFunction in core-tracing.\n * @hidden\n\n * @param spanConfig - The name of the operation being performed.\n * @param tracingOptions - The options for the underlying http request.\n */\nexport function createSpanFunction(\n args: SpanConfig\n): (\n operationName: string,\n operationOptions: T\n) => { span: Span; updatedOptions: T } {\n return coreTracingCreateSpanFunction(args);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken } from \"@azure/core-auth\";\n\n/**\n * Defines the default token refresh buffer duration.\n */\nexport const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes\n\n/**\n * Provides a cache for an AccessToken that was that\n * was returned from a TokenCredential.\n */\nexport interface AccessTokenCache {\n /**\n * Sets the cached token.\n *\n * @param accessToken - The {@link AccessToken} to be cached or null to\n * clear the cached token.\n */\n setCachedToken(accessToken: AccessToken | undefined): void;\n\n /**\n * Returns the cached {@link AccessToken} or undefined if nothing is cached.\n */\n getCachedToken(): AccessToken | undefined;\n}\n\n/**\n * Provides an {@link AccessTokenCache} implementation which clears\n * the cached {@link AccessToken}'s after the expiresOnTimestamp has\n * passed.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class ExpiringAccessTokenCache implements AccessTokenCache {\n private tokenRefreshBufferMs: number;\n private cachedToken?: AccessToken = undefined;\n\n /**\n * Constructs an instance of {@link ExpiringAccessTokenCache} with\n * an optional expiration buffer time.\n */\n constructor(tokenRefreshBufferMs: number = TokenRefreshBufferMs) {\n this.tokenRefreshBufferMs = tokenRefreshBufferMs;\n }\n\n /**\n * Saves an access token into the internal in-memory cache.\n * @param accessToken - Access token or undefined to clear the cache.\n */\n setCachedToken(accessToken: AccessToken | undefined): void {\n this.cachedToken = accessToken;\n }\n\n /**\n * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon.\n */\n getCachedToken(): AccessToken | undefined {\n if (\n this.cachedToken &&\n Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp\n ) {\n this.cachedToken = undefined;\n }\n\n return this.cachedToken;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential } from \"@azure/core-auth\";\n\n/**\n * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token.\n *\n * @deprecated No longer used in the bearer authorization policy.\n */\nexport class AccessTokenRefresher {\n private promise: Promise | undefined;\n private lastCalled = 0;\n\n constructor(\n private credential: TokenCredential,\n private scopes: string | string[],\n private requiredMillisecondsBeforeNewRefresh: number = 30000\n ) {}\n\n /**\n * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying\n * that we are ready for a new refresh.\n */\n public isReady(): boolean {\n // We're only ready for a new refresh if the required milliseconds have passed.\n return (\n !this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh\n );\n }\n\n /**\n * Stores the time in which it is called,\n * then requests a new token,\n * then sets this.promise to undefined,\n * then returns 
the token.\n */\n private async getToken(options: GetTokenOptions): Promise {\n this.lastCalled = Date.now();\n const token = await this.credential.getToken(this.scopes, options);\n this.promise = undefined;\n return token || undefined;\n }\n\n /**\n * Requests a new token if we're not currently waiting for a new token.\n * Returns null if the required time between each call hasn't been reached.\n */\n public refresh(options: GetTokenOptions): Promise {\n if (!this.promise) {\n this.promise = this.getToken(options);\n }\n\n return this.promise;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\n/**\n * A simple {@link ServiceClientCredential} that authenticates with a username and a password.\n */\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n /**\n * Username\n */\n userName: string;\n\n /**\n * Password\n */\n password: string;\n\n /**\n * Authorization scheme. Defaults to \"Basic\".\n * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes\n */\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @param userName - User name.\n * @param password - Password.\n * @param authorizationScheme - The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be 
applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param webResource - The WebResourceLike to be signed.\n * @returns The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ApiKeyCredentialOptions, ApiKeyCredentials } from \"./apiKeyCredentials\";\n\n/**\n * A {@link TopicCredentials} object used for Azure Event Grid.\n */\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @param topicKey - The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n 
}\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","tunnel","inspect","createClientLogger","Transform","tough","abortController","AbortController","AbortError","FormData","https","http","node_fetch","HttpPipelineLogLevel","__rest","xml2js","StandardAbortMessage","RetryMode","retry","logger","coreLogger","os","QueryCollectionFormat","DefaultHttpClient","utils.generateUuid","createSpanFunction","SpanKind","getTraceParentHeader","isSpanContextValid","SpanStatusCode","isTokenCredential","utils.prepareXMLRootList","utils.isPrimitiveType","coreTracingCreateSpanFunction","base64.encodeString"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AAEA;;AAEG;AACH,SAAS,YAAY,CAAC,UAAkB,EAAA;AACtC,IAAA,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4EK,SAAU,iBAAiB,CAAC,MAAgB,EAAA;AAChD,IAAA,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAWlB,CAAC;AACF,QAAA,IACE,OAAO,UAAU,CAAC,UAAU,KAAK,UAAU;AAC3C,YAAA,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU;AACtC,YAAA,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;AACpC,YAAA,OAAO,UAAU,CAAC,GAAG,KAAK,UAAU;AACpC,YAAA,OAAO,UAAU,CAAC,QAAQ,KAAK,UAAU;AACzC,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU;AACvC,YAAA,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;AAC7C,YAAA,OAAO,UAAU,CAAC,YAAY,KAAK,UAAU;AAC7C,YAAA,OAAO,UAAU,CAAC,WAAW,KAAK,UAAU;AAC5C,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,UAAU,EACvC;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACF,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;AAEG;MACU,WAAW,CAAA;AAGtB,IAAA,WAAA,CAAY,UAA2B,EAAA;AACrC,QAAA,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;AACtB,QAAA,IAAI,UAAU,EAAE;AACd,YAAA,KAAK,MAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;AAC9C,aAAA;AACF,SAAA;KACF;AAED;;;;;AAKG;IACI,GAAG,CAAC,UAAkB,EAAE,WAA4B,EAAA;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;AAC3C,YAAA,IAAI,EAAE,UAAU;AAChB,YAAA,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;KACH;AAED;;;;AAIG;AACI,IAAA,GAAG,CAAC,UAAkB,EAAA;QAC3B,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;AACtE,QAAA,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;KAC3C;AAED;;AAEG;AACI,IAAA,QAAQ,CAAC,UAAkB,EAAA;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;KACrD;AAED;;;;AAIG;AACI,IAAA,MAAM,CAAC,UAAkB,EAAA;QAC9B,MAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;AAClD,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;AAEG;IACI,UAAU,GAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC;KAC5C;AAED;;AAEG;IACI,YAAY,GAAA;QACjB,MAAM,OAAO,GAAiB,EAAE,CAAC;AACjC,QAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;AAC3C,SAAA;AACD,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;AAEG;IACI,WAAW,GAAA;QAChB,MAAM,WAAW,GAAa,EAAE,CAAC;AACjC,QAAA,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;AAClD,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;AACnC,SAAA;AACD,QAAA,OAAO,WAAW,CAAC;KACpB;AAED;;AAEG;IACI,YAAY,GAAA;QACjB,MAAM,YAAY,GAAa,EAAE,CAAC;AAClC,QAAA,MAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;AAClD,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;AACrC,SAAA;AACD,QAAA,OAAO,YAAY,CAAC;KACrB;AAED;;AAEG;IACI,MAAM,CAAC,UAAsC,EAAE,EAAA;QACpD,MAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,YAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,
MAAM,CAAC,KAAK,CAAC;AACpC,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;AACvD,gBAAA,MAAM,CAAC,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;AAClD,aAAA;AACF,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;AAEG;IACI,QAAQ,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE,YAAY,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;KAC5D;AAED;;AAEG;IACI,KAAK,GAAA;QACV,MAAM,sBAAsB,GAAmB,EAAE,CAAC;AAClD,QAAA,KAAK,MAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,MAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,sBAAsB,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;AACpD,SAAA;AACD,QAAA,OAAO,IAAI,WAAW,CAAC,sBAAsB,CAAC,CAAC;KAChD;AACF;;AC5PD;AACA;AAEA;;;AAGG;AACG,SAAU,YAAY,CAAC,KAAa,EAAA;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;AAGG;AACG,SAAU,eAAe,CAAC,KAAiB,EAAA;;;IAG/C,MAAM,WAAW,GAAG,KAAK,YAAY,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;AAC/F,IAAA,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;AAGG;AACG,SAAU,YAAY,CAAC,KAAa,EAAA;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC;;AC5BA;AACA;AACA;;AAEG;AACU,MAAA,SAAS,GAAG;AACvB;;AAEG;AACH,IAAA,eAAe,EAAE,OAAO;AAExB;;AAEG;AACH,IAAA,IAAI,EAAE,OAAO;AAEb;;AAEG;AACH,IAAA,KAAK,EAAE,QAAQ;AAEf;;AAEG;AACH,IAAA,UAAU,EAAE,YAAY;AAExB;;AAEG;AACH,IAAA,WAAW,EAAE,aAAa;AAE1B;;AAEG;AACH,IAAA,QAAQ,EAAE,UAAU;AAEpB;;AAEG;AACH,IAAA,SAAS,EAAE,WAAW;AAEtB,IAAA,aAAa,EAAE;AACb;;AAEG;AACH,QAAA,SAAS,EAAE;AACT,YAAA,GAAG,EAAE,KAAK;AACV,YAAA,GAAG,EAAE,KAAK;AACV,YAAA,MAAM,EAAE,QAAQ;AAChB,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,KAAK,EAAE,OAAO;AACd,YAAA,IAAI,EAAE,MAAM;AACZ,YAAA,KAAK,EAAE,OAAO;AACf,SAAA;AAED,QAAA,WAAW,EAAE;AACX,YAAA,eAAe,EAAE,GAAG;AACpB,YAAA,kBAAkB,EAAE,GAAG;AACxB,SAAA;AACF,KAAA;AAED;;AAEG;AACH,IAAA,eAAe,EAAE;AACf;;AAEG;AACH,QAAA,aAAa,EAAE,eAAe;AAE9B,QAAA,oBAAoB,EAAE,QAAQ;AAE9B;;;;AAIG;AACH,QAAA,WAAW,EAAE,aAAa;AAE1B;;AAEG;AACH,QAAA,UAAU,EAAE,YAAY;AACzB,KAAA;;;ACnFH;AACA;AAEA;;AAEG;AACI,MAAM,WAAW,GAAG,IAAI;AAC/B;;AAEG;AACI,MAAM,WAAW,GAAG;;ACV3B;AAUA,MAAM,cAAc,GAClB,gFAAgF,CAAC;AAEnF;;AAEG;AACU,MAAA,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;AAClB,IAAA,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK;AAY1B;;;;;AAKG;AACG,SAAU,SAAS,CAAC,GAAW,EAAA;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;AAC3B,SAAA,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;AACpB,SAAA,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;AACpB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;AACrB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;AACrB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;AAMG;AACG,SAAU,aAAa,CAAC,QAA+B,EAAA;IAC3D,MAAM,gBAAgB,GAAQ,EAAE,CAAC;AACjC,IAAA,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;AAC5C,IAAA,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;AAC5C,IAAA,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;AAC1C,IAAA,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;AAMG;AACG,SAAU,YAAY,CAAC,OAAwB,EAAA;AACnD,IAAA,MAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;AAC3B,QAAA,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;AACjD,KAAA;AACD,IAAA,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;AAKG;AACG,SAAU,WAAW,CAAC,IAAY,EAAA;AACtC,IAAA,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;AAIG;SACa,YAAY,GAAA;IAC1B,OAAOA,OAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;AAQG;AACa,SAAA,2BAA2B,CACzC,gBAA4B,EAC5B,SAAkB,EAAA;IAElB,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;AACxC,IAAA,gBAAgB,CAAC,OAAO,CAAC,CAAC,cAAc,KAAI;AAC1C,QAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;AACvC,KAAC,CAAC,CAAC;AACH,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAqBD;;;;;AAKG;AACH;AACM,SAAU,iBAAiB,CA
AC,OAAqB,EAAA;AACrD,IAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;AACtC,QAAA,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;AACzD,KAAA;;IAED,OAAO,CAAC,EAAY,KAAU;QAC5B,OAAO;AACJ,aAAA,IAAI,CAAC,CAAC,IAAS,KAAI;;AAElB,YAAA,OAAO,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;AAC7B,SAAC,CAAC;AACD,aAAA,KAAK,CAAC,CAAC,GAAU,KAAI;;YAEpB,EAAE,CAAC,GAAG,CAAC,CAAC;AACV,SAAC,CAAC,CAAC;AACP,KAAC,CAAC;AACJ,CAAC;AAED;;;;AAIG;AACG,SAAU,wBAAwB,CACtC,OAAuC,EAAA;AAEvC,IAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;AACtC,QAAA,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;AACzD,KAAA;IACD,OAAO,CAAC,EAAsB,KAAU;QACtC,OAAO;AACJ,aAAA,IAAI,CAAC,CAAC,IAA2B,KAAI;AACpC,YAAA,OAAO,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACnF,SAAC,CAAC;AACD,aAAA,KAAK,CAAC,CAAC,GAAU,KAAI;AACpB,YAAA,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;AAC5B,SAAC,CAAC,CAAC;AACP,KAAC,CAAC;AACJ,CAAC;AAEK,SAAU,kBAAkB,CAChC,GAAY,EACZ,WAAmB,EACnB,eAAwB,EACxB,YAAqB,EAAA;AAErB,IAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;AACvB,QAAA,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;AACb,KAAA;AAED,IAAA,IAAI,CAAC,eAAe,IAAI,CAAC,YAAY,EAAE;AACrC,QAAA,OAAO,EAAE,CAAC,WAAW,GAAG,GAAG,EAAE,CAAC;AAC/B,KAAA;IAED,MAAM,MAAM,GAAG,EAAE,CAAC,WAAW,GAAG,GAAG,EAAE,CAAC;IACtC,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,eAAe,GAAG,YAAY,EAAE,CAAC;AAC1D,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;AAIG;AACa,SAAA,WAAW,CAAC,eAAwB,EAAE,WAAkB,EAAA;IACtE,MAAM,mBAAmB,GAAG,eAE3B,CAAC;AACF,IAAA,WAAW,CAAC,OAAO,CAAC,CAAC,UAAU,KAAI;AACjC,QAAA,MAAM,CAAC,mBAAmB,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,CAAC,IAAI,KAAI;AAChE,YAAA,mBAAmB,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;AACnE,SAAC,CAAC,CAAC;AACL,KAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,mBAAmB,GACvB,qKAAqK,CAAC;AAExK;;;;AAIG;AACG,SAAU,UAAU,CAAC,KAAa,EAAA;AACtC,IAAA,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;AAMG;SACa,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB,EAAA;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;AAKG;AACG,SAAU,eAAe,CAAC,KAAc,EAAA;AAC5C,IAAA,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;AAEK,SAAU,mBAAmB,CAAC,IAAY,EAAA;AAC9C,IAAA,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;AACrB,QAAA,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC1B,KAAA;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;AACxC,KAAA;AACD,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAOD;;;AAGG;AACG,SAAU,QAAQ,CAAC,KAAc,EAAA;AACrC,IAAA,QACE,OAAO,KAAK,KAAK,QAAQ;AACzB,QAAA,KAAK,KAAK,IAAI;AACd,QAAA,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC;AACrB,QAAA,EAAE,KAAK,YAAY,MAAM,CAAC;AAC1B,QAAA,EAAE,KAAK,YAAY,IAAI,CAAC,EACxB;AACJ;;ACxRA;AAQA;AAEA;;;;AAIG;MACU,UAAU,CAAA;AACrB,IAAA,WAAA;AACE;;AAEG;AACa,IAAA,YAAA,GAAuC,EAAE;AACzD;;AAEG;IACa,KAAe,EAAA;QAJf,IAAY,CAAA,YAAA,GAAZ,YAAY,CAA6B;QAIzC,IAAK,CAAA,KAAA,GAAL,KAAK,CAAU;KAC7B;AAEJ;;;;;AAKG;AACH,IAAA,mBAAmB,CAAC,MAAc,EAAE,KAAc,EAAE,UAAkB,EAAA;AACpE,QAAA,MAAM,cAAc,GAAG,CACrB,cAAuC,EACvC,eAAoB,KACX;AACT,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,CAAA,EAAI,UAAU,CAAA,cAAA,EAAiB,KAAK,CAAA,iCAAA,EAAoC,cAAc,CAAA,GAAA,EAAM,eAAe,CAAA,CAAA,CAAG,CAC/G,CAAC;AACJ,SAAC,CAAC;AACF,QAAA,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YAC5C,MAAM,aAAa,GAAG,KAAe,CAAC;YACtC,MAAM,EACJ,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,gBAAgB,EAChB,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,SAAS,EACT,UAAU,EACV,OAAO,EACP,WAAW,GACZ,GAAG,MAAM,CAAC,WAAW,CAAC;AACvB,YAAA,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;AACtE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;AACD,YAAA,IAAI,gB
AAgB,IAAI,SAAS,IAAI,aAAa,IAAI,gBAAgB,EAAE;AACtE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;AACD,YAAA,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;AACrE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;AACD,YAAA,IAAI,gBAAgB,IAAI,SAAS,IAAI,aAAa,GAAG,gBAAgB,EAAE;AACrE,gBAAA,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACtD,aAAA;YACD,MAAM,YAAY,GAAG,KAAc,CAAC;YACpC,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;AAC3D,gBAAA,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;AAC7D,gBAAA,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;AACxC,aAAA;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,QAAQ,EAAE;AAC3D,gBAAA,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,YAAY,CAAC,MAAM,GAAG,SAAS,EAAE;AAC7D,gBAAA,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;AACxC,aAAA;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,aAAa,GAAG,UAAU,KAAK,CAAC,EAAE;AAC/D,gBAAA,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;AAC1C,aAAA;AACD,YAAA,IAAI,OAAO,EAAE;AACX,gBAAA,MAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;AACpF,gBAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;AAC9D,oBAAA,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,IACE,WAAW;gBACX,YAAY,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,CAAS,EAAE,EAAc,KAAK,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,EACnF;AACA,gBAAA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;AAC5C,aAAA;AACF,SAAA;KACF;AAED;;;;;;;;AAQG;IACH,SAAS,CACP,MAAc,EACd,MAAe,EACf,UAAmB,EACnB,UAA6B,EAAE,EAAA;;AAE/B,QAAA,MAAM,cAAc,GAAgC;AAClD,YAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;AAChC,YAAA,WAAW,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;AACzC,YAAA,UAAU,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,OAAO,GAAQ,EAAE,CAAC;AACtB,QAAA,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;AACf,YAAA,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;AACrC,SAAA;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YAC5C,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;AACrB,YAAA,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;AAC9B,SAAA;;;;;;;;;;AAYD,QAAA,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,MAAM,CAAC;AAEtC,QAAA,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;AAChD,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,qBAAA,CAAuB,CAAC,CAAC;AACvD,SAAA;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;AAChD,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,6BAAA,CAA+B,CAAC,CAAC;AAC/D,SAAA;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;AACtD,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,gBAAA,CAAkB,CAAC,CAAC;AAClD,SAAA;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;AAClB,SAAA;AAAM,aAAA;;YAEL,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,QAAQ,CAAC,KAAK,IAAI,EAAE;gBACvC,OAAO,GAAG,MAAM,CAAC;AAClB,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,+CAA+C,CAAC,KAAK,IAAI,EAAE;gBACrF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;AAC/D,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBAC/C,MAAM,UAAU,GAAe,MAAoB,CAAC;AACpD,gBAAA,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;AAChF,aAAA;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,sDAAsD,CAAC,KAAK,IAAI,EACjF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;AAC9D,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;AACpE,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAoB,CAAC,CAAC;AACpE,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,
EAAE;gBACnD,OAAO,GAAG,qBAAqB,CAC7B,IAAI,EACJ,MAAwB,EACxB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;AACH,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAA0B,EAC1B,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;AACH,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,sBAAsB,CAC9B,IAAI,EACJ,MAAyB,EACzB,MAAM,EACN,UAAU,EACV,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EACnB,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;;;;;;;AAQG;IACH,WAAW,CACT,MAAc,EACd,YAAqB,EACrB,UAAkB,EAClB,UAA6B,EAAE,EAAA;;AAE/B,QAAA,MAAM,cAAc,GAAgC;AAClD,YAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;AAChC,YAAA,WAAW,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;AACzC,YAAA,UAAU,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;SAC9C,CAAC;QACF,IAAI,YAAY,IAAI,SAAS,EAAE;AAC7B,YAAA,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;gBAIzE,YAAY,GAAG,EAAE,CAAC;AACnB,aAAA;;AAED,YAAA,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;AACrC,gBAAA,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;AACpC,aAAA;AACD,YAAA,OAAO,YAAY,CAAC;AACrB,SAAA;AAED,QAAA,IAAI,OAAY,CAAC;AACjB,QAAA,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;AACf,YAAA,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;AACrC,SAAA;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AAC7C,YAAA,OAAO,GAAG,wBAAwB,CAChC,IAAI,EACJ,MAAyB,EACzB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;AACH,SAAA;AAAM,aAAA;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,gBAAA,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;gBAC7C,MAAM,gBAAgB,GAAG,YAAuC,CAAC;AACjE;;;;AAIG;AACH,gBAAA,IACE,gBAAgB,CAAC,WAAW,CAAC,IAAI,SAAS;AAC1C,oBAAA,gBAAgB,CAAC,UAAU,CAAC,IAAI,SAAS,EACzC;AACA,oBAAA,YAAY,GAAG,gBAAgB,CAAC,UAAU,CAAC,CAAC;AAC7C,iBAAA;AACF,aAAA;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AAC1C,gBAAA,OAAO,GAAG,UAAU,CAAC,YAAsB,CAAC,CAAC;AAC7C,gBAAA,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;AACxB,iBAAA;AACF,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;AAChB,iBAAA;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;AACjB,iBAAA;AAAM,qBAAA;oBACL,OAAO,GAAG,YAAY,CAAC;AACxB,iBAAA;AACF,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,kDAAkD,CAAC,KAAK,IAAI,EAAE;gBACxF,OAAO,GAAG,YAAY,CAAC;AACxB,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,oCAAoC,CAAC,KAAK,IAAI,EAAE;AAC1E,gBAAA,OAAO,GAAG,IAAI,IAAI,CAAC,YAAsB,CAAC,CAAC;AAC5C,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACnD,gBAAA,OAAO,GAAG,cAAc,CAAC,YAAsB,CAAC,CAAC;AAClD,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAGC,YAAmB,CAAC,YAAsB,CAAC,CAAC;AACvD,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;AACpD,gBAAA,OAAO,GAAG,oBAAoB,CAAC,YAAsB,CAAC,CAAC;AACxD,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACnD,gBAAA,OAAO,GAAG,uBAAuB,CAC/B,IAAI,EACJ,MAAwB,EACxB,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;AACH,aAAA;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;AACrD,gBAAA,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,EACV,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;AACrB,YAAA,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;AAC/B,SAAA;AAED,QAAA,OAAO,OAAO,CAAC;KAChB;AACF,CAAA;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU,EAAA;AACtC,IAAA,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;AACrB,IAAA,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;AAC1C,QAAA,EAAE,GAAG,CAAC;AACP,KAAA;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,
MAAW,EAAA;IACpC,IAAI,CAAC,MAAM,EAAE;AACX,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AACD,IAAA,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;AACnC,QAAA,MAAM,IAAI,KAAK,CAAC,CAAA,uEAAA,CAAyE,CAAC,CAAC;AAC5F,KAAA;;IAED,MAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,CAAC,CAAC;;IAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW,EAAA;IACvC,IAAI,CAAC,GAAG,EAAE;AACR,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AAC5C,QAAA,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;AACxF,KAAA;;AAED,IAAA,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;;AAEhD,IAAA,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB,EAAA;IAClD,MAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;AACtB,IAAA,IAAI,IAAI,EAAE;QACR,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAEjC,QAAA,KAAK,MAAM,IAAI,IAAI,QAAQ,EAAE;AAC3B,YAAA,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;AACzC,gBAAA,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AACvD,aAAA;AAAM,iBAAA;gBACL,YAAY,IAAI,IAAI,CAAC;AACrB,gBAAA,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;AACnB,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB,EAAA;IACtC,IAAI,CAAC,CAAC,EAAE;AACN,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AAED,IAAA,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACnC,QAAA,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;AAC3B,KAAA;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS,EAAA;IAC/B,IAAI,CAAC,CAAC,EAAE;AACN,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AACD,IAAA,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU,EAAA;AAC3E,IAAA,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AACxC,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAC,CAAA,EAAG,UAAU,CAAe,YAAA,EAAA,KAAK,CAA0B,wBAAA,CAAA,CAAC,CAAC;AAC9E,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AAC/C,YAAA,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAC,CAAA,EAAG,UAAU,CAAgB,aAAA,EAAA,KAAK,CAA2B,yBAAA,CAAA,CAAC,CAAC;AAChF,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;AAC7C,YAAA,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACb,CAAA,EAAG,UAAU,CAAgB,aAAA,EAAA,KAAK,CAA4C,0CAAA,CAAA,CAC/E,CAAC;AACH,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;AAChD,YAAA,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAC,CAAA,EAAG,UAAU,CAAe,YAAA,EAAA,KAAK,CAA2B,yBAAA,CAAA,CAAC,CAAC;AAC/E,aAAA;AACF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,KAAK,IAAI,EAAE;AAC/C,YAAA,MAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;AACvB,gBAAA,UAAU,KAAK,UAAU;AACzB,gBAAA,EAAE,KAAK,YAAY,WAAW,CAAC;AAC/B,gBAAA,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;AAC1B,gBAAA,EAAE,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,OAAO,IAAI,KAAK,QAAQ,KAAK,KAAK,YAAY,IAAI,CAAC,EACpF;AACA,gBAAA,MAAM,IAAI,KAAK,CACb,GAAG,UAAU,CAAA,qGAAA,CAAuG,CACrH,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU,EAAA;IAClF,IAAI,CAAC,aAAa,EAAE;AAClB,QAAA,MAAM,IAAI,KAAK,CACb,qDAAqD,UAAU,CAAA,iBAAA,CAAmB,CACnF,CAAC;AACH,KAAA;IACD,MAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,CAAC,IAAI,KAAI;AAC5C,QAAA,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW
,EAAE,CAAC;AACnD,SAAA;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;AACxB,KAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;AACd,QAAA,MAAM,IAAI,KAAK,CACb,CAAG,EAAA,KAAK,6BAA6B,UAAU,CAAA,wBAAA,EAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,CAAA,CAAA,CAAG,CACL,CAAC;AACH,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB,EAAA;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;AACtB,QAAA,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;AAClC,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,4BAAA,CAA8B,CAAC,CAAC;AAC9D,SAAA;AACD,QAAA,WAAW,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;AAC7C,KAAA;AACD,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAiB,EAAA;IACnE,IAAI,WAAW,GAAW,EAAE,CAAC;IAC7B,IAAI,KAAK,IAAI,SAAS,EAAE;AACtB,QAAA,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;AAClC,YAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,4BAAA,CAA8B,CAAC,CAAC;AAC9D,SAAA;AACD,QAAA,WAAW,GAAG,iBAAiB,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC;AAC9C,KAAA;AACD,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB,EAAA;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;AACtC,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,0DAAA,CAA4D,CAAC,CAAC;AAC5F,aAAA;YACD,KAAK;AACH,gBAAA,KAAK,YAAY,IAAI;sBACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;AACtC,sBAAE,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AACtD,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACjD,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,0DAAA,CAA4D,CAAC,CAAC;AAC5F,aAAA;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;AACrF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,oBAAoB,CAAC,KAAK,IAAI,EAAE;AACxD,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,2DAAA,CAA6D,CAAC,CAAC;AAC7F,aAAA;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;AACrF,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACjD,YAAA,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;AACA,gBAAA,MAAM,IAAI,KAAK,CACb,CAAA,EAAG,UAAU,CAAqE,mEAAA,CAAA;AAChF,oBAAA,CAAA,iDAAA,CAAmD,CACtD,CAAC;AACH,aAAA;AACD,YAAA,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;AAC/B,SAAA;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;AACjD,YAAA,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACb,CAAA,EAAG,UAAU,CAAsD,mDAAA,EAAA,KAAK,CAAI,EAAA,CAAA,CAC7E,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;AAC1B,QAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,uBAAA,CAAyB,CAAC,CAAC;AACzD,KAAA;AACD,IAAA,MAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;AACxC,IAAA,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,CAAwD,sDAAA,CAAA;YACtD,CAA0C,uCAAA,EAAA,UAAU,CAAG,CAAA,CAAA,CAC1D,CAAC;AACH,KAAA;IACD,MAAM,SAAS,GAAG,EAAE,CAAC;AACrB,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;AACtC,QAAA,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,C
AAC,CAAC,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;AAE1F,QAAA,IAAI,KAAK,IAAI,WAAW,CAAC,YAAY,EAAE;AACrC,YAAA,MAAM,QAAQ,GAAG,WAAW,CAAC,kBAAkB;AAC7C,kBAAE,CAAA,MAAA,EAAS,WAAW,CAAC,kBAAkB,CAAE,CAAA;kBACzC,OAAO,CAAC;AACZ,YAAA,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,KAAK,WAAW,EAAE;AACzC,gBAAA,SAAS,CAAC,CAAC,CAAC,GAAQ,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,eAAe,CAAE,CAAC;AACtC,gBAAA,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,WAAW,CAAC,YAAY,EAAE,CAAC;AACtE,aAAA;AAAM,iBAAA;AACL,gBAAA,SAAS,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC;gBAClB,SAAS,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;AACnD,gBAAA,SAAS,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,WAAW,CAAC,YAAY,EAAE,CAAC;AACtE,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,SAAS,CAAC,CAAC,CAAC,GAAG,eAAe,CAAC;AAChC,SAAA;AACF,KAAA;AACD,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,QAAA,MAAM,IAAI,KAAK,CAAC,GAAG,UAAU,CAAA,wBAAA,CAA0B,CAAC,CAAC;AAC1D,KAAA;AACD,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;AACpC,IAAA,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,CAA2D,yDAAA,CAAA;YACzD,CAA0C,uCAAA,EAAA,UAAU,CAAG,CAAA,CAAA,CAC1D,CAAC;AACH,KAAA;IACD,MAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAE;AACrC,QAAA,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;;AAE1F,QAAA,cAAc,CAAC,GAAG,CAAC,GAAG,iBAAiB,CAAC,SAAS,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AACrF,KAAA;;AAGD,IAAA,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;AAChC,QAAA,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB,GAAG,CAAS,MAAA,EAAA,MAAM,CAAC,kBAAkB,CAAA,CAAE,GAAG,OAAO,CAAC;QAE5F,MAAM,MAAM,GAAG,cAAc,CAAC;AAC9B,QAAA,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,EAAE,CAAC;AAC1D,QAAA,OAAO,MAAM,CAAC;AACf,KAAA;AAED,IAAA,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;AAKG;AACH,SAAS,2BAA2B,CAClC,UAAsB,EACtB,MAAuB,EACvB,UAAkB,EAAA;AAElB,IAAA,MAAM,oBAAoB,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IAE9D,IAAI,CAAC,oBAAoB,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE;QAClD,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,OAAO,WAAW,aAAX,WAAW,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAX,WAAW,CAAE,IAAI,CAAC,oBAAoB,CAAC;AAC/C,KAAA;AAED,IAAA,OAAO,oBAAoB,CAAC;AAC9B,CAAC;AAED;;;;;AAKG;AACH,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAuB,EACvB,UAAkB,EAAA;AAElB,IAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;IACxC,IAAI,CAAC,SAAS,EAAE;AACd,QAAA,MAAM,IAAI,KAAK,CACb,yBAAyB,UAAU,CAAA,iCAAA,EAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,CAAA,EAAA,CAAI,CACN,CAAC;AACH,KAAA;AAED,IAAA,OAAO,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;AAC5C,CAAC;AAED;;;;AAIG;AACH,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB,EAAA;AAElB,IAAA,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,WAAW,GAAG,uBAAuB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC5E,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,CAAmD,gDAAA,EAAA,MAAM,CAAC,IAAI,CAAC,SAAS,CAAI,EAAA,CAAA,CAAC,CAAC;AAC/F,SAAA;QACD,UAAU,GAAG,WAAW,KAAA,IAAA,IAAX,WAAW,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAX,WAAW,CAAE,IAAI,CAAC,eAAe,CAAC;QAC/C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,CAAqD,mDAAA,CAAA;AACnD,gBAAA,CAAA,QAAA,EAAW,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CACpC,WAAA,EAAA,MAAM,CAAC,IAAI,CAAC,SACd,CAAA,cAAA,EAAiB,UAAU,CAAA,EAAA,CAAI,CAClC,CAAC;AACH,SAAA;AACF,KAAA;AAED,IAAA,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB,EAClB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,Y
AAY,CAAC,CAAC;AACzE,KAAA;IAED,IAAI,MAAM,IAAI,SAAS,EAAE;QACvB,MAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;AACzC,YAAA,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;AACV,aAAA;AAED,YAAA,IAAI,QAA4B,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/B,oBAAA,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;AACnC,iBAAA;AAAM,qBAAA;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;AACpE,iBAAA;AACF,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;AACjE,gBAAA,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;AAEvB,gBAAA,KAAK,MAAM,QAAQ,IAAI,KAAK,EAAE;AAC5B,oBAAA,MAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IACE,WAAW,IAAI,SAAS;AACxB,yBAAC,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,CAAC,EACvE;AACA,wBAAA,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;AAC7B,qBAAA;AACD,oBAAA,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;AACvC,iBAAA;AACF,aAAA;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;AAC7B,gBAAA,IAAI,KAAK,IAAI,MAAM,CAAC,YAAY,EAAE;AAChC,oBAAA,MAAM,QAAQ,GAAG,MAAM,CAAC,kBAAkB;AACxC,0BAAE,CAAA,MAAA,EAAS,MAAM,CAAC,kBAAkB,CAAE,CAAA;0BACpC,OAAO,CAAC;AACZ,oBAAA,YAAY,CAAC,WAAW,CAAC,GACpB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,YAAY,CAAC,WAAW,CAAC,CAC5B,EAAA,EAAA,CAAC,QAAQ,GAAG,MAAM,CAAC,YAAY,GAChC,CAAC;AACH,iBAAA;AACD,gBAAA,MAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;AAClC,sBAAE,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;sBAChD,UAAU,CAAC;AAEjB,gBAAA,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AAC5F,gBAAA,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;AACA,oBAAA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;AACrC,iBAAA;AAED,gBAAA,MAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;AAEF,gBAAA,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;AAC1D,oBAAA,MAAM,KAAK,GAAG,iBAAiB,CAAC,cAAc,EAAE,eAAe,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;AACjF,oBAAA,IAAI,KAAK,IAAI,cAAc,CAAC,cAAc,EAAE;;;;wBAI1C,YAAY,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC;wBAC5D,YAAY,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;AACvD,qBAAA;AAAM,yBAAA,IAAI,KAAK,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/C,wBAAA,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,cAAc,CAAC,cAAe,GAAG,KAAK,EAAE,CAAC;AACtE,qBAAA;AAAM,yBAAA;AACL,wBAAA,YAAY,CAAC,QAAQ,CAAC,GAAG,KAAK,CAAC;AAChC,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;QAED,MAAM,0BAA0B,GAAG,2BAA2B,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;AAC/F,QAAA,IAAI,0BAA0B,EAAE;YAC9B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1C,YAAA,KAAK,MAAM,cAAc,IAAI,MAAM,EAAE;AACnC,gBAAA,MAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAE,KAAK,cAAc,CAAC,CAAC;AAC5E,gBAAA,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,EACzC,OAAO,CACR,CAAC;AACH,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,OAAO,CAAC;AAChB,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,iBAAiB,CACxB,cAAsB,EACtB,eAAoB,EACpB,KAAc,EACd,OAAoC,EAAA;AAEpC,IAAA,IAAI,CAAC,KAAK,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;AAC1C,QAAA,OAAO,eAAe,CAAC;AACxB,KAAA;AAED,IAAA,MAAM,QAAQ,GAAG,cAAc,CAAC,kBAAkB;AAChD,UAAE,CAAA,MAAA,EAAS,cAAc,CAAC,kBAAkB,CAAE,CAAA;UAC5C,OAAO,CAAC;IACZ,MAAM,YAAY,GAAG,EAAE,CAAC,QAAQ,GAAG,cAAc,CAAC,YAAY,EAAE,CAAC;AAEjE,IAAA,IAAI,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACpD,QAAA,IAAI,eAAe,CAAC,WAAW,CAAC,EAAE;AAChC,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,MAAM,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAa,eAAe,
CAAE,CAAC;AAC3C,YAAA,MAAM,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC;AACnC,YAAA,OAAO,MAAM,CAAC;AACf,SAAA;AACF,KAAA;IACD,MAAM,MAAM,GAAQ,EAAE,CAAC;AACvB,IAAA,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;AAC7C,IAAA,MAAM,CAAC,WAAW,CAAC,GAAG,YAAY,CAAC;AACnC,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB,EAAE,OAAoC,EAAA;AACtF,IAAA,OAAO,CAAC,WAAW,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAClE,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB,EAClB,OAAoC,EAAA;;AAEpC,IAAA,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;AACnF,KAAA;IAED,MAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,MAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAE;AACzC,QAAA,MAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QACpC,MAAM,EAAE,cAAc,EAAE,OAAO,EAAE,cAAc,EAAE,GAAG,cAAc,CAAC;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;AACpC,QAAA,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;AACzD,YAAA,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;AACxD,SAAA;AAED,QAAA,MAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;AAC3F,QAAA,IAAI,sBAAsB,EAAE;YAC1B,MAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAK,MAAM,SAAS,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AACjD,gBAAA,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;AAChD,oBAAA,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,EAClB,OAAO,CACR,CAAC;AACH,iBAAA;AAED,gBAAA,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACtC,aAAA;AACD,YAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;AAC5B,SAAA;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,WAAW,CAAC,EAAE;gBAC9D,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,WAAW,CAAC,CAAC,OAAQ,CAAC,EACnC,kBAAkB,EAClB,OAAO,CACR,CAAC;AACH,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/B;;;;;;;;;;;;;AAaE;AACF,oBAAA,MAAM,OAAO,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;AACvC,oBAAA,MAAM,WAAW,GAAG,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAG,cAAe,CAAC,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,EAAE,CAAC;AACrD,oBAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,WAAW,EACX,kBAAkB,EAClB,OAAO,CACR,CAAC;AACH,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;AAC7C,oBAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,OAAO,CACR,CAAC;AACH,iBAAA;AACF,aAAA;AACF,SAAA;AAAM,aAAA;;AAEL,YAAA,IAAI,gBAAgB,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;AAEvB,YAAA,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;AACxB,gBAAA,IAAI,CAAC,GAAG;oBAAE,MAAM;AAChB,gBAAA,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;AACjB,aAAA;YACD,gBAAgB,GAAG,GAAG,CAAC;AACvB,YAAA,MAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;AAUtE,YAAA,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;AACA,gBAAA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;AAC1C,aAAA;AAED,YAAA,IAAI,eAAe,CAAC;;AAEpB,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;AAC7E,gBAAA,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;AACrC,gBAAA,MAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;;;AAGF,gBAAA,KAAK,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE;AAC7C,oBAAA,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,EAAE;AAC
3D,wBAAA,aAAa,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;AACtB,qBAAA;AACF,iBAAA;gBACD,QAAQ,GAAG,aAAa,CAAC;AAC1B,aAAA;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;AACtF,gBAAA,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,EAClB,OAAO,CACR,CAAC;AACF,gBAAA,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;AACjC,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,MAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;AACpE,IAAA,IAAI,0BAA0B,EAAE;AAC9B,QAAA,MAAM,oBAAoB,GAAG,CAAC,gBAAwB,KAAa;AACjE,YAAA,KAAK,MAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,MAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;AAC5E,gBAAA,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;AACjC,oBAAA,OAAO,KAAK,CAAC;AACd,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACd,SAAC,CAAC;AAEF,QAAA,KAAK,MAAM,gBAAgB,IAAI,YAAY,EAAE;AAC3C,YAAA,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,EAC3C,OAAO,CACR,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,YAAY,EAAE;QACvB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AAC3C,YAAA,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;AAC3B,gBAAA,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;AACnC,gBAAA,CAAC,oBAAoB,CAAC,GAAG,EAAE,OAAO,CAAC,EACnC;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;AACnC,aAAA;AACF,SAAA;AACF,KAAA;AAED,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB,EAClB,OAAoC,EAAA;AAEpC,IAAA,MAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;AAChC,IAAA,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,CAA2D,yDAAA,CAAA;YACzD,CAA0C,uCAAA,EAAA,UAAU,CAAE,CAAA,CACzD,CAAC;AACH,KAAA;AACD,IAAA,IAAI,YAAY,EAAE;QAChB,MAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE;AAC3C,YAAA,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;AAC7F,SAAA;AACD,QAAA,OAAO,cAAc,CAAC;AACvB,KAAA;AACD,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB,EAClB,OAAoC,EAAA;AAEpC,IAAA,MAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;AACpC,IAAA,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,CAAwD,sDAAA,CAAA;YACtD,CAA0C,uCAAA,EAAA,UAAU,CAAE,CAAA,CACzD,CAAC;AACH,KAAA;AACD,IAAA,IAAI,YAAY,EAAE;AAChB,QAAA,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;AAEhC,YAAA,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;AAC/B,SAAA;QAED,MAAM,SAAS,GAAG,EAAE,CAAC;AACrB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACnC,OAAO,EACP,YAAY,CAAC,CAAC,CAAC,EACf,CAAA,EAAG,UAAU,CAAA,CAAA,EAAI,CAAC,CAAG,CAAA,CAAA,EACrB,OAAO,CACR,CAAC;AACH,SAAA;AACD,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AACD,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD,EAAA;IAExD,MAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;AAC5F,IAAA,IAAI,wBAAwB,EAAE;AAC5B,QAAA,MAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;AAClC,YAAA,MAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;AACnC,gBAAA,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;AACjE,gBAAA,MAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;AAC7B,sBAAE,kBAAkB;AACpB,sBAAE,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,MAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;AACrF,gBAAA,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;AAC5B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB,EAAA;AAEvB,IAAA,QAC
E,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB,EAAA;AAClF,IAAA,QACE,QAAQ;AACR,QAAA,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;AACJ,CAAC;AAyTD;;AAEG;AACG,SAAU,eAAe,CAAC,WAAoB,EAAA;IAClD,MAAM,eAAe,GAAG,WAAsC,CAAC;IAC/D,IAAI,WAAW,IAAI,SAAS;AAAE,QAAA,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;AACrC,QAAA,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;AAClD,QAAA,OAAO,WAAW,CAAC;AACpB,KAAA;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;AACtC,QAAA,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,MAAM,KAAK,GAAG,EAAE,CAAC;AACjB,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AAC7C,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAAM,SAAA,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,MAAM,UAAU,GAA2B,EAAE,CAAC;AAC9C,QAAA,KAAK,MAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC,CAAC;AACnE,SAAA;AACD,QAAA,OAAO,UAAU,CAAC;AACnB,KAAA;AACD,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;AAEG;AACH,SAAS,OAAO,CAAmB,CAAW,EAAA;IAC5C,MAAM,MAAM,GAAQ,EAAE,CAAC;AACvB,IAAA,KAAK,MAAM,GAAG,IAAI,CAAC,EAAE;AACnB,QAAA,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;AACnB,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;AAEG;AACH;AACO,MAAM,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;AACX,CAAA;;ACp9CD;AAmKM,SAAU,iBAAiB,CAAC,MAAe,EAAA;AAC/C,IAAA,IAAI,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACxC,MAAM,UAAU,GAAG,MAOlB,CAAC;AACF,QAAA,IACE,OAAO,UAAU,CAAC,GAAG,KAAK,QAAQ;AAClC,YAAA,OAAO,UAAU,CAAC,MAAM,KAAK,QAAQ;AACrC,YAAA,OAAO,UAAU,CAAC,OAAO,KAAK,QAAQ;AACtC,YAAA,iBAAiB,CAAC,UAAU,CAAC,OAAO,CAAC;AACrC,YAAA,OAAO,UAAU,CAAC,yBAAyB,KAAK,UAAU;AAC1D,YAAA,OAAO,UAAU,CAAC,OAAO,KAAK,UAAU;AACxC,YAAA,OAAO,UAAU,CAAC,KAAK,KAAK,UAAU,EACtC;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACF,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;AAKG;MACU,WAAW,CAAA;AAsGtB,IAAA,WAAA,CACE,GAAY,EACZ,MAAoB,EACpB,IAAc,EACd,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,kBAA4B,EAC5B,yBAAuC,EAAA;AAEvC,QAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,QAAA,IAAI,CAAC,yBAAyB,GAAG,yBAAyB,CAAC;AAC3D,QAAA,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;AACrB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;AAC9B,QAAA,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;AAC/E,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;AACnB,QAAA,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;AAC1B,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;AAChD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;AAC5B,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACzC,QAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC3B,QAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,YAAY,EAAE,CAAC;KAC/E;AAED;;;;AAIG;IACH,yBAAyB,GAAA;AACvB,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,SAAA;AACD,QAAA,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;AACb,YAAA,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;AACjD,SAAA;KACF;AAED;;;;AAIG;AACH,IAAA,OAAO,CAAC,OAA8B,EAAA;QACpC,IAAI,CAAC,OAAO,EAAE;AA
CZ,YAAA,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;AAC/C,SAAA;AAED,QAAA,IACE,OAAO,CAAC,MAAM,KAAK,SAAS;YAC5B,OAAO,CAAC,MAAM,KAAK,IAAI;YACvB,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAC5C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;AACrD,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;AACvC,YAAA,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;AACH,SAAA;AAED,QAAA,IACE,CAAC,OAAO,CAAC,YAAY,KAAK,SAAS;YACjC,OAAO,CAAC,YAAY,KAAK,IAAI;YAC7B,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;AACpD,aAAC,OAAO,CAAC,GAAG,KAAK,SAAS;gBACxB,OAAO,CAAC,GAAG,KAAK,IAAI;gBACpB,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EAC5C;AACA,YAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,SAAA;;QAGD,IAAI,OAAO,CAAC,GAAG,EAAE;AACf,YAAA,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;AACnC,gBAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;AAC1D,aAAA;AACD,YAAA,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;AACxB,SAAA;;QAGD,IAAI,OAAO,CAAC,MAAM,EAAE;AAClB,YAAA,MAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;AAC3F,YAAA,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;AACrB,oBAAA,OAAO,CAAC,MAAM;oBACd,4CAA4C;AAC5C,oBAAA,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;AACH,aAAA;AACF,SAAA;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;QAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,YAAA,MAAM,EAAE,YAAY,EAAE,cAAc,EAAE,GAAG,OAAO,CAAC;AACjD,YAAA,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;AACpC,gBAAA,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;AACnE,aAAA;AACD,YAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,gBAAA,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;AAClD,aAAA;AACD,YAAA,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,GAAG,GACL,OAAO;AACP,iBAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;iBACjC,YAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC;YACxE,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,uBAAuB,CAAC,CAAC;AACpD,YAAA,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,cAAc,EAAE;AACnB,oBAAA,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,CAAA,wEAAA,CAA0E,CACxG,CAAC;AACH,iBAAA;AACD,gBAAA,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI,EAAA;oBAC7B,MAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;AACxC,oBAAA,MAAM,SAAS,GAAI,cAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;AAClB,wBAAA,SAAS,KAAK,SAAS;wBACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;AACA,wBAAA,MAAM,yBAAyB,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,EAAE,SAAS,EAAE,CAAC,CAAC,CAAC;AAC/E,wBAAA,MAAM,IAAI,KAAK,CACb,iBAAiB,YAAY,CAAA,6BAAA,EAAgC,aAAa,CAAE,CAAA;AAC1E,4BAAA,CAAA,2CAAA,EAA8C,yBAAyB,CAAG,CAAA,CAAA;AAC1E,4BAAA,CAAA,uEAAA,EAA0E,aAAa,CAA6B,2BAAA,CAAA;4BACpH,CAAwC,qCAAA,EAAA,aAAa,CAA6D,2DAAA,CAAA,CACrH,CAAC;AACH,qBAAA;AAED,oBAAA,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AAC3C,wBAAA,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;AACxD,qBAAA;AAED,oBAAA,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AAC3C,wBAAA,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;AACpB,4BAAA,MAAM,IAAI,KAAK,CACb,0BAA0B,aAAa,CAAA,iEAAA,CAAmE,CAC3G,CAAC;AACH,yBAAA;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;AAC1C,yBAAA;AAAM,6BAAA;AACL,4BAAA,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;AAC9D,yBAAA;AACF,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA;AACD,YAAA,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;AAChB,SAAA;;QAGD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,MAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;AAChD,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,CAA6E,2EAAA,CAAA;oBAC3E,CAAqF,mFAAA,CAAA;AACrF,oBAAA,CAAA,yIAAA,CAA2I,CAC9I,CAAC;AAC
H,aAAA;;AAED,YAAA,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;AAC5C,gBAAA,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;AACjB,aAAA;;YAED,MAAM,WAAW,GAAG,EAAE,CAAC;;AAEvB,YAAA,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;AAChB,YAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;AAC5C,gBAAA,MAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;AACxD,gBAAA,IAAI,UAAU,EAAE;AACd,oBAAA,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;AAClC,wBAAA,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;AAC7D,qBAAA;AAAM,yBAAA,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;AACzC,wBAAA,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;AACrB,4BAAA,MAAM,IAAI,KAAK,CACb,2BAA2B,cAAc,CAAA,iEAAA,CAAmE,CAC7G,CAAC;AACH,yBAAA;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;AAC/C,yBAAA;AAAM,6BAAA;AACL,4BAAA,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;AAC9E,4BAAA,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;AACnE,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;;YAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,SAAA;;QAGD,IAAI,OAAO,CAAC,OAAO,EAAE;AACnB,YAAA,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAK,MAAM,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AACrD,gBAAA,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;AACnD,aAAA;AACF,SAAA;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;AAC9C,SAAA;;AAED,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AAC5D,SAAA;;QAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;AACrE,SAAA;;AAGD,QAAA,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,IAAI,OAAO,CAAC,IAAI,KAAK,IAAI,EAAE;;YAEvD,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;AAClD,iBAAA;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;AAC9D,iBAAA;AACF,aAAA;AAAM,iBAAA;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;AACH,iBAAA;AACD,gBAAA,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC1C,iBAAA;AACF,aAAA;AACF,SAAA;QAED,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AACxC,SAAA;QAED,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,IAAI,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;AAC9C,SAAA;AAED,QAAA,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AACvC,QAAA,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;AACrD,QAAA,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;AAEjD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;AAGG;IACH,KAAK,GAAA;AACH,QAAA,MAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,yBAAyB,CAC/B,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;AACjB,YAAA,MAAM,CAAC,QAAQ,GAAG,IA
AI,CAAC,QAAQ,CAAC;AACjC,SAAA;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;AAC3C,SAAA;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;AAC1B,YAAA,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;AACnD,SAAA;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;AAChC,YAAA,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;AAC/D,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;AChmBD;AAOA;;AAEG;MACU,QAAQ,CAAA;AAArB,IAAA,WAAA,GAAA;QACmB,IAAS,CAAA,SAAA,GAAwD,EAAE,CAAC;KAiItF;AA/HC;;AAEG;IACI,GAAG,GAAA;AACR,QAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;KAC/C;AAED;;AAEG;IACI,IAAI,GAAA;QACT,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KACpC;AAED;;;;AAIG;IACI,GAAG,CAAC,aAAqB,EAAE,cAAuB,EAAA;QACvD,MAAM,kBAAkB,GAAG,cAE1B,CAAC;AACF,QAAA,IAAI,aAAa,EAAE;AACjB,YAAA,IAAI,kBAAkB,KAAK,SAAS,IAAI,kBAAkB,KAAK,IAAI,EAAE;AACnE,gBAAA,MAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;AAChD,sBAAE,kBAAkB;AACpB,sBAAE,kBAAkB,CAAC,QAAQ,EAAE,CAAC;AAClC,gBAAA,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;AAC1C,aAAA;AAAM,iBAAA;AACL,gBAAA,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AACtC,aAAA;AACF,SAAA;KACF;AAED;;;AAGG;AACI,IAAA,GAAG,CAAC,aAAqB,EAAA;AAC9B,QAAA,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;KAClE;AAED;;AAEG;IACI,QAAQ,GAAA;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;AAChB,QAAA,KAAK,MAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;AAC1C,YAAA,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;AACf,aAAA;YACD,MAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AACrD,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,MAAM,gBAAgB,GAAG,EAAE,CAAC;AAC5B,gBAAA,KAAK,MAAM,qBAAqB,IAAI,cAAc,EAAE;oBAClD,gBAAgB,CAAC,IAAI,CAAC,CAAA,EAAG,aAAa,CAAI,CAAA,EAAA,qBAAqB,CAAE,CAAA,CAAC,CAAC;AACpE,iBAAA;AACD,gBAAA,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACtC,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,CAAG,EAAA,aAAa,CAAI,CAAA,EAAA,cAAc,EAAE,CAAC;AAChD,aAAA;AACF,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;AAEG;IACI,OAAO,KAAK,CAAC,IAAY,EAAA;AAC9B,QAAA,MAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;AAE9B,QAAA,IAAI,IAAI,EAAE;AACR,YAAA,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;AACxB,gBAAA,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AAC1B,aAAA;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;AACxB,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;AACpC,gBAAA,MAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;AACzC,gBAAA,QAAQ,YAAY;AAClB,oBAAA,KAAK,eAAe;AAClB,wBAAA,QAAQ,gBAAgB;AACtB,4BAAA,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;AAER,4BAAA,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;AAER,4BAAA;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;AACT,yBAAA;wBACD,MAAM;AAER,oBAAA,KAAK,gBAAgB;AACnB,wBAAA,QAAQ,gBAAgB;AACtB,4BAAA,KAAK,GAAG;AACN,gCAAA,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;AAER,4BAAA;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;AACT,yBAAA;wBACD,MAAM;AAER,oBAAA;AACE,wBAAA,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;AACzE,iBAAA;AACF,aAAA;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;AACrC,gBAAA,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;AAC3C,aAAA;AACF,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AACF,CAAA;AAED;;AAEG;MACU,UAAU,CAAA;AAOrB;;;AAGG;AACI,IAAA,SAAS,CAAC,MAA0B,EAAA;QACzC,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;AAC1B,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;AAC5B,SAAA;KACF;AAED;;AAEG;IACI,SAAS,GAAA;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,IAAwB,EAAA;QACrC,IAAI,CAAC,IAAI,EAAE;AACT,YAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,SAAA
;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;AAClC,SAAA;KACF;AAED;;AAEG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,IAAiC,EAAA;QAC9C,IAAI,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;AACtD,YAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;AACnC,SAAA;KACF;AAED;;AAEG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,IAAwB,EAAA;QACrC,IAAI,CAAC,IAAI,EAAE;AACT,YAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,SAAA;AAAM,aAAA;YACL,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACxC,YAAA,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;;;gBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AAC9E,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;AACxB,aAAA;AACF,SAAA;KACF;AAED;;;AAGG;AACI,IAAA,UAAU,CAAC,IAAwB,EAAA;AACxC,QAAA,IAAI,IAAI,EAAE;AACR,YAAA,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;AACrD,YAAA,IAAI,WAAW,EAAE;AACf,gBAAA,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;AACpB,iBAAA;AAED,gBAAA,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;AACxB,oBAAA,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;AAC1B,iBAAA;AAED,gBAAA,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;AAC3B,aAAA;AACD,YAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;AACxB,SAAA;KACF;AAED;;AAEG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;AAEG;AACI,IAAA,QAAQ,CAAC,KAAyB,EAAA;QACvC,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;AACzB,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AACrC,SAAA;KACF;AAED;;;;AAIG;IACI,iBAAiB,CAAC,kBAA0B,EAAE,mBAA4B,EAAA;AAC/E,QAAA,IAAI,kBAAkB,EAAE;AACtB,YAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;AAChB,gBAAA,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;AAC9B,aAAA;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;AAC1D,SAAA;KACF;AAED;;;AAGG;AACI,IAAA,sBAAsB,CAAC,kBAA0B,EAAA;AACtD,QAAA,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;KACtE;AAED;;AAEG;IACI,QAAQ,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;KACzD;AAED;;AAEG;IACK,GAAG,CAAC,IAAY,EAAE,UAA6B,EAAA;QACrD,MAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;AAErD,QAAA,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;AACvB,YAAA,MAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;AACxD,YAAA,IAAI,SAA6B,CAAC;AAClC,YAAA,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI;AAChB,oBAAA,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;AAER,oBAAA,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;AAER,oBAAA,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;AAER,oBAAA,KAAK,MAAM;AACT,wBAAA,SAAS,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;AACpC,wBAAA,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;AAC1D,4BAAA,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;AACxB,yBAAA;wBACD,MAAM;AAER,oBAAA,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;AAER,oBAAA;wBACE,MAAM,IAAI,KAAK,CAAC,CAAA,2BAAA,EAA8B,KAAK,CAAC,IAAI,CAAE,CAAA,CAAC,CAAC;AAC/D,iBAAA;AACF,aAAA;AACF,SAAA;KACF;AAED;;;AAGG;IACI,QAAQ,GAAA;QACb,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,MAAM,IAAI,CAAG,EAAA,IAAI,CAAC,OAAO,KAAK,CAAC;AAChC,SAAA;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;AACtB,SAAA;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,MAAM,IAAI,CA
AI,CAAA,EAAA,IAAI,CAAC,KAAK,EAAE,CAAC;AAC5B,SAAA;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;AACf,aAAA;AACD,YAAA,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;AACtB,SAAA;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAA,CAAE,CAAC;AACxC,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;AAGG;IACI,UAAU,CAAC,WAAmB,EAAE,YAAoB,EAAA;AACzD,QAAA,IAAI,WAAW,EAAE;AACf,YAAA,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACxE,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACpE,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACpE,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACpE,YAAA,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;AACvE,SAAA;KACF;AAED;;AAEG;IACI,OAAO,KAAK,CAAC,IAAY,EAAA;AAC9B,QAAA,MAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;AAChC,QAAA,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;AACnC,QAAA,OAAO,MAAM,CAAC;KACf;AACF,CAAA;MAMY,QAAQ,CAAA;IACnB,WAAmC,CAAA,IAAY,EAAkB,IAAkB,EAAA;QAAhD,IAAI,CAAA,IAAA,GAAJ,IAAI,CAAQ;QAAkB,IAAI,CAAA,IAAA,GAAJ,IAAI,CAAc;KAAI;IAEhF,OAAO,MAAM,CAAC,IAAY,EAAA;AAC/B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;KACrC;IAEM,OAAO,IAAI,CAAC,IAAY,EAAA;AAC7B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEM,OAAO,IAAI,CAAC,IAAY,EAAA;AAC7B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEM,OAAO,IAAI,CAAC,IAAY,EAAA;AAC7B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEM,OAAO,KAAK,CAAC,IAAY,EAAA;AAC9B,QAAA,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;KACpC;AACF,CAAA;AAED;;;AAGG;AACG,SAAU,uBAAuB,CAAC,SAAiB,EAAA;IACvD,MAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACtD,IAAA,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;SACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;AACtD,SAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;AACJ,CAAC;AAED;;AAEG;MACU,YAAY,CAAA;IAMvB,WAA4B,CAAA,KAAa,EAAE,KAAyB,EAAA;QAAxC,IAAK,CAAA,KAAA,GAAL,KAAK,CAAQ;AACvC,QAAA,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;AAC5C,QAAA,IAAI,CAAC,aAAa,GAAG,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,GAAG,KAAK,GAAG,gBAAgB,CAAC;AACtF,QAAA,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;KACxB;AAED;;;AAGG;IACI,OAAO,GAAA;QACZ,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;AAED;;AAEG;IACI,IAAI,GAAA;AACT,QAAA,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;AAC9B,YAAA,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;AAChC,SAAA;AAAM,aAAA;YACL,QAAQ,IAAI,CAAC,aAAa;AACxB,gBAAA,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;AAER,gBAAA,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;AAER,gBAAA,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;AAER,gBAAA,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;AAER,gBAAA,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;AAER,gBAAA,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;AAER,gBAAA;oBACE,MAAM,IAAI,KAAK,CAAC,CAAA,gCAAA,EAAmC,IAAI,CAAC,aAAa,CAAE,CAAA,CAAC,CAAC;AAC5E,aAAA;AACF,SAAA;AACD,QAAA,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;KAC7B;AACF,CAAA;AAED;;AAEG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAA;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;AAChB,IAAA,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAC5D,QAAA,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACjD,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;AAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB,EAAA;AAClD,IAAA,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;AAEG;AACH,SAAS,mBAAmB
,CAAC,SAAuB,EAAA;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;AAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa,EAAA;AAC3D,IAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;AACV,SAAA;AACD,QAAA,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;AACjC,KAAA;AACH,CAAC;AAED;;;AAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB,EAAA;AACvE,IAAA,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;AAClE,IAAA,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;AACpC,QAAA,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;AAClC,KAAA;AACD,IAAA,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;AAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC,EAAA;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;AAEhB,IAAA,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACrC,QAAA,MAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;AAChE,QAAA,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;AACP,SAAA;AAAM,aAAA;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;AAC1B,SAAA;AACF,KAAA;AAED,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;AAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB,EAAA;AACrD,IAAA,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC,SAAiB,KAAK,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC;AACzF,CAAC;AAED;;;AAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB,EAAE,GAAG,qBAA+B,EAAA;AACrF,IAAA,OAAO,SAAS,CACd,SAAS,EACT,CAAC,SAAiB,KAAK,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB,EAAA;AACzC,IAAA,MAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;AAClD,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB,EAAA;AAC/C,IAAA,MAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;AAC1E,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AACtD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;AACxD,YAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,SAAA;AAAM,aAAA;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AACtD,YAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,SAAA;AACF,KAAA;AAAM,SAAA;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AACtD,QAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;AAC1C,YAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,SAAA;AAAM,aAAA;AACL,YAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,SAAA;AACF,KAAA;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB,EAAA;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;AAC1C,QAAA,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;AAC7B,KAAA;AAED,IAAA,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAE9C,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;AACjD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;AACjD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,KAAA;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB,EAAA;AACvC,IAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;AAC1B,KAAA;IAED,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAE9C,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA,IAAI,mBAAmB,CAAC,SAA
S,CAAC,KAAK,GAAG,EAAE;AACjD,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,KAAA;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB,EAAA;IACvC,MAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAE9C,IAAA,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;AACnC,QAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;AACnC,KAAA;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB,EAAA;AACxC,IAAA,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;AAC1B,KAAA;AAED,IAAA,MAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAChD,IAAA,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC;;ACtqBA;SAWgB,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB,EAAA;AAEzB,IAAA,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY,CAAC;IACtE,IAAI,CAAC,IAAI,EAAE;AACT,QAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;AAClE,KAAA;AACD,IAAA,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,IAAI,CAAC,EAAE;AACpC,QAAA,MAAM,IAAI,KAAK,CAAC,6EAA6E,CAAC,CAAC;AAChG,KAAA;AACD,IAAA,MAAM,aAAa,GAAiC;AAClD,QAAA,KAAK,EAAE;AACL,YAAA,IAAI,EAAE,IAAI;YACV,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE;AACjD,SAAA;KACF,CAAC;AAEF,IAAA,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;AACpD,QAAA,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,CAAG,EAAA,aAAa,CAAC,QAAQ,CAAI,CAAA,EAAA,aAAa,CAAC,QAAQ,EAAE,CAAC;AACxF,KAAA;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,GAAG,aAAa,CAAC,QAAQ,CAAA,CAAE,CAAC;AAC9D,KAAA;AAED,IAAA,MAAM,cAAc,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC;IAC9C,MAAM,YAAY,GAAG,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;AAEpD,IAAA,MAAM,UAAU,GAAG;AACjB,QAAA,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;AAEF,IAAA,OAAO,UAAU,CAAC;AACpB,CAAC;AAEK,SAAU,UAAU,CAAC,GAAW,EAAA;AACpC,IAAA,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;AAC1D,IAAA,OAAO,SAAS,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;AAC7C,CAAC;SAEe,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAA2C,EAAA;IAE3C,IAAI,cAAc,IAAI,YAAY,EAAE;AAClC,QAAA,OAAOG,iBAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;AAC7C,KAAA;AAAM,SAAA,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;AAC1C,QAAA,OAAOA,iBAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;AAC5C,KAAA;AAAM,SAAA,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;AAC1C,QAAA,OAAOA,iBAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;AAC5C,KAAA;AAAM,SAAA;AACL,QAAA,OAAOA,iBAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;AAC3C,KAAA;AACH,CAAC;AAED,SAAS,WAAW,CAAC,IAAY,EAAA;;;AAG/B,IAAA,OAAO,CAAC,IAAI,IAAI,IAAI,IAAI,IAAI,KAAK,CAAC;AACpC;;ACzEA;AAsBA,MAAM,cAAc,GAAG,UAAU,CAAC;AAElC,MAAM,yBAAyB,GAAG;IAChC,wBAAwB;IACxB,+BAA+B;IAC/B,gBAAgB;IAChB,6BAA6B;IAC7B,iBAAiB;IACjB,mBAAmB;IACnB,OAAO;IACP,0BAA0B;IAC1B,aAAa;IAEb,kCAAkC;IAClC,8BAA8B;IAC9B,8BAA8B;IAC9B,6BAA6B;IAC7B,+BAA+B;IAC/B,wBAAwB;IACxB,gCAAgC;IAChC,+BAA+B;IAC/B,QAAQ;IAER,QAAQ;IACR,iBAAiB;IACjB,eAAe;IACf,YAAY;IACZ,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,MAAM;IACN,SAAS;IACT,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,eAAe;IACf,QAAQ;IACR,YAAY;IACZ,aAAa;IACb,QAAQ;IACR,mBAAmB;IACnB,YAAY;IACZ,kBAAkB;CACnB,CAAC;AAEF,MAAM,6BAA6B,GAAa,CAAC,aAAa,CAAC,CAAC;MAEnD,SAAS,CAAA;IAIpB,WAAY,CAAA,EAAE,kBAAkB,GAAG,EAAE,EAAE,sBAAsB,GAAG,EAAE,EAAA,GAAuB,EAAE,EAAA;AACzF,QAAA,kBAAkB,GAAG,KAAK,CAAC,OAAO,CAAC,kBAAkB,CAAC;AACpD,cAAE,yBAAyB,CAAC,MAAM,CAAC,kBAAkB,CAAC;cACpD,yBAAyB,CAAC;AAE9B,QAAA,sBAAsB,GAAG,KAAK,CAAC,OAAO,CAAC,sBAAsB,CAAC;AAC5D,cAAE,6BAA6B,CAAC,MAAM,CAAC,sBAAsB,CAAC;cAC5D,6BAA6B,CAAC;QAElC,IAAI,CAAC,kBAAkB,GAAG,IAAI,GAAG,CAAC,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;QAClF,
IAAI,CAAC,sBAAsB,GAAG,IAAI,GAAG,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC;KAC3F;AAEM,IAAA,QAAQ,CAAC,GAAY,EAAA;AAC1B,QAAA,MAAM,IAAI,GAAG,IAAI,GAAG,EAAW,CAAC;QAChC,OAAO,IAAI,CAAC,SAAS,CACnB,GAAG,EACH,CAAC,GAAW,EAAE,KAAc,KAAI;;YAE9B,IAAI,KAAK,YAAY,KAAK,EAAE;AAC1B,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACK,KAAK,CAAA,EAAA,EACR,IAAI,EAAE,KAAK,CAAC,IAAI,EAChB,OAAO,EAAE,KAAK,CAAC,OAAO,EACtB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,GAAG,KAAK,aAAa,EAAE;AACzB,gBAAA,OAAO,IAAI,CAAC,eAAe,CAAC,KAAsB,CAAC,CAAC;AACrD,aAAA;iBAAM,IAAI,GAAG,KAAK,KAAK,EAAE;AACxB,gBAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAe,CAAC,CAAC;AAC1C,aAAA;iBAAM,IAAI,GAAG,KAAK,OAAO,EAAE;AAC1B,gBAAA,OAAO,IAAI,CAAC,aAAa,CAAC,KAAsB,CAAC,CAAC;AACnD,aAAA;iBAAM,IAAI,GAAG,KAAK,MAAM,EAAE;;AAEzB,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,GAAG,KAAK,UAAU,EAAE;;AAE7B,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,GAAG,KAAK,eAAe,EAAE;;;AAGlC,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;iBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,KAAK,CAAC,EAAE;AAClD,gBAAA,IAAI,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;AACnB,oBAAA,OAAO,YAAY,CAAC;AACrB,iBAAA;AACD,gBAAA,IAAI,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;AACjB,aAAA;AAED,YAAA,OAAO,KAAK,CAAC;SACd,EACD,CAAC,CACF,CAAC;KACH;AAEO,IAAA,eAAe,CAAC,KAAoB,EAAA;QAC1C,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;KAClF;AAEO,IAAA,aAAa,CAAC,KAAoB,EAAA;QACxC,OAAO,IAAI,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;KAChF;AAEO,IAAA,cAAc,CACpB,KAAoB,EACpB,WAAwB,EACxB,QAA0C,EAAA;QAE1C,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;AAC/C,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,SAAS,GAAkB,EAAE,CAAC;QAEpC,KAAK,MAAM,CAAC,IAAI,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YAClC,IAAI,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;gBACpC,SAAS,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;AACnC,aAAA;AAAM,iBAAA;AACL,gBAAA,SAAS,CAAC,CAAC,CAAC,GAAG,cAAc,CAAC;AAC/B,aAAA;AACF,SAAA;AAED,QAAA,OAAO,SAAS,CAAC;KAClB;AAEO,IAAA,WAAW,CAAC,KAAa,EAAA;QAC/B,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,KAAK,IAAI,EAAE;AAC/C,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAC3C,QAAA,MAAM,WAAW,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;QAE1C,IAAI,CAAC,WAAW,EAAE;AAChB,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC1C,QAAA,KAAK,MAAM,CAAC,IAAI,KAAK,CAAC,IAAI,EAAE,EAAE;AAC5B,YAAA,IAAI,CAAC,IAAI,CAAC,sBAAsB,CAAC,GAAG,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,EAAE;AACrD,gBAAA,KAAK,CAAC,GAAG,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAC9B,aAAA;AACF,SAAA;QAED,UAAU,CAAC,QAAQ,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;AACtC,QAAA,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;KAC9B;AACF;;ACtLD;AAKO,MAAM,MAAM,GAAGC,YAAO,CAAC,MAAM;;ACLpC;AAQA,MAAM,cAAc,GAAG,IAAI,SAAS,EAAE,CAAC;AAEvC;;AAEG;AACG,MAAO,SAAU,SAAQ,KAAK,CAAA;IA8BlC,WACE,CAAA,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAAA;QAEhC,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AACxB,QAAA,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;AACjB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QAEzB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;KAClD;AAED;;AAEG;AACH,IAAA,CAAC,MAAM,CAAC,GAAA;AACN,QAAA,OAAO,CAAc,WAAA,EAAA,IAAI,CAAC,OAAO,CAAO,IAAA,EAAA,cAAc,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAA,CAAE,CAAC;KACzE;;AAnDD;;AAEG;AACa,SAAkB,CAAA,kBAAA,GAAW,oBAAoB,CAAC;AAClE;;AAEG;AACa,SAAW,CAAA,WAAA,GAAW,aAAa;;ACrBrD;AAGO,MAAM,MAAM,GAAGC,2BAAkB,CAAC,WAAW,CAAC;;ACHrD;AAuBA,SAAS,cAAc,CACrB,OAAgB,EAChB,UAAsB,E
AAA;AAEtB,IAAA,OAAO,OAAO,GAAG,UAAU,CAAC,UAAU,GAAG,UAAU,CAAC,SAAS,CAAC;AAChE,CAAC;AA+BK,MAAO,eAAgB,SAAQC,gBAAS,CAAA;AAS5C,IAAA,WAAA,CAAoB,gBAA2D,EAAA;AAC7E,QAAA,KAAK,EAAE,CAAC;QADU,IAAgB,CAAA,gBAAA,GAAhB,gBAAgB,CAA2C;QARvE,IAAW,CAAA,WAAA,GAAW,CAAC,CAAC;KAU/B;AATD,IAAA,UAAU,CAAC,KAAsB,EAAE,SAAiB,EAAE,QAA4B,EAAA;AAChF,QAAA,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACjB,QAAA,IAAI,CAAC,WAAW,IAAI,KAAK,CAAC,MAAM,CAAC;QACjC,IAAI,CAAC,gBAAiB,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAC1D,QAAQ,CAAC,SAAS,CAAC,CAAC;KACrB;AAKF,CAAA;AAED,SAAS,gBAAgB,CAAC,IAAS,EAAA;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB,EAAE,OAAyB,EAAA;AACnE,IAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;AAC7B,QAAA,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,MAAK;AACxB,YAAA,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,KAAK,EAAE,CAAC;AACjB,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;AACH,QAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;AAC5B,QAAA,MAAM,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAChC,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;AAEG;AACG,SAAU,YAAY,CAAC,OAAgB,EAAA;AAC3C,IAAA,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,KAAI;AAC7B,QAAA,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;AAC9B,KAAC,CAAC,CAAC;AAEH,IAAA,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;AAEG;MACU,mBAAmB,CAAA;AAAhC,IAAA,WAAA,GAAA;;AA+LU,QAAA,IAAA,CAAA,aAAa,GAA4B,IAAI,GAAG,EAAE,CAAC;QACnD,IAAe,CAAA,eAAA,GAAe,EAAE,CAAC;AAExB,QAAA,IAAA,CAAA,SAAS,GAAG,IAAIC,gBAAK,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;KAmHlF;AApTC;;;;AAIG;IACH,MAAM,WAAW,CAAC,WAA4B,EAAA;;AAC5C,QAAA,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;AACnD,YAAA,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;AACH,SAAA;AAED,QAAA,MAAMC,iBAAe,GAAG,IAAIC,+BAAe,EAAE,CAAC;AAC9C,QAAA,IAAI,aAAiD,CAAC;QACtD,IAAI,WAAW,CAAC,WAAW,EAAE;AAC3B,YAAA,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;AACnC,gBAAA,MAAM,IAAIC,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,aAAa,GAAG,CAAC,KAAY,KAAI;AAC/B,gBAAA,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oBAC1BF,iBAAe,CAAC,KAAK,EAAE,CAAC;AACzB,iBAAA;AACH,aAAC,CAAC;YACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;AAClE,SAAA;QAED,IAAI,WAAW,CAAC,OAAO,EAAE;YACvB,UAAU,CAAC,MAAK;gBACdA,iBAAe,CAAC,KAAK,EAAE,CAAC;AAC1B,aAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;AACzB,SAAA;QAED,IAAI,WAAW,CAAC,QAAQ,EAAE;AACxB,YAAA,MAAM,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;AAC3C,YAAA,MAAM,WAAW,GAAG,IAAIG,4BAAQ,EAAE,CAAC;AACnC,YAAA,MAAM,eAAe,GAAG,CAAC,GAAW,EAAE,KAAU,KAAU;;AAExD,gBAAA,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oBAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;AACjB,iBAAA;AACD,gBAAA,IACE,KAAK;oBACL,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC;oBACpD,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,KAAK,EAAE,SAAS,CAAC,EACtD;AACA,oBAAA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;AACrD,iBAAA;AAAM,qBAAA;AACL,oBAAA,WAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;AAChC,iBAAA;AACH,aAAC,CAAC;YACF,KAAK,MAAM,OAAO,IAAI,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAAE;AAC3C,gBAAA,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;AACpC,gBAAA,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;AAC5B,oBAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;AACxC,qBAAA;AACF,iBAAA;AAAM,qBAAA;AACL,oBAAA,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AACrC,iBAAA;AACF,aAAA;AAED,YAAA,WAAW,CAAC,IAAI,GAAG,WAAW,CAAC;AAC/B,YAAA,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;YACjC,MAAM,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;AACpE,gBAAA,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU
,EAAE;AACjD,oBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,CAAiC,8BAAA,EAAA,WAAW,CAAC,WAAW,EAAE,CAAA,CAAE,CAC7D,CAAC;AACH,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AAC5C,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,IAAI,IAAI,GAAG,WAAW,CAAC,IAAI;AACzB,cAAE,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;AACtC,kBAAE,WAAW,CAAC,IAAI,EAAE;kBAClB,WAAW,CAAC,IAAI;cAClB,SAAS,CAAC;AACd,QAAA,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;AACpD,YAAA,MAAM,gBAAgB,GAAG,WAAW,CAAC,gBAAgB,CAAC;AACtD,YAAA,MAAM,kBAAkB,GAAG,IAAI,eAAe,CAAC,gBAAgB,CAAC,CAAC;AACjE,YAAA,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;AAC1B,gBAAA,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA;AACL,gBAAA,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC9B,aAAA;YAED,IAAI,GAAG,kBAAkB,CAAC;AAC3B,SAAA;QAED,MAAM,2BAA2B,GAAyB,MAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,CAAC;AAEF,QAAA,MAAM,WAAW,GAAA,MAAA,CAAA,MAAA,CAAA,EACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAEH,iBAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,EACf,EAAA,2BAA2B,CAC/B,CAAC;AAEF,QAAA,IAAI,iBAAoD,CAAC;QACzD,IAAI;AACF,YAAA,MAAM,QAAQ,GAAmB,MAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;YAEhF,MAAM,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AAE/C,YAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,WAAW,CAAC,yBAAyB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC;gBAC3D,WAAW,CAAC,kBAAkB,CAAC;AAEjC,YAAA,iBAAiB,GAAG;AAClB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,OAAO,EAAE,WAAW;gBACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;AACvB,gBAAA,kBAAkB,EAAE,SAAS;sBACxB,QAAQ,CAAC,IAAyC;AACrD,sBAAE,SAAS;AACb,gBAAA,UAAU,EAAE,CAAC,SAAS,GAAG,MAAM,QAAQ,CAAC,IAAI,EAAE,GAAG,SAAS;aAC3D,CAAC;AAEF,YAAA,MAAM,kBAAkB,GAAG,WAAW,CAAC,kBAAkB,CAAC;AAC1D,YAAA,IAAI,kBAAkB,EAAE;AACtB,gBAAA,MAAM,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;AAExF,gBAAA,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;AAClC,oBAAA,MAAM,oBAAoB,GAAG,IAAI,eAAe,CAAC,kBAAkB,CAAC,CAAC;AACrE,oBAAA,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACxC,oBAAA,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;AAC7D,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;AACrE,oBAAA,IAAI,MAAM,EAAE;;AAEV,wBAAA,kBAAkB,CAAC,EAAE,WAAW,EAAE,MAAM,EAAE,CAAC,CAAC;AAC7C,qBAAA;AACF,iBAAA;AACF,aAAA;AAED,YAAA,MAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAE7C,YAAA,OAAO,iBAAiB,CAAC;AAC1B,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,UAAU,GAAe,KAAK,CAAC;AACrC,YAAA,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;AACnC,gBAAA,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;AACH,aAAA;AAAM,iBAAA,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;AACxC,gBAAA,MAAM,IAAIE,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,MAAM,UAAU,CAAC;AAClB,SAAA;AAAS,gBAAA;;AAER,YAAA,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;AAC5C,gBAAA,IAAI,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;AACzC,gBAAA,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;AAC1B,oBAAA,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;AAC3C,iBAAA;AACD,gBAAA,IAAI,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;gBAC3C,IAAI,gBAAgB,CAAC,iBAAiB,KAAjB,IAAA,IAAA,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;oBAC3D,kBAAkB,GAAG,gBAAgB,CACnC,iBAAkB,CAAC,kBAAkB,EACrCF,iBAAe,CAChB,CAAC;AACH,iBAAA;gBAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;qBAChD,IAAI,CAAC,MAAK;;oBACT,CAAA,EAAA,GAAA,WAAW,CAAC,WAAW,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,mBAAmB,CAAC,OAAO,EAAE,aAAc,CAAC,CAAC;oBACtE,OAAO;AACT,iBAAC,CAAC;AACD,qBAAA,KAAK,CAAC,CAAC,CAAC,KAAI;AACX,oBAAA,MAAM,CAAC,OAAO,CAAC,qDAAqD,EAAE,CAAC,CAAC,CAAC;AAC3E,iBAAC,CAAC,CAAC;AACN,aAAA;AACF,SAAA;KACF;AAQO,IAAA,gBAAgB,CAAC,WAA4B,EAA
A;;QACnD,MAAM,OAAO,GAAG,UAAU,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;;;;QAK5C,IAAI,WAAW,CAAC,aAAa,EAAE;AAC7B,YAAA,MAAM,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAE,GAAG,WAAW,CAAC,aAAa,CAAC;YACrE,MAAM,GAAG,GAAG,CAAA,EAAG,IAAI,CAAA,CAAA,EAAI,IAAI,CAAA,CAAA,EAAI,QAAQ,CAAA,CAAA,EAAI,QAAQ,CAAA,CAAE,CAAC;AACtD,YAAA,MAAM,WAAW,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,CAAC,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,EAAE,CAAC;YAEtD,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACjD,YAAA,IAAI,KAAK,EAAE;AACT,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAED,YAAA,MAAM,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;AAEF,YAAA,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;YACrB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,gBAAA,WAAW,CAAC,UAAU,GAAG,MAAM,CAAC,KAAoB,CAAC;AACtD,aAAA;AAAM,iBAAA;AACL,gBAAA,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;AACtC,aAAA;YACD,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;AAEzC,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;aAAM,IAAI,WAAW,CAAC,SAAS,EAAE;YAChC,IAAI,KAAK,GAAG,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;AAC1D,YAAA,IAAI,KAAK,EAAE;AACT,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAED,YAAA,MAAM,YAAY,GAA2C;gBAC3D,SAAS,EAAE,WAAW,CAAC,SAAS;aACjC,CAAC;AAEF,YAAA,IAAI,OAAO,EAAE;AACX,gBAAA,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,UAAU,GAAG,IAAII,gBAAK,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACzE,aAAA;AAAM,iBAAA;AACL,gBAAA,KAAK,GAAG,IAAI,CAAC,eAAe,CAAC,SAAS,GAAG,IAAIC,eAAI,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACvE,aAAA;AAED,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,OAAO,GAAGD,gBAAK,CAAC,WAAW,GAAGC,eAAI,CAAC,WAAW,CAAC;AACvD,SAAA;KACF;AAED;;AAEG;;AAEH,IAAA,MAAM,KAAK,CAAC,KAAwB,EAAE,IAAwB,EAAA;AAC5D,QAAA,OAAOC,8BAAU,CAAC,KAAK,EAAE,IAAI,CAAuC,CAAC;KACtE;AAED;;AAEG;IACH,MAAM,cAAc,CAAC,WAA4B,EAAA;QAC/C,MAAM,WAAW,GAA+D,EAAE,CAAC;AAEnF,QAAA,IAAI,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE;YACxD,MAAM,YAAY,GAAG,MAAM,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,KAAI;AACjE,gBAAA,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,MAAM,KAAI;AAC/D,oBAAA,IAAI,GAAG,EAAE;wBACP,MAAM,CAAC,GAAG,CAAC,CAAC;AACb,qBAAA;AAAM,yBAAA;wBACL,OAAO,CAAC,MAAM,CAAC,CAAC;AACjB,qBAAA;AACH,iBAAC,CAAC,CAAC;AACL,aAAC,CAAC,CAAC;YAEH,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;AACjD,SAAA;;QAGD,WAAW,CAAC,KAAK,GAAG,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC,CAAC;AAEvD,QAAA,WAAW,CAAC,QAAQ,GAAG,WAAW,CAAC,kBAAkB,CAAC;AAEtD,QAAA,OAAO,WAAW,CAAC;KACpB;AAED;;AAEG;IACH,MAAM,cAAc,CAAC,iBAAwC,EAAA;QAC3D,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,MAAM,eAAe,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;YACpE,IAAI,eAAe,KAAK,SAAS,EAAE;gBACjC,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;oBAC1C,IAAI,CAAC,SAAU,CAAC,SAAS,CACvB,eAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,CAAC,GAAG,KAAI;AACN,wBAAA,IAAI,GAAG,EAAE;4BACP,MAAM,CAAC,GAAG,CAAC,CAAC;AACb,yBAAA;AAAM,6BAAA;AACL,4BAAA,OAAO,EAAE,CAAC;AACX,yBAAA;AACH,qBAAC,CACF,CAAC;AACJ,iBAAC,CAAC,CAAC;AACJ,aAAA;AACF,SAAA;KACF;AACF;;AC7ZD;AACA;AAEA;;AAEG;AACSC,sCAoBX;AApBD,CAAA,UAAY,oBAAoB,EAAA;AAC9B;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,KAAA,CAAA,GAAA,CAAA,CAAA,GAAA,KAAG,CAAA;AAEH;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AAEL;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,SAAA,CAAA,GAAA,CAAA,CAAA,GAAA,SAAO,CAAA;AAEP;;AAEG;AACH,IAAA,oBAAA,CAAA,oBAAA,CAAA,MAAA,CAAA,GAAA,CAAA,CAAA,GAAA,MAAI,CAAA;AACN,CAAC,EApBWA,4BAAoB,KAApBA,4BAAoB,GAoB/B,EAAA,CAAA,CAAA;;AC1BD;AAwDA;;;;AAIG;AACG,SAAU,oCAAoC,CAClD,IAAO,EAAA;AAEP,IAAA,MAAM,EAAE,cAAc,EAAE,cAAc,EAA2B,GAAA,IAAI,EAA1B,iBAAiB,GAAKC,YAAA,CAAA,IAAI,EAA/D,CAAA,gBAAA,EAAA,gBAAA,CAAwD,CAAO,CAAC;IAE
tE,IAAI,MAAM,GAAuB,iBAAiB,CAAC;AAEnD,IAAA,IAAI,cAAc,EAAE;AAClB,QAAA,MAAM,GAAQ,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,MAAM,CAAK,EAAA,cAAc,CAAE,CAAC;AAC3C,KAAA;AAED,IAAA,IAAI,cAAc,EAAE;AAClB,QAAA,MAAM,CAAC,cAAc,GAAG,cAAc,CAAC,cAAc,CAAC;;QAEtD,MAAM,CAAC,WAAW,GAAI,cAAsB,KAAA,IAAA,IAAtB,cAAc,KAAd,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,cAAc,CAAU,WAAW,CAAC;AAC3D,KAAA;AAED,IAAA,OAAO,MAAM,CAAC;AAChB;;AC/EA;AA0BA;;AAEG;MACmB,iBAAiB,CAAA;AACrC;;AAEG;AACH,IAAA,WAAA;AACE;;AAEG;IACM,WAA0B;AACnC;;AAEG;IACM,QAAkC,EAAA;QAJlC,IAAW,CAAA,WAAA,GAAX,WAAW,CAAe;QAI1B,IAAQ,CAAA,QAAA,GAAR,QAAQ,CAA0B;KACzC;AAQJ;;;;AAIG;AACI,IAAA,SAAS,CAAC,QAA8B,EAAA;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;KAC1C;AAED;;;;;AAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe,EAAA;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;KACtC;AACF,CAAA;AAsBD;;AAEG;MACU,oBAAoB,CAAA;AAC/B,IAAA,WAAA,CAAoB,OAA4B,EAAA;QAA5B,IAAO,CAAA,OAAA,GAAP,OAAO,CAAqB;KAAI;AAEpD;;;;AAIG;AACI,IAAA,SAAS,CAAC,QAA8B,EAAA;AAC7C,QAAA,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAKD,4BAAoB,CAAC,GAAG;AACrC,YAAA,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;KACH;AAED;;;;;AAKG;IACI,GAAG,CAAC,QAA8B,EAAE,OAAe,EAAA;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;AACrC,SAAA;KACF;AACF;;ACxHD;AAMA;AACA;AACA;AACA;AACA,MAAM,sBAAsB,GAAqB;AAC/C,IAAA,eAAe,EAAE,KAAK;AACtB,IAAA,IAAI,EAAE,KAAK;AACX,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,aAAa,EAAE,KAAK;AACpB,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,aAAa,EAAE,IAAI;AACnB,IAAA,WAAW,EAAE,KAAK;AAClB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,YAAY,EAAE,IAAI;AAClB,IAAA,SAAS,EAAE,SAAS;AACpB,IAAA,KAAK,EAAE,KAAK;AACZ,IAAA,gBAAgB,EAAE,KAAK;AACvB,IAAA,qBAAqB,EAAE,KAAK;AAC5B,IAAA,QAAQ,EAAE,IAAI;AACd,IAAA,eAAe,EAAE,KAAK;AACtB,IAAA,iBAAiB,EAAE,KAAK;AACxB,IAAA,KAAK,EAAE,KAAK;AACZ,IAAA,MAAM,EAAE,IAAI;AACZ,IAAA,kBAAkB,EAAE,SAAS;AAC7B,IAAA,mBAAmB,EAAE,SAAS;AAC9B,IAAA,iBAAiB,EAAE,SAAS;AAC5B,IAAA,eAAe,EAAE,SAAS;AAC1B,IAAA,QAAQ,EAAE,MAAM;AAChB,IAAA,MAAM,EAAE;AACN,QAAA,OAAO,EAAE,KAAK;AACd,QAAA,QAAQ,EAAE,OAAO;AACjB,QAAA,UAAU,EAAE,IAAI;AACjB,KAAA;AACD,IAAA,OAAO,EAAE,SAAS;AAClB,IAAA,UAAU,EAAE;AACV,QAAA,MAAM,EAAE,IAAI;AACZ,QAAA,MAAM,EAAE,IAAI;AACZ,QAAA,OAAO,EAAE,IAAI;AACd,KAAA;AACD,IAAA,QAAQ,EAAE,KAAK;AACf,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,QAAQ,EAAE,EAAE;AACZ,IAAA,KAAK,EAAE,KAAK;CACb,CAAC;AAEF;AACA,MAAM,oBAAoB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC5E,oBAAoB,CAAC,aAAa,GAAG,KAAK,CAAC;AAE3C;AACA,MAAM,qBAAqB,GAAQ,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,sBAAsB,CAAC,CAAC;AAC7E,qBAAqB,CAAC,aAAa,GAAG,KAAK,CAAC;AAC5C,qBAAqB,CAAC,UAAU,GAAG;AACjC,IAAA,MAAM,EAAE,KAAK;CACd,CAAC;AAEF;;;;AAIG;SACa,YAAY,CAAC,GAAY,EAAE,OAA0B,EAAE,EAAA;;AACrE,IAAA,qBAAqB,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;IAC/C,qBAAqB,CAAC,OAAO,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;IAC/D,MAAM,OAAO,GAAG,IAAIE,iBAAM,CAAC,OAAO,CAAC,qBAAqB,CAAC,CAAC;AAC1D,IAAA,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED;;;;AAIG;SACa,QAAQ,CAAC,GAAW,EAAE,OAA0B,EAAE,EAAA;;IAChE,oBAAoB,CAAC,YAAY,GAAG,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC;IACvD,oBAAoB,CAAC,OAAO,GAAG,CAAA,EAAA,GAAA,IAAI,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;IAC9D,MAAM,SAAS,GAAG,IAAIA,iBAAM,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC;IAC1D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;QACrC,IAAI,CAAC,GAAG,EAAE;AACR,YAAA,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;AACxC,SAAA;AAAM,aAAA;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,GAAG,KAAI;AACtC,gBAAA,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;AACb,iBAAA;AAAM,qBAAA;oBACL,OAAO,CAAC,G
AAG,CAAC,CAAC;AACd,iBAAA;AACH,aAAC,CAAC,CAAC;AACJ,SAAA;AACH,KAAC,CAAC,CAAC;AACL;;AChGA;AA+CA;;;AAGG;AACa,SAAA,qBAAqB,CACnC,2BAAyD,EACzD,cAAkC,EAAA;IAElC,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,qBAAqB,CAC9B,UAAU,EACV,OAAO,EACP,2BAA2B,EAC3B,cAAc,CACf,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAEM,MAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AAClE,MAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAE3E,MAAM,6BAA6B,GAA2B;AACnE,IAAA,oBAAoB,EAAE;AACpB,QAAA,IAAI,EAAE,uBAAuB;AAC7B,QAAA,GAAG,EAAE,sBAAsB;AAC5B,KAAA;CACF,CAAC;AAEF;;;AAGG;AACG,MAAO,qBAAsB,SAAQ,iBAAiB,CAAA;AAK1D,IAAA,WAAA,CACE,UAAyB,EACzB,oBAA0C,EAC1C,2BAAyD,EACzD,iBAAoC,EAAE,EAAA;;AAEtC,QAAA,KAAK,CAAC,UAAU,EAAE,oBAAoB,CAAC,CAAC;AAExC,QAAA,IAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;AAC/F,QAAA,IAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;QAC7F,IAAI,CAAC,UAAU,GAAG,CAAA,EAAA,GAAA,cAAc,CAAC,UAAU,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,WAAW,CAAC;KAC5D;IAEM,MAAM,WAAW,CAAC,OAAwB,EAAA;QAC/C,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAA+B,KAChF,uBAAuB,CAAC,IAAI,CAAC,gBAAgB,EAAE,IAAI,CAAC,eAAe,EAAE,QAAQ,EAAE;YAC7E,UAAU,EAAE,IAAI,CAAC,UAAU;AAC5B,SAAA,CAAC,CACH,CAAC;KACH;AACF,CAAA;AAED,SAAS,oBAAoB,CAC3B,cAAqC,EAAA;AAErC,IAAA,IAAI,MAAqC,CAAC;AAC1C,IAAA,MAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;AACxD,IAAA,MAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;AACvE,IAAA,IAAI,aAAa,EAAE;AACjB,QAAA,MAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;AACzD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;AACjE,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC,EAAA;AACtE,IAAA,MAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;AAC3C,IAAA,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;AACf,KAAA;AAAM,SAAA,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;AAC5B,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;AAC5C,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;AAOG;AACG,SAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B,EAC/B,OAAA,GAA6B,EAAE,EAAA;;AAE/B,IAAA,MAAM,cAAc,GAAgC;AAClD,QAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,QAAQ,mCAAI,EAAE;AAChC,QAAA,WAAW,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,WAAW,mCAAI,KAAK;AACzC,QAAA,UAAU,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,mCAAI,WAAW;KAC9C,CAAC;AACF,IAAA,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,EAAE,cAAc,CAAC,CAAC,IAAI,CAC5E,CAAC,cAAc,KAAI;AACjB,QAAA,IAAI,CAAC,yBAAyB,CAAC,cAAc,CAAC,EAAE;AAC9C,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;AAED,QAAA,MAAM,aAAa,GAAG,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;AAC3D,QAAA,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,SAAS,EAAE;AAC9C,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;AAED,QAAA,MAAM,YAAY,GAAG,oBAAoB,CAAC,cAAc,CAAC,CAAC;AAE1D,QAAA,MAAM,EAAE,KAAK,EAAE,oBAAoB,EAAE,GAAG,mBAAmB,CACzD,cAAc,EACd,aAAa,EACb,YAAY,CACb,CAAC;AACF,QAAA,IAAI,KAAK,EAAE;AACT,YAAA,MAAM,KAAK,CAAC;AACb,SAAA;AAAM,aAAA,IAAI,oBAAoB,EAAE;AAC/B,YAAA,OAAO,cAAc,CAAC;AACvB,SAAA;;;AAID,QAAA,IAAI,YAAY,EAAE;YAChB,IAAI,YAAY,CAAC,UAAU,EAAE;AAC3B,gBAAA,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;AACxD,gBAAA,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBACpF,kBAAkB;wBAChB,OAAO,kBAAkB,KAAK,QAAQ;8BAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;8BAC3D,EAAE,CAAC;AACV,iBAAA;gBACD,IAAI;AACF,oBAAA,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,EACzB,OAAO,CACR,CAAC;AACH,iBAAA;AAAC,gBAAA,OAAO,UAAe,EAAE;oBACxB,MAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,SAAS,UAAU,CAAA
,8CAAA,EAAiD,cAAc,CAAC,UAAU,CAAA,CAAE,EAC/F,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;AACF,oBAAA,MAAM,SAAS,CAAC;AACjB,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;AAE9C,gBAAA,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;AAC7E,aAAA;YAED,IAAI,YAAY,CAAC,aAAa,EAAE;gBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,EAC5B,OAAO,CACR,CAAC;AACH,aAAA;AACF,SAAA;AAED,QAAA,OAAO,cAAc,CAAC;AACxB,KAAC,CACF,CAAC;AACJ,CAAC;AAED,SAAS,oBAAoB,CAAC,aAA4B,EAAA;IACxD,MAAM,mBAAmB,GAAG,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;AACjE,IAAA,QACE,mBAAmB,CAAC,MAAM,KAAK,CAAC;AAChC,SAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,EAC1E;AACJ,CAAC;AAED,SAAS,mBAAmB,CAC1B,cAAqC,EACrC,aAA4B,EAC5B,YAA2C,EAAA;;AAE3C,IAAA,MAAM,iBAAiB,GAAG,GAAG,IAAI,cAAc,CAAC,MAAM,IAAI,cAAc,CAAC,MAAM,GAAG,GAAG,CAAC;AACtF,IAAA,MAAM,oBAAoB,GAAY,oBAAoB,CAAC,aAAa,CAAC;AACvE,UAAE,iBAAiB;AACnB,UAAE,CAAC,CAAC,YAAY,CAAC;AAEnB,IAAA,IAAI,oBAAoB,EAAE;AACxB,QAAA,IAAI,YAAY,EAAE;AAChB,YAAA,IAAI,CAAC,YAAY,CAAC,OAAO,EAAE;gBACzB,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AACrD,aAAA;AACF,SAAA;AAAM,aAAA;YACL,OAAO,EAAE,KAAK,EAAE,IAAI,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AACrD,SAAA;AACF,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,YAAY,KAAA,IAAA,IAAZ,YAAY,KAAA,KAAA,CAAA,GAAZ,YAAY,GAAI,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;AAC1E,IAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,cAAc,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,cAAc,CAAC,MAAM,CAAC;AAC5E,QAAA,cAAc,CAAC,OAAO,CAAC,kBAAkB,CAAC;IAC5C,MAAM,mBAAmB,GAAG,SAAS;AACnC,UAAE,CAAA,wBAAA,EAA2B,cAAc,CAAC,MAAM,CAAE,CAAA;AACpD,UAAG,cAAc,CAAC,UAAqB,CAAC;AAE1C,IAAA,MAAM,KAAK,GAAG,IAAI,SAAS,CACzB,mBAAmB,EACnB,SAAS,EACT,cAAc,CAAC,MAAM,EACrB,cAAc,CAAC,OAAO,EACtB,cAAc,CACf,CAAC;;;IAIF,IAAI,CAAC,iBAAiB,EAAE;AACtB,QAAA,MAAM,KAAK,CAAC;AACb,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,iBAAiB,CAAC,UAAU,CAAC;AACvD,IAAA,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,aAAa,CAAC;IAE7D,IAAI;;;QAGF,IAAI,cAAc,CAAC,UAAU,EAAE;AAC7B,YAAA,MAAM,UAAU,GAAG,cAAc,CAAC,UAAU,CAAC;AAC7C,YAAA,IAAI,WAAW,CAAC;AAChB,YAAA,IAAI,iBAAiB,EAAE;gBACrB,IAAI,kBAAkB,GAAQ,UAAU,CAAC;AACzC,gBAAA,IAAI,aAAa,CAAC,KAAK,IAAI,iBAAiB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;oBAC9E,kBAAkB;AAChB,wBAAA,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,CAAC,iBAAiB,CAAC,cAAe,CAAC,GAAG,EAAE,CAAC;AACvF,iBAAA;AACD,gBAAA,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAChD,iBAAiB,EACjB,kBAAkB,EAClB,2BAA2B,CAC5B,CAAC;AACH,aAAA;YAED,MAAM,aAAa,GAAQ,UAAU,CAAC,KAAK,IAAI,WAAW,IAAI,UAAU,CAAC;AACzE,YAAA,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;YAChC,IAAI,aAAa,CAAC,OAAO,EAAE;AACzB,gBAAA,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;AACvC,aAAA;AAED,YAAA,IAAI,iBAAiB,EAAE;AACrB,gBAAA,KAAK,CAAC,QAAS,CAAC,UAAU,GAAG,WAAW,CAAC;AAC1C,aAAA;AACF,SAAA;;AAGD,QAAA,IAAI,cAAc,CAAC,OAAO,IAAI,oBAAoB,EAAE;YAClD,KAAK,CAAC,QAAS,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAClE,oBAAoB,EACpB,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;AACH,SAAA;AACF,KAAA;AAAC,IAAA,OAAO,YAAiB,EAAE;AAC1B,QAAA,KAAK,CAAC,OAAO,GAAG,CAAA,OAAA,EAAU,YAAY,CAAC,OAAO,CAAA,gDAAA,EAAmD,cAAc,CAAC,UAAU,CAAA,2BAAA,CAA6B,CAAC;AACzJ,KAAA;AAED,IAAA,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,KAAK,EAAE,CAAC;AAChD,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC,EACxC,IAAiC,EAAA;;AAEjC,IAAA,MAAM,YAAY,GAAG,CAAC,GAA6B,KAAoB;QACrE,MAAM,GAAG,GAAG,CAAU,OAAA,EAAA,GAAG,gDAAgD,iBAAiB,CAAC,UAAU,CAAA,CAAA,CAAG,CAAC;QACzG,MAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;AAClD,QAAA,MAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB
,iBAAiB,CAClB,CAAC;AACF,QAAA,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAC3B,KAAC,CAAC;AAEF,IAAA,MAAM,SAAS,GACb,CAAA,CAAA,EAAA,GAAA,iBAAiB,CAAC,OAAO,CAAC,yBAAyB,0CAAE,GAAG,CAAC,iBAAiB,CAAC,MAAM,CAAC;AAClF,QAAA,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,CAAC;AAC/C,IAAA,IAAI,CAAC,SAAS,IAAI,iBAAiB,CAAC,UAAU,EAAE;AAC9C,QAAA,MAAM,IAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;AAC1C,QAAA,MAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,MAAM,iBAAiB,GAAa,CAAC,WAAW;AAC9C,cAAE,EAAE;cACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,SAAS,KAAK,SAAS,CAAC,WAAW,EAAE,CAAC,CAAC;AACvE,QAAA,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;AAC9B,YAAA,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EACjF;AACA,YAAA,OAAO,IAAI,OAAO,CAAwB,CAAC,OAAO,KAAI;gBACpD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC7B,aAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AACxB,SAAA;AAAM,aAAA,IAAI,iBAAiB,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;AAC3F,YAAA,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC;AACxB,iBAAA,IAAI,CAAC,CAAC,IAAI,KAAI;AACb,gBAAA,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;AACpC,gBAAA,OAAO,iBAAiB,CAAC;AAC3B,aAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;AACxB,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C;;AC9XA;AAwBA;;AAEG;AACI,MAAM,uBAAuB,GAAqB;AACvD,IAAA,MAAM,EAAE,IAAI;CACb,CAAC;AAEF;;;;AAIG;AACG,SAAU,eAAe,CAAC,gBAAmC,EAAA;IACjE,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,gBAAgB,IAAI,uBAAuB,CAAC,CAAC;SAC9F;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,eAAgB,SAAQ,iBAAiB,CAAA;AACpD;;;;;;AAMG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACZ,gBAAkC,EAAA;AAEnD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFV,IAAgB,CAAA,gBAAA,GAAhB,gBAAgB,CAAkB;KAGpD;AAED;;;;;AAKG;IACI,MAAM,WAAW,CAAC,OAAwB,EAAA;QAC/C,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;QACjD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACzED;AAaA;;AAEG;AACH,MAAM,eAAe,GAAG,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC;AAkBjC,MAAM,sBAAsB,GAAoB;AACrD,IAAA,eAAe,EAAE,IAAI;AACrB,IAAA,UAAU,EAAE,EAAE;CACf,CAAC;AAEF;;;;AAIG;AACa,SAAA,cAAc,CAAC,cAAc,GAAG,EAAE,EAAA;IAChD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAChE;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,cAAe,SAAQ,iBAAiB,CAAA;AACnD,IAAA,WAAA,CAAY,UAAyB,EAAE,OAA6B,EAAW,aAAa,EAAE,EAAA;AAC5F,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QADkD,IAAU,CAAA,UAAA,GAAV,UAAU,CAAK;KAE7F;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;AACpB,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAK,cAAc,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC,CAAC;KAC1D;AACF,CAAA;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB,EAAA;AAEtB,IAAA,MAAM,EAAE,OAAO,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAC;IACrC,MAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;AACxD,IAAA,IACE,cAAc;SACb,MAAM,KAAK,GAAG;AACb,aAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;AAC5D,aAAC,MAAM,KAAK,GAAG,IAAI,eAAe,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aAC3D,MAAM,KAAK,GAAG,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;SAChB,CAAC,MAAM,CAAC,UAAU,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,EAC1D;QACA,MAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AAC9C,QAAA,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;AAChC,QAAA,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;QAIjC,IAAI,MAAM,KAAK,GAAG,EAAE;AAClB,YAAA,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;AACrB,SAAA;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;AACpB,a
AAA,IAAI,CAAC,CAAC,GAAG,KAAK,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;AACnE,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC;;ACpGA;AACA;AAIO,MAAM,0BAA0B,GAAG,CAAC,CAAC;AAC5C;AACO,MAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,MAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,MAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEpD,SAAU,QAAQ,CAAC,CAAU,EAAA;AACjC,IAAA,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;AAC/B,CAAC;AAaD;;;;;;;;AAQG;AACG,SAAU,WAAW,CACzB,UAAkB,EAClB,SAA4E,EAC5E,SAAoB,EACpB,QAAgC,EAChC,KAAkB,EAAA;AAElB,IAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,KAAK,CAAC,EAAE;AAC/B,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAED,IAAA,OAAO,SAAS,CAAC,UAAU,GAAG,UAAU,CAAC;AAC3C,CAAC;AAED;;;;;;;AAOG;SACa,eAAe,CAC7B,YAA2F,EAC3F,YAAuB,EAAE,UAAU,EAAE,CAAC,EAAE,aAAa,EAAE,CAAC,EAAE,EAC1D,GAAgB,EAAA;AAEhB,IAAA,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;AACnB,YAAA,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;AAClC,SAAA;AAED,QAAA,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;AACvB,KAAA;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;AAGvB,IAAA,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;AAC/D,IAAA,MAAM,gBAAgB,GACpB,YAAY,CAAC,aAAa,GAAG,GAAG;AAChC,QAAA,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,YAAY,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACjE,cAAc,IAAI,gBAAgB,CAAC;AAEnC,IAAA,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,YAAY,CAAC,gBAAgB,GAAG,cAAc,EAC9C,YAAY,CAAC,gBAAgB,CAC9B,CAAC;AAEF,IAAA,OAAO,SAAS,CAAC;AACnB;;ACtFA;AACA;AAEA;;;;AAIG;AACG,SAAU,SAAS,CAAI,KAA2B,EAAA;IACtD,OAAO,OAAO,KAAK,KAAK,WAAW,IAAI,KAAK,KAAK,IAAI,CAAC;AACxD;;ACVA;AAMA,MAAMC,sBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;AAQG;SACa,KAAK,CACnB,SAAiB,EACjB,KAAS,EACT,OAGC,EAAA;IAED,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;QACrC,IAAI,KAAK,GAA8C,SAAS,CAAC;QACjE,IAAI,SAAS,GAA6B,SAAS,CAAC;QAEpD,MAAM,aAAa,GAAG,MAAW;AAC/B,YAAA,OAAO,MAAM,CACX,IAAIR,0BAAU,CAAC,CAAA,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,aAAa,IAAG,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,aAAa,GAAGQ,sBAAoB,CAAC,CACvF,CAAC;AACJ,SAAC,CAAC;QAEF,MAAM,eAAe,GAAG,MAAW;YACjC,IAAI,CAAA,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,KAAI,SAAS,EAAE;gBACrC,OAAO,CAAC,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AAC7D,aAAA;AACH,SAAC,CAAC;QAEF,SAAS,GAAG,MAAW;AACrB,YAAA,IAAI,SAAS,CAAC,KAAK,CAAC,EAAE;gBACpB,YAAY,CAAC,KAAK,CAAC,CAAC;AACrB,aAAA;AACD,YAAA,eAAe,EAAE,CAAC;YAClB,OAAO,aAAa,EAAE,CAAC;AACzB,SAAC,CAAC;AAEF,QAAA,IAAI,CAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,KAAI,OAAO,CAAC,WAAW,CAAC,OAAO,EAAE;YACvD,OAAO,aAAa,EAAE,CAAC;AACxB,SAAA;AAED,QAAA,KAAK,GAAG,UAAU,CAAC,MAAK;AACtB,YAAA,eAAe,EAAE,CAAC;YAClB,OAAO,CAAC,KAAK,CAAC,CAAC;SAChB,EAAE,SAAS,CAAC,CAAC;AAEd,QAAA,IAAI,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,EAAE;YACxB,OAAO,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;AAC1D,SAAA;AACH,KAAC,CAAC,CAAC;AACL;;AC9DA;AA0BA;;;;;AAKG;SACa,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EAAA;IAEzB,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACSC,2BAMX;AAND,CAAA,UAAY,SAAS,EAAA;AACnB;;;AAGG;AACH,IAAA,SAAA,CAAA,SAAA,CAAA,aAAA,CAAA,GAAA,CAAA,CAAA,GAAA,aAAW,CAAA;AACb,CAAC,EANWA,iBAAS,KAATA,iBAAS,GAMpB,EAAA,CAAA,CAAA,CAAA;AA8BM,MAAM,mBAAmB,GAAiB;AAC/C,IAAA,UAAU,EAAE,0BAA0B;AACtC,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,iBAAiB,EAAE,iCAAiC;CACrD,CAAC;AAEF;;AAEG;AACG,MAAO,sBAAuB,SAAQ,iBAAiB,CAAA;AAc3D;;;;;;;AAOG;IACH,WACE,CAAA,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EAAA;AAEzB,QAAA,KAAK,CAAC,UAAU,EAAE,O
AAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;AACjF,QAAA,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;AAC7F,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;KACvC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAKC,OAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC;aAClD,KAAK,CAAC,CAAC,KAAK,KAAKA,OAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC;KAC7E;AACF,CAAA;AAED,eAAeA,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB,EAAA;IAEzB,SAAS,iBAAiB,CAAC,aAAqC,EAAA;QAC9D,MAAM,UAAU,GAAG,aAAa,KAAA,IAAA,IAAb,aAAa,KAAb,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,aAAa,CAAE,MAAM,CAAC;QACzC,IAAI,UAAU,KAAK,GAAG,KAAI,QAAQ,KAAR,IAAA,IAAA,QAAQ,uBAAR,QAAQ,CAAE,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,WAAW,CAAC,CAAA,EAAE;AACtF,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;QAED,IACE,UAAU,KAAK,SAAS;AACxB,aAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;AACxC,YAAA,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;AACA,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;IAED,SAAS,GAAG,eAAe,CACzB;QACE,aAAa,EAAE,MAAM,CAAC,aAAa;AACnC,QAAA,gBAAgB,EAAE,CAAC;QACnB,gBAAgB,EAAE,MAAM,CAAC,gBAAgB;AAC1C,KAAA,EACD,SAAS,EACT,YAAY,CACb,CAAC;IAEF,MAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;AAC1F,IAAA,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,QAAQ,CAAC,EAAE;QACxF,MAAM,CAAC,IAAI,CAAC,CAAA,oBAAA,EAAuB,SAAS,CAAC,aAAa,CAAE,CAAA,CAAC,CAAC;QAC9D,IAAI;AACF,YAAA,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AACrC,YAAA,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;YAClE,OAAOA,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC;AAC/C,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,OAAOA,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;AACzD,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;AAEjD,QAAA,MAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;AACJ,QAAA,MAAM,GAAG,CAAC;AACX,KAAA;AAAM,SAAA;AACL,QAAA,OAAO,QAAQ,CAAC;AACjB,KAAA;AACH;;AC1MA;AA8CA;;;;AAIG;AACa,SAAA,SAAS,CAAC,cAAA,GAAmC,EAAE,EAAA;IAC7D,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAC3D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,SAAU,SAAQ,iBAAiB,CAAA;AA4C9C,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,UACEC,QAAM,GAAGC,MAAU,CAAC,IAAI,EACxB,kBAAkB,GAAG,EAAE,EACvB,sBAAsB,GAAG,EAAE,MACP,EAAE,EAAA;AAExB,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,MAAM,GAAGD,QAAM,CAAC;AACrB,QAAA,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,CAAC,CAAC;KAChF;AApDD;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC;KAC1C;AAED;;;;;;AAMG;IACH,IAAW,kBAAkB,CAAC,kBAA+B,EAAA;AAC3D,QAAA,IAAI,CAAC,SAAS,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;KACxD;AAED;;;;AAIG;AACH,IAAA,IAAW,sBAAsB,GAAA;AAC/B,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,sBAAsB,CAAC;KAC9C;AAED;;;;AAIG;IACH,IAAW,sBAAsB,CAAC,sBAAmC,EAAA;AACnE,QAAA,IAAI,CAAC,SAAS,CAAC,sBAAsB,GAAG,sBAAsB,CAAC;KAChE;AAgBM,IAAA,WAAW,CAAC,OAAwB,EAAA;AACzC,QAAA,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO;YAAE,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAEvE,QAAA,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;QACzB,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,KAAK,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;KAC7F;AAEO,IAAA,UAAU,CAAC,OAAwB,EAAA;AACzC,Q
AAA,IAAI,CAAC,MAAM,CAAC,CAAA,SAAA,EAAY,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA,CAAE,CAAC,CAAC;KAC7D;AAEO,IAAA,WAAW,CAAC,QAA+B,EAAA;QACjD,IAAI,CAAC,MAAM,CAAC,CAAA,sBAAA,EAAyB,QAAQ,CAAC,MAAM,CAAE,CAAA,CAAC,CAAC;AACxD,QAAA,IAAI,CAAC,MAAM,CAAC,CAAY,SAAA,EAAA,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA,CAAE,CAAC,CAAC;AACrE,QAAA,OAAO,QAAQ,CAAC;KACjB;AACF;;ACxID;AACA;AAqDA;;;;AAIG;AACG,SAAU,0BAA0B,CAAC,SAA6B,EAAA;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAEe,SAAA,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc,EAAA;AAEd,IAAA,IAAI,MAAc,CAAC;AACnB,IAAA,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;AACxB,KAAA;AAAM,SAAA,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;AACvC,QAAA,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClC,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB;;AC5EA;AAiGA;;;AAGG;AACG,SAAU,4BAA4B,CAAC,aAA4B,EAAA;AACvE,IAAA,MAAM,MAAM,GAAG,IAAI,GAAG,EAAU,CAAC;AACjC,IAAA,KAAK,MAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,MAAM,iBAAiB,GAAG,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QAC9D,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AAChC,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB;;ACjHA;SAOgB,sBAAsB,GAAA;AACpC,IAAA,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;SAEe,uBAAuB,GAAA;AACrC,IAAA,MAAM,WAAW,GAAG;AAClB,QAAA,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;AAEF,IAAA,MAAM,MAAM,GAAG;AACb,QAAA,GAAG,EAAE,IAAI;AACT,QAAA,KAAK,EAAE,CAAI,CAAA,EAAAE,aAAE,CAAC,IAAI,EAAE,CAAI,CAAA,EAAAA,aAAE,CAAC,IAAI,EAAE,CAAI,CAAA,EAAAA,aAAE,CAAC,OAAO,EAAE,CAAG,CAAA,CAAA;KACrD,CAAC;AAEF,IAAA,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B;;ACvBA;AA+BA,SAAS,cAAc,GAAA;AACrB,IAAA,MAAM,aAAa,GAAG;AACpB,QAAA,GAAG,EAAE,WAAW;QAChB,KAAK,EAAE,SAAS,CAAC,eAAe;KACjC,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAY,GAAG,GAAG,EAClB,cAAc,GAAG,GAAG,EAAA;AAEpB,IAAA,OAAO,aAAa;AACjB,SAAA,GAAG,CAAC,CAAC,IAAI,KAAI;AACZ,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,CAAA,EAAG,cAAc,CAAG,EAAA,IAAI,CAAC,KAAK,CAAA,CAAE,GAAG,EAAE,CAAC;AACjE,QAAA,OAAO,GAAG,IAAI,CAAC,GAAG,CAAG,EAAA,KAAK,EAAE,CAAC;AAC/B,KAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAEM,MAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE;;;AAGG;SACa,wBAAwB,GAAA;AACtC,IAAA,MAAM,WAAW,GAAG,cAAc,EAAE,CAAC;AACrC,IAAA,MAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,MAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;AAC/E,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;AAIG;AACG,SAAU,eAAe,CAAC,aAA6B,EAAA;AAC3D,IAAA,MAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,KAAK,IAAI;UAC3E,sBAAsB,EAAE;AAC1B,UAAE,aAAa,CAAC,GAAG,CAAC;AACxB,IAAA,MAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,KAAK,SAAS,IAAI,aAAa,CAAC,KAAK,KAAK,IAAI;UAC/E,wBAAwB,EAAE;AAC5B,UAAE,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,eAAgB,SAAQ,iBAAiB,CAAA;AACpD,IAAA,WAAA,CACW,WAA0B,EAC1B,QAA8B,EAC7B,SAAiB,EACjB,WAAmB,EAAA;AAE7B,QAAA,KAAK,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;QALpB,IAAW,CAAA,WAAA,GAAX,WAAW,CAAe;QAC1B,IAAQ,CAAA,QAAA,GAAR,QAAQ,CAAsB;QAC7B,IAAS,CAAA,SAAA,GAAT,SAAS,CAAQ;QACjB,IAAW,CAAA,WAAA,GAAX,WAAW,CAAQ;KAG9B;AAED,IAAA,WAAW,CAAC,OAAwB,EAAA;AAClC,QAAA,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AAED;;AAEG;AACH,IAAA,kBAAkB,CAAC,OAAwB,EAAA;AACzC,QAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,YAAA,OAAO,CAAC,OAA
O,GAAG,IAAI,WAAW,EAAE,CAAC;AACrC,SAAA;AAED,QAAA,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;AAC5D,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;AACvD,SAAA;KACF;AACF;;ACtHD;AACA;AAEA;;AAEG;AACSC,uCAqBX;AArBD,CAAA,UAAY,qBAAqB,EAAA;AAC/B;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,GAAS,CAAA;AACT;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,GAAS,CAAA;AACT;;AAEG;AACH,IAAA,qBAAA,CAAA,KAAA,CAAA,GAAA,IAAU,CAAA;AACV;;AAEG;AACH,IAAA,qBAAA,CAAA,OAAA,CAAA,GAAA,GAAW,CAAA;AACX;;AAEG;AACH,IAAA,qBAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACjB,CAAC,EArBWA,6BAAqB,KAArBA,6BAAqB,GAqBhC,EAAA,CAAA,CAAA;;AC3BD;AA8CA;AACO,MAAM,sBAAsB,GAAuB;AACxD,IAAA,uBAAuB,EAAE,IAAI;AAC7B,IAAA,iBAAiB,EAAE,IAAI;AACvB,IAAA,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;CACjC,CAAC;AAEF;;;;;;;;;;;;AAYG;AACH,eAAe,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB,EAAA;;;AAInB,IAAA,eAAe,iBAAiB,GAAA;AAC9B,QAAA,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;AAC/B,aAAA;YAAC,OAAM,EAAA,EAAA;AACN,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;;YAG1C,IAAI,UAAU,KAAK,IAAI,EAAE;AACvB,gBAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,OAAO,UAAU,CAAC;AACnB,SAAA;KACF;AAED,IAAA,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;AACrB,QAAA,MAAM,KAAK,CAAC,iBAAiB,CAAC,CAAC;AAE/B,QAAA,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;AACnC,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;AAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD,EAAA;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;AAErC,IAAA,MAAM,OAAO,GACR,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,sBAAsB,CACtB,EAAA,kBAAkB,CACtB,CAAC;AAEF;;;AAGG;AACH,IAAA,MAAM,MAAM,GAAG;AACb;;AAEG;AACH,QAAA,IAAI,YAAY,GAAA;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;SAC/B;AACD;;;AAGG;AACH,QAAA,IAAI,aAAa,GAAA;;AACf,YAAA,QACE,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,CAAA,EAAA,GAAA,KAAK,KAAL,IAAA,IAAA,KAAK,uBAAL,KAAK,CAAE,kBAAkB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,CAAC,IAAI,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzE;SACH;AACD;;;AAGG;AACH,QAAA,IAAI,WAAW,GAAA;AACb,YAAA,QACE,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzF;SACH;KACF,CAAC;AAEF;;;AAGG;IACH,SAAS,OAAO,CAAC,eAAgC,EAAA;;AAC/C,QAAA,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;AAExB,YAAA,MAAM,iBAAiB,GAAG,MACxB,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;;;AAI/C,YAAA,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;;AAEzB,YAAA,CAAA,EAAA,GAAA,KAAK,KAAA,IAAA,IAAL,KAAK,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAL,KAAK,CAAE,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,IAAI,CAAC,GAAG,EAAE,CACxC;AACE,iBAAA,IAAI,CAAC,CAAC,MAAM,KAAI;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;AACf,gBAAA,OAAO,KAAK,CAAC;AACf,aAAC,CAAC;AACD,iBAAA,KAAK,CAAC,CAAC,MAAM,KAAI;;;;gBAIhB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;AACb,gBAAA,MAAM,MAAM,CAAC;AACf,aAAC,CAAC,CAAC;AACN,SAAA;AAED,QAAA,OAAO,aAAqC,CAAC;KAC9C;AAED,IAAA,OAAO,OAAO,YAA6B,KAA0B;;;;;;;;;;QAWnE,IAAI,MAAM,CAAC,WAAW;AAAE,YAAA,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;AACvB,SAAA;AAED,QAAA,OAAO,KAAoB,CAAC;AAC9B,KAAC,CAAC;AACJ,CAAC;AAED;AAEA;;;;;;AAMG;AACa,SAAA,+BAA+B,CAC7C,UAA2B,EAC3B,MAAyB,EAAA;;IAGzB,MAAM,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,iBAAiB,CAAC;IAEvE,MAAM,+BAAgC,SAAQ,iBAAiB,CAAA;QAC7D,WAAmB,CAAA,UAAyB,EAAE,OAA6B,EAAA;AACzE,YAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC5B;QAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC
,UAAU,CAAC,UAAU,CAAC,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,QAAQ,CAAC;gBAC/B,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,gBAAA,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,iBAAA;AACF,aAAA,CAAC,CAAC;AACH,YAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,CAAA,OAAA,EAAU,KAAK,CAAA,CAAE,CAAC,CAAC;YACpF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;SAClD;AACF,KAAA;IAED,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,+BAA+B,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACjE;KACF,CAAC;AACJ;;ACxQA;AAYA;;;AAGG;SACa,kCAAkC,GAAA;IAChD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,kCAAkC,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAED;;;AAGG;AACG,MAAO,kCAAmC,SAAQ,iBAAiB,CAAA;AACvE;;;;;AAKG;;;IAGH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;;;;AAKG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,OAAO,CAAC,kBAAkB,GAAG,KAAK,CAAC;QACnC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACnDD;AAYA;;;AAGG;AACa,SAAA,6BAA6B,CAC3C,mBAAmB,GAAG,wBAAwB,EAAA;IAE9C,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAEK,MAAO,6BAA8B,SAAQ,iBAAiB,CAAA;AAClE,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACrB,oBAA4B,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFnB,IAAoB,CAAA,oBAAA,GAApB,oBAAoB,CAAQ;KAGrC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;AACxD,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,OAAO,CAAC,SAAS,CAAC,CAAC;AACnE,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACzCD;AAMA,IAAI,gBAAwC,CAAC;SAE7B,0BAA0B,GAAA;IACxC,IAAI,CAAC,gBAAgB,EAAE;AACrB,QAAA,gBAAgB,GAAG,IAAIC,mBAAiB,EAAE,CAAC;AAC5C,KAAA;AAED,IAAA,OAAO,gBAAgB,CAAC;AAC1B;;ACdA;SAegB,YAAY,GAAA;IAC1B,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,YAAY,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC9C;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACH,MAAM,YAAa,SAAQ,iBAAiB,CAAA;AAC1C;;AAEG;IACH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;AAEG;IACI,MAAM,WAAW,CAAC,OAAwB,EAAA;;AAE/C,QAAA,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAI,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACpE,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACtC,YAAA,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;gBACvB,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACzE,aAAA;AACF,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;AC/CD;AAgBA;;;AAGG;AACI,MAAM,iBAAiB,GAAa,EAAE,CAAC;AAC9C,IAAI,iBAAiB,GAAY,KAAK,CAAC;AAEvC;AACA,MAAM,iBAAiB,GAAyB,IAAI,GAAG,EAAE,CAAC;AAE1D,SAAS,yBAAyB,GAAA;IAChC,IAAI,CAAC,OAAO,EAAE;AACZ,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,MAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;AAE5D,IAAA,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;;;;AAIG;AACH,SAAS,UAAU,CACjB,GAAW,EACX,WAAqB,EACrB,WAAkC,EAAA;AAElC,IAAA,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;AAC5B,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IACD,MAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,WAAW,KAAX,IAAA,IAAA,WAAW,KAAX,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,WAAW,CAAE,GAAG,CAAC,IAAI,CAAC,EAAE;AAC1B,QAAA,OAAO,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;AAC9B,KAAA;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;AAC3B,IAAA,KAA
K,MAAM,OAAO,IAAI,WAAW,EAAE;AACjC,QAAA,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;;;AAGtB,YAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;AACvB,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;AACvB,iBAAA;AACF,aAAA;AACF,SAAA;AAAM,aAAA;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;AACvB,aAAA;AACF,SAAA;AACF,KAAA;IACD,WAAW,KAAA,IAAA,IAAX,WAAW,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAX,WAAW,CAAE,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;AACvC,IAAA,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;AAEG;SACa,WAAW,GAAA;IACzB,MAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,iBAAiB,GAAG,IAAI,CAAC;AACzB,IAAA,IAAI,OAAO,EAAE;AACX,QAAA,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,IAAI,EAAE,CAAC;aAC1B,MAAM,CAAC,CAAC,IAAI,KAAK,IAAI,CAAC,MAAM,CAAC,CAAC;AAClC,KAAA;AAED,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;AAIG;AACG,SAAU,uBAAuB,CAAC,QAAiB,EAAA;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;AACb,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACF,KAAA;AAED,IAAA,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;IAC5E,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;AACnD,IAAA,MAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,GAAG,EAAE,CAAC;IAC1E,OAAO;AACL,QAAA,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ;QACR,QAAQ;KACT,CAAC;AACJ,CAAC;AAED;;;;;;AAMG;AACa,SAAA,WAAW,CACzB,aAA6B,EAC7B,OAGC,EAAA;IAED,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;AAC3C,KAAA;IACD,IAAI,CAAC,iBAAiB,EAAE;AACtB,QAAA,iBAAiB,CAAC,IAAI,CAAC,GAAG,WAAW,EAAE,CAAC,CAAC;AAC1C,KAAA;IACD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,oBAA0C,KAAI;AAChF,YAAA,OAAO,IAAI,WAAW,CACpB,UAAU,EACV,oBAAoB,EACpB,aAAc,EACd,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,iBAAiB,CAC3B,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,GAAW,EAAA;IAKrC,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACjC,IAAA,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;AAClB,QAAA,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;AAChC,KAAA;IAED,MAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACvC,IAAA,MAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,GAAG,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC;IAC3D,MAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,MAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;AACrC,IAAA,MAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;AACtC,IAAA,MAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,GAAG,IAAI,CAAC;AACpE,IAAA,MAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;AAC1E,IAAA,MAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ;QACR,QAAQ;QACR,cAAc;KACf,CAAC;AACJ,CAAC;AAEK,MAAO,WAAY,SAAQ,iBAAiB,CAAA;AAChD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACtB,aAA4B,EAC3B,iBAA4B,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAHpB,IAAa,CAAA,aAAA,GAAb,aAAa,CAAe;QAC3B,IAAiB,CAAA,iBAAA,GAAjB,iBAAiB,CAAW;KAGrC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;;QACzC,IACE,CAAC,OAAO,CAAC,aAAa;YACtB,CAAC,UAAU,CACT,OAAO,CAAC,GAAG,EACX,CAAA,EAAA,GAAA,IAAI,CAAC,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,iBAAiB,EAC3C,IAAI,CAAC,iBAAiB,GAAG,SAAS,GAAG,iBAAiB,CACvD,EACD;AACA,YAAA,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;AAC5C,SAAA;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACrMD;AAcgB,SAAA,oBAAoB,CAAC,YAAY,GAAG,EAAE,EAAA;IACpD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,oBAA
oB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAEK,MAAO,oBAAqB,SAAQ,iBAAiB,CAAA;AACzD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACpB,gBAAgB,EAAE,EAAA;AAE3B,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFlB,IAAa,CAAA,aAAA,GAAb,aAAa,CAAK;KAG5B;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,IAAI,CAAC,CAAC,QAAQ,KAAK,gBAAgB,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC;KAClE;AACF,CAAA;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B,EAAA;AAE/B,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,MAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;AACxE,QAAA,IAAI,MAAM,EAAE;YACV,MAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;AAI3C,iBAAA,KAAK,CAAC,MAAM,KAAK,CAAC;AAClB,iBAAA,IAAI,CAAC,CAAC,kBAAkB,KAAI;AAC3B,gBAAA,IAAI,kBAAkB,EAAE;;;AAGtB,oBAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEC,YAAkB,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;AACxD,iBAAA;AACD,gBAAA,OAAO,QAAQ,CAAC;aACjB,CAAC,EACJ;AACH,SAAA;AACF,KAAA;AAED,IAAA,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;AAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAW,GAAG,KAAK,EAAA;AAEnB,IAAA,MAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;AAC5D,IAAA,IAAI,WAAW,EAAE;AACf,QAAA,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AACtC,KAAA;;;AAID,IAAA,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;IAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;AAE1E,IAAA,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;AAKG;AACH,SAAS,yBAAyB,CAAC,IAAY,EAAA;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;AACzB,IAAA,IAAI,IAAI,EAAE;QACR,IAAI;AACF,YAAA,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;;AAElB,SAAA;AACD,QAAA,IACE,YAAY;AACZ,YAAA,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;AACvB,YAAA,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;AACA,YAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC/D,YAAA,IAAI,QAAQ,EAAE;AACZ,gBAAA,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;AACzB,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;AAKG;AACH,SAAS,sBAAsB,CAAC,GAAW,EAAA;AACzC,IAAA,IAAI,MAAM,CAAC;IACX,MAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;AAChE,IAAA,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;AAC3B,QAAA,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;AACtB,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,GAAG,CAAA,CAAA,CAAG,CAAC,CAAC;AAClF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;AAOG;AACH,eAAe,UAAU,CACvB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC,EAAA;AAEhC,IAAA,MAAM,OAAO,GAAG,CAAA,EAAG,SAAS,CAAa,UAAA,EAAA,QAAQ,kCAAkC,CAAC;AACpF,IAAA,MAAM,MAAM,GAAG,CAAA,EAAG,SAAS,CAAa,UAAA,EAAA,QAAQ,yBAAyB,CAAC;AAC1E,IAAA,MAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;AACzD,IAAA,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;AAC3B,IAAA,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AAClE,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;AAC3B,QAAA,MAAM,IAAI,KAAK,CAAC,uBAAuB,QAAQ,CAAA,yCAAA,CAA2C,CAAC,CAAC;AAC7F,KAAA;IACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;AAChE,CAAC;AAED;;;;;;;;AAQG;AACH,eAAe,qBAAqB,CAClC,MAA4B,EAC5B,GAAW,EACX,eAAgC,EAAA;AAEhC,IAAA,MAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;AAC9D,IAAA,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;AACrB,IAAA,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;AAC7D,IAAA,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,CAAC;AAC3B,IAAA,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;AACrF,Q
AAA,OAAO,IAAI,CAAC;AACb,KAAA;AAAM,SAAA;QACL,MAAM,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,CAAC;QACzC,OAAO,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,CAAC;AAC5D,KAAA;AACH;;AClMA;AAaA;;;;AAIG;AACG,SAAU,aAAa,CAC3B,sBAAgD,EAAA;IAEhD,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;SACvE;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,aAAc,SAAQ,iBAAiB,CAAA;AAClD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EACtB,sBAAgD,EAAA;AAEvD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAFpB,IAAsB,CAAA,sBAAA,GAAtB,sBAAsB,CAA0B;KAGxD;AAED,IAAA,WAAW,CAAC,OAAwB,EAAA;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KACzD;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,WAAW,KAChD,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAC1C,CAAC;KACH;AACF;;ACjDD;AAwBA;;;;;;;AAOG;AACG,SAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB,EAAA;IAEzB,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;;;;;AAMG;AACG,MAAO,sBAAuB,SAAQ,iBAAiB,CAAA;IAM3D,WACE,CAAA,UAAyB,EACzB,OAA6B,EAC7B,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB,EAAA;AAEzB,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;AACjF,QAAA,IAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;AAC7F,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;AACtC,QAAA,IAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;AAChD,cAAE,gBAAgB;cAChB,iCAAiC,CAAC;KACvC;AAEM,IAAA,WAAW,CAAC,OAAwB,EAAA;QACzC,OAAO,IAAI,CAAC,WAAW;AACpB,aAAA,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;AAC5B,aAAA,KAAK,CAAC,CAAC,KAAK,KAAK,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;KAClE;AACF,CAAA;AAED,eAAe,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB,EAAA;IAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;AAEpD,IAAA,SAAS,iBAAiB,CAAC,SAAiC,EAAE,KAAkB,EAAA;AAC9E,QAAA,IACE,KAAK;AACL,YAAA,KAAK,CAAC,IAAI;AACV,aAAC,KAAK,CAAC,IAAI,KAAK,WAAW;gBACzB,KAAK,CAAC,IAAI,KAAK,iBAAiB;gBAChC,KAAK,CAAC,IAAI,KAAK,cAAc;gBAC7B,KAAK,CAAC,IAAI,KAAK,YAAY;AAC3B,gBAAA,KAAK,CAAC,IAAI,KAAK,QAAQ,CAAC,EAC1B;AACA,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AAED,IAAA,IAAI,WAAW,CAAC,MAAM,CAAC,UAAU,EAAE,iBAAiB,EAAE,SAAS,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAE;;QAExF,IAAI;AACF,YAAA,MAAM,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrC,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;AACxD,SAAA;AAAC,QAAA,OAAO,SAAc,EAAE;AACvB,YAAA,OAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,SAAS,EAAE,SAAS,CAAC,CAAC;AACxE,SAAA;AACF,KAAA;AAAM,SAAA;AACL,QAAA,IAAI,GAAG,EAAE;;YAEP,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;AACxC,SAAA;AACD,QAAA,OAAO,iBAAiB,CAAC;AAC1B,KAAA;AACH;;AClIA;AACA;AAEA;;AAEG;AACI,MAAM,8BAA8B,GAAG,CAAC;;ACN/C;AAoBA,MAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AAExD;;;;;;;;;AASG;SACa,qBAAqB,GAAA;IACnC,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACvD;KACF,CAAC;AACJ,CAAC;AAED,MAAM,oBAAoB,GAAG,4BAA4B,CAAC;AAE1D;;;;;;;;AAQG;AACG,MAAO,qBAAsB,SAAQ,iBAAiB,CAAA;AAI1D,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,eAAiC,EAAA;AAEjC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAPrB,IAAe,CAAA,eAAA,GAAG,CAAC,CAAC;QAQ1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,IAAI,CAAC,uBAAuB,CAAC;KACxE;IAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,QAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC;AACzE
,QAAA,IACE,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe;AAC/C,YAAA,QAAQ,CAAC,MAAM,KAAK,WAAW,CAAC,kBAAkB,EAClD;AACA,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAM,aAAA;YACL,OAAO,IAAI,CAAC,eAAe,CAAC,WAAW,EAAE,QAAQ,CAAC,CAAC;AACpD,SAAA;KACF;AAEO,IAAA,MAAM,uBAAuB,CACnC,WAA4B,EAC5B,YAAmC,EAAA;;AAEnC,QAAA,MAAM,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;AAEF,QAAA,IAAI,gBAAgB,EAAE;YACpB,MAAM,SAAS,GACb,qBAAqB,CAAC,qBAAqB,CAAC,gBAAgB,CAAC,CAAC;AAChE,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,IAAI,CAAC,eAAe,IAAI,CAAC,CAAC;AAE1B,gBAAA,MAAM,KAAK,CAAC,SAAS,EAAE,SAAS,EAAE;oBAChC,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,oBAAA,aAAa,EAAE,oBAAoB;AACpC,iBAAA,CAAC,CAAC;AAEH,gBAAA,IAAI,MAAA,WAAW,CAAC,WAAW,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,OAAO,EAAE;AACpC,oBAAA,MAAM,IAAIhB,0BAAU,CAAC,oBAAoB,CAAC,CAAC;AAC5C,iBAAA;AAED,gBAAA,IAAI,IAAI,CAAC,eAAe,GAAG,8BAA8B,EAAE;AACzD,oBAAA,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AACtC,iBAAA;AAAM,qBAAA;oBACL,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAClD,iBAAA;AACF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,YAAY,CAAC;KACrB;IAEM,OAAO,qBAAqB,CAAC,WAAmB,EAAA;AACrD,QAAA,MAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;AAChD,QAAA,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;AACrC,YAAA,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;AACrE,SAAA;AAAM,aAAA;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;AACnC,SAAA;KACF;IAEM,OAAO,yBAAyB,CAAC,WAAmB,EAAA;QACzD,IAAI;AACF,YAAA,MAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,MAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AAC7C,YAAA,MAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;AAExB,YAAA,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;AAC9C,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;KACF;AACF;;AClID;AAqBA,MAAM,UAAU,GAAGiB,8BAAkB,CAAC;AACpC,IAAA,aAAa,EAAE,EAAE;AACjB,IAAA,SAAS,EAAE,EAAE;AACd,CAAA,CAAC,CAAC;AAYH;;;;AAIG;AACa,SAAA,aAAa,CAAC,cAAA,GAAuC,EAAE,EAAA;IACrE,OAAO;QACL,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;YAC7D,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAC/D;KACF,CAAC;AACJ,CAAC;AAED;;AAEG;AACG,MAAO,aAAc,SAAQ,iBAAiB,CAAA;AAGlD,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,cAAoC,EAAA;AAEpC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,SAAS,GAAG,cAAc,CAAC,SAAS,CAAC;KAC3C;IAEM,MAAM,WAAW,CAAC,OAAwB,EAAA;AAC/C,QAAA,IAAI,CAAC,OAAO,CAAC,cAAc,EAAE;YAC3B,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;QAED,MAAM,IAAI,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;QAEzC,IAAI,CAAC,IAAI,EAAE;YACT,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC7D,YAAA,IAAI,CAAC,kBAAkB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;AACxC,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC;AAChC,YAAA,MAAM,GAAG,CAAC;AACX,SAAA;KACF;AAED,IAAA,aAAa,CAAC,OAAwB,EAAA;;QACpC,IAAI;;;YAGF,MAAM,EAAE,IAAI,EAAE,GAAG,UAAU,CAAC,CAAA,KAAA,EAAQ,OAAO,CAAC,MAAM,CAAA,CAAE,EAAE;AACpD,gBAAA,cAAc,EAAE;oBACd,WAAW,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACL,OAAe,CAAC,WAAW,CAAA,EAAA,EAC/B,IAAI,EAAEC,oBAAQ,CAAC,MAAM,EACtB,CAAA;oBACD,cAAc,EAAE,OAAO,CAAC,cAAc;AACvC,iBAAA;AACF,aAAA,CAAC,CAAC;;AAGH,YAAA,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACvB,IAAI,CAAC,GAAG,EAAE,CAAC;AACX,gBAAA,OAAO,SAAS,CAAC;AAClB,aAAA;AAED,YAAA,MAAM,oBAAoB,GAAG,CAAA,EAAA,GAAA,OAAO,CAAC,cAAc,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC,CAAC;AAE1F,YAAA,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;AAC5C,gBAAA,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,oBAAoB,CAAC,CAAC;AACzD,aAAA;YAED,IAAI,CAAC,aAAa,CAAC;gBACjB,aAAa
,EAAE,OAAO,CAAC,MAAM;gBAC7B,UAAU,EAAE,OAAO,CAAC,GAAG;gBACvB,SAAS,EAAE,OAAO,CAAC,SAAS;AAC7B,aAAA,CAAC,CAAC;YAEH,IAAI,IAAI,CAAC,SAAS,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,iBAAiB,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AACtD,aAAA;;AAGD,YAAA,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;AACvC,YAAA,MAAM,iBAAiB,GAAGC,gCAAoB,CAAC,WAAW,CAAC,CAAC;AAC5D,YAAA,IAAI,iBAAiB,IAAIC,8BAAkB,CAAC,WAAW,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,aAAa,EAAE,iBAAiB,CAAC,CAAC;AACtD,gBAAA,MAAM,UAAU,GAAG,WAAW,CAAC,UAAU,IAAI,WAAW,CAAC,UAAU,CAAC,SAAS,EAAE,CAAC;;AAEhF,gBAAA,IAAI,UAAU,EAAE;oBACd,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;AAC/C,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACrF,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;KACF;IAEO,eAAe,CAAC,IAAU,EAAE,GAAQ,EAAA;QAC1C,IAAI;YACF,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,GAAG,CAAC,OAAO;AACrB,aAAA,CAAC,CAAC;YAEH,IAAI,GAAG,CAAC,UAAU,EAAE;gBAClB,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC;AACvD,aAAA;YACD,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACtF,SAAA;KACF;IAEO,kBAAkB,CAAC,IAAU,EAAE,QAA+B,EAAA;QACpE,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;YACvD,MAAM,gBAAgB,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;AACjE,YAAA,IAAI,gBAAgB,EAAE;AACpB,gBAAA,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;AACzD,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,EAAE;AACxB,aAAA,CAAC,CAAC;YACH,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,CAAC,OAAO,CAAC,CAAA,kDAAA,EAAqD,KAAK,CAAC,OAAO,CAAE,CAAA,CAAC,CAAC;AACtF,SAAA;KACF;AACF;;AC1KD;AAiKA;;AAEG;MACU,aAAa,CAAA;AAsBxB;;;;AAIG;AACH,IAAA,WAAA,CACE,WAAwD;;IAExD,OAA8B,EAAA;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,CAAC;QACtE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;AAElF,QAAA,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;AACjD,YAAA,MAAM,CAAC,IAAI,CAAC,8CAA8C,CAAC,CAAC;AAC5D,YAAA,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;AACzD,SAAA;AAAM,aAAA;YACL,IAAI,iBAAiB,GAAqC,SAAS,CAAC;AACpE,YAAA,IAAIC,0BAAiB,CAAC,WAAW,CAAC,EAAE;AAClC,gBAAA,MAAM,CAAC,IAAI,CACT,sFAAsF,CACvF,CAAC;;;;;;;gBAOF,MAAM,oBAAoB,GAA+B,MAAK;oBAC5D,IAAI,wBAAwB,GAAqC,SAAS,CAAC;;oBAE3E,MAAM,aAAa,GAAG,IAAI,CAAC;oBAC3B,MAAM,oBAAoB,GAAG,OAAO,CAAC;oBACrC,OAAO;wBACL,MAAM,CAAC,UAAyB,EAAE,aAAmC,EAAA;4BACnE,MAAM,gBAAgB,GAAG,mBAAmB,CAC1C,oBAAoB,EACpB,aAAa,CAAC,OAAO,CACtB,CAAC;4BAEF,IAAI,CAAC,gBAAgB,EAAE;AACrB,gCAAA,MAAM,IAAI,KAAK,CACb,CAAA,iKAAA,CAAmK,CACpK,CAAC;AACH,6BAAA;AAED,4BAAA,IAAI,wBAAwB,KAAK,SAAS,IAAI,wBAAwB,KAAK,IAAI,EAAE;AAC/E,gCAAA,wBAAwB,GAAG,+BAA+B,CACxD,WAAW,EACX,gBAAgB,CACjB,CAAC;AACH,6BAAA;4BAED,OAAO,wBAAwB,CAAC,MAAM,CAAC,UAAU,EAAE,aAAa,CAAC,CAAC;yBACnE;qBACF,CAAC;AACJ,iBAAC,CAAC;gBAEF,iBAAiB,GAAG,oBAAoB,EAAE,CAAC;AAC5C,aAAA;iBAAM,IAAI,WAAW,IAAI,OAAO,WAAW,CAAC,WAAW,KAAK,UAAU,EAAE;AACvE,gBAAA,MAAM,CAAC,IAAI,CAAC,kEAAkE,CAAC,CAAC;AAChF,gBAAA,iBAAiB,GAAG,aAAa,CAAC,WAAW,CAAC,CAAC;AAChD,aAAA;AAAM,iBAAA,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;AAC5D,gBAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,aAAA;AAED,YAAA,MAAM,CAAC,IAAI,CAAC,+CAA+C,CAAC,CAAC;AAC7D,YAAA,sBAAsB,GAAG,mCAAmC,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;YACzF,IAAI,OAAO,CAAC,sBAAsB,EAAE;;;gBAGlC,MAAM,yBAAyB,GAC7B,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;A
ACzD,gBAAA,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;AACpD,iBAAA;AACF,aAAA;AACF,SAAA;AACD,QAAA,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;KACvD;AAED;;AAEG;AACH,IAAA,WAAW,CAAC,OAAgD,EAAA;AAC1D,QAAA,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;AAC5E,YAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,SAAA;AAED,QAAA,IAAI,WAA4B,CAAC;QACjC,IAAI;AACF,YAAA,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;AACvB,aAAA;AAAM,iBAAA;AACL,gBAAA,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AAChC,gBAAA,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAC5C,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAC9B,SAAA;AAED,QAAA,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3E,YAAA,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;AACjE,gBAAA,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,MAAM,oBAAoB,CACxB,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B,EAAA;;AAE/B,QAAA,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;AACpD,YAAA,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;AACtC,YAAA,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;AACxC,SAAA;QAED,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,iBAAiB,CAAC;AACxE,QAAA,MAAM,WAAW,GAAoB,IAAI,WAAW,EAAE,CAAC;AAEvD,QAAA,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,MAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;AACZ,gBAAA,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;AACH,aAAA;AAED,YAAA,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;AAC9C,YAAA,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,MAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;AACtB,gBAAA,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;AAC3C,aAAA;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;AACzE,gBAAA,KAAK,MAAM,YAAY,IAAI,aAAa,CAAC,aAAa,EAAE;AACtD,oBAAA,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,EACxC,iBAAiB,CAClB,CAAC;AACF,oBAAA,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;AAC9B,wBAAA,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;AAC3D,qBAAA;AACD,oBAAA,UAAU,CAAC,UAAU,CACnB,IAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,GAAG,EACrF,iBAAiB,CAClB,CAAC;AACH,iBAAA;AACF,aAAA;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;AAC7E,gBAAA,KAAK,MAAM,cAAc,IAAI,aAAa,CAAC,eAAe,EAAE;AAC1D,oBAAA,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,oBAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,mBAAmB,KAAK,IAAI,EAAE;wBACrE,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,EAC1C,iBAAiB,CAClB,CAAC;AACF,wBAAA,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;AAC7C,4BAAA,cAAc,CAAC,gBAAgB,KAAK,IAAI,EACxC;AACA,4BAAA,IAAI,cAAc,CAAC,gBAAgB,KAAKR,6BAAqB,CAAC,KAAK,EAAE;AACnE,gCAAA,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;;oCAEpC,SAAS;AACV,iCAAA;AAAM,qCAAA;AACL,oCAAA,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;AACvC,wCAAA,MAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC;AACxB,4CAAA,IAAI,KAAK,SAAS,IAAI,IAAI,KAAK,IAAI,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;AAC9D,qCAAA;AACF,iCAAA;AACF,6BAAA;AAAM,iCAAA,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;AAC7D,gCAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAA
mB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACjF,6BAAA;AACF,yBAAA;AACD,wBAAA,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;AAChC,4BAAA,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;AACtC,gCAAA,KAAK,MAAM,KAAK,IAAI,mBAAmB,EAAE;AACvC,oCAAA,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;AACxC,wCAAA,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;AAC7E,qCAAA;AACF,iCAAA;AACF,6BAAA;AAAM,iCAAA;AACL,gCAAA,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;AAC/D,6BAAA;AACF,yBAAA;AACD,wBAAA,IACE,cAAc,CAAC,gBAAgB,KAAK,SAAS;4BAC7C,cAAc,CAAC,gBAAgB,KAAK,IAAI;AACxC,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;AAC/D,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;AAC7D,4BAAA,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACjF,yBAAA;AACD,wBAAA,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;AACH,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,MAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;AACzE,YAAA,IAAI,WAAW,IAAI,aAAa,CAAC,WAAW,EAAE;gBAC5C,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;AACtD,aAAA;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;AAClC,gBAAA,KAAK,MAAM,eAAe,IAAI,aAAa,CAAC,gBAAgB,EAAE;AAC5D,oBAAA,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,oBAAA,IAAI,WAAW,KAAK,SAAS,IAAI,WAAW,KAAK,IAAI,EAAE;wBACrD,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,EAC3C,iBAAiB,CAClB,CAAC;AACF,wBAAA,MAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;AACxE,6BAAA,sBAAsB,CAAC;AAC1B,wBAAA,IAAI,sBAAsB,EAAE;4BAC1B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;AAC1C,gCAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AACzE,6BAAA;AACF,yBAAA;AAAM,6BAAA;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;AACnC,gCAAA,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;AACH,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AAED,YAAA,MAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;AAC3E,YAAA,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;AACzB,oBAAA,KAAK,MAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;AACpD,wBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;AACpF,qBAAA;AACF,iBAAA;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,oBAAA,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AAC/C,iBAAA;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;AACnB,oBAAA,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AACvC,iBAAA;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,oBAAA,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;AACzD,iBAAA;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;AAC9B,oBAAA,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;AAC7D,iBAAA;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;;AAEtB,oBAAA,WAAmB,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;AACxD,iBAAA;gBAED,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,oBAAA,WAAW,CAAC,cAAc,GAAG,OAAO,CAAC,cAAc,CAAC;AACrD,iBAAA;gBAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,SAAS,IAAI,OAAO,CAAC,iBAAiB,KAAK,IAAI,EAAE;AACjF,oBAAA,WAAW,CAAC,iBAAiB,GAAG,OAAO,CAAC,iBAAiB,CAAC;AAC3D,iBAAA;AACF,aAAA;AAED,YAAA,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;AAE3E,YAAA,IAAI,WAAW,CAAC,yBAAyB,KAAK,SAAS,EAAE;AACvD,gBAAA,WAAW,CAAC,yBAAyB,GAAG,4BAA4B,CAAC,aAAa,CAAC,CAAC;AACrF,aAAA;AAED,YAAA,IAAI,WAAkC,CAAC;AACvC,YAAA,IAAI,gBAAgB,CAAC;YACrB,IAAI;gBACF,WAAW,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AACnD,aAAA;AAAC,YAAA,OAAO,KAAU,EAAE;gBACnB,gBAAgB,GAAG,KAAK,CAAC;AAC1B,aAAA;AACD,YAAA,IAAI,gBAAgB,EAAE;gBACpB,IAAI,gBAAgB,CAAC,QAAQ,EAAE;AAC7B,oBAAA,gBAAgB,C
AAC,OAAO,GAAG,eAAe,CACxC,gBAAgB,CAAC,QAAQ,EACzB,aAAa,CAAC,SAAS,CAAC,gBAAgB,CAAC,UAAU,CAAC;AAClD,wBAAA,aAAa,CAAC,SAAS,CAAC,SAAS,CAAC,CACrC,CAAC;AACH,iBAAA;AACD,gBAAA,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,gBAAgB,CAAC,CAAC;AAC3C,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,GAAG,OAAO,CAAC,OAAO,CACtB,eAAe,CAAC,WAAY,EAAE,aAAa,CAAC,SAAS,CAAC,WAAY,CAAC,MAAM,CAAC,CAAC,CAC5E,CAAC;AACH,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAChC,SAAA;QAED,MAAM,EAAE,GAAG,QAAQ,CAAC;AACpB,QAAA,IAAI,EAAE,EAAE;YACN,MAAM;iBACH,IAAI,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,CAAC;iBACvF,KAAK,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5B,SAAA;AAED,QAAA,OAAO,MAAM,CAAC;KACf;AACF,CAAA;AAEK,SAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAAA;;IAE5B,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,EAAE,CAAC;AAC9E,IAAA,MAAM,cAAc,GAAgC;AAClD,QAAA,QAAQ,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,QAAQ,mCAAI,EAAE;AAC1C,QAAA,WAAW,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,WAAW,mCAAI,KAAK;AACnD,QAAA,UAAU,EAAE,CAAA,EAAA,GAAA,iBAAiB,CAAC,UAAU,mCAAI,WAAW;KACxD,CAAC;AAEF,IAAA,MAAM,UAAU,GAAG,iBAAiB,CAAC,UAAU,CAAC;IAChD,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;AACjE,QAAA,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;AAEF,QAAA,MAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;AACpD,QAAA,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,cAAc,EAAE,cAAc,EAAE,YAAY,EAAE,kBAAkB,EAAE,GAC3F,UAAU,CAAC;AACb,QAAA,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QAEtC,IAAI;AACF,YAAA,IAAI,CAAC,WAAW,CAAC,IAAI,KAAK,SAAS,IAAI,WAAW,CAAC,IAAI,KAAK,IAAI,KAAK,QAAQ,EAAE;gBAC7E,MAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;AACF,gBAAA,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,EAC9B,cAAc,CACf,CAAC;AAEF,gBAAA,MAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAEhD,IAAI,aAAa,CAAC,KAAK,EAAE;AACvB,oBAAA,MAAM,QAAQ,GAAG,kBAAkB,GAAG,CAAS,MAAA,EAAA,kBAAkB,CAAE,CAAA,GAAG,OAAO,CAAC;AAC9E,oBAAA,MAAM,KAAK,GAAG,wBAAwB,CACpC,YAAY,EACZ,QAAQ,EACR,QAAQ,EACR,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACF,oBAAA,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BS,kBAAwB,CACtB,KAAK,EACL,cAAc,IAAI,OAAO,IAAI,cAAe,EAC5C,QAAQ,EACR,YAAY,CACb,EACD;4BACE,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;AACX,yBAAA,CACF,CAAC;AACH,qBAAA;yBAAM,IAAI,CAAC,QAAQ,EAAE;AACpB,wBAAA,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,KAAK,EAAE;4BACrC,QAAQ,EAAE,OAAO,IAAI,cAAc;4BACnC,UAAU;AACX,yBAAA,CAAC,CAAC;AACJ,qBAAA;AACF,iBAAA;AAAM,qBAAA,IACL,QAAQ,KAAK,UAAU,CAAC,MAAM;AAC9B,qBAAC,CAAA,CAAA,EAAA,GAAA,aAAa,CAAC,WAAW,0CAAE,KAAK,CAAC,YAAY,CAAC,KAAI,aAAa,CAAC,SAAS,KAAK,MAAM,CAAC,EACtF;;;oBAGA,OAAO;AACR,iBAAA;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;AACrD,iBAAA;AACF,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CACb,CAAA,OAAA,EAAU,KAAK,CAAC,OAAO,2CAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,CAAA,CAAA,CAAG,CACL,CAAC;AACH,SAAA;AACF,KAAA;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;AAC1F,QAAA,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;AAC1B,QAAA,KAAK,MAAM,iBAAiB,IAAI,aAAa,CAAC,kBAAkB,EAAE;AAChE,YAAA,MAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;AACF,YAAA,IAAI,sBAAsB,KAAK,SAAS,IAAI,sBAAsB,KAAK,IAAI,EAAE;AAC3E,gBAAA,MAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,
CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,EAC7C,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;AACH,CAAC;AAED;;AAEG;AACH,SAAS,wBAAwB,CAC/B,YAAgC,EAChC,QAAgB,EAChB,QAAgB,EAChB,eAAoB,EACpB,OAAoC,EAAA;;;AAIpC,IAAA,IAAI,YAAY,IAAI,CAAC,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC/E,MAAM,MAAM,GAAQ,EAAE,CAAC;AACvB,QAAA,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,eAAe,CAAC;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,EAAE,CAAC,QAAQ,GAAG,YAAY,EAAE,CAAC;AACnD,QAAA,OAAO,MAAM,CAAC;AACf,KAAA;AAED,IAAA,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC,EAAA;AAEjC,IAAA,IAAI,MAAc,CAAC;AACnB,IAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;AAChB,KAAA;AAAM,SAAA;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;AAC/B,QAAA,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;AAC/B,YAAA,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;AACxB,SAAA;AACF,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,iBAAmD,EACnD,OAA6B,EAAA;IAE7B,MAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;AAClF,KAAA;AAED,IAAA,IAAI,iBAAiB,EAAE;AACrB,QAAA,SAAS,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;AACnC,KAAA;IAED,MAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,MAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;AAC/C,QAAA,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;AAC5F,KAAA;AACD,IAAA,SAAS,CAAC,IAAI,CAAC,cAAc,EAAE,CAAC,CAAC;IACjC,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;AAEzE,IAAA,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;AAC1B,QAAA,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;AACzC,QAAA,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;AACzC,QAAA,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;AACzC,KAAA;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;AAE3E,IAAA,IAAI,MAAM,EAAE;QACV,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;AACpD,KAAA;AAED,IAAA,SAAS,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC;AAEnD,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED;;;;;AAKG;AACa,SAAA,yBAAyB,CACvC,eAAwC,EACxC,iBAAwC,EAAA;IAExC,MAAM,sBAAsB,GAA2B,EAAE,CAAC;IAE1D,IAAI,eAAe,CAAC,iBAAiB,EAAE;AACrC,QAAA,sBAAsB,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,CAAC;AAC7C,KAAA;IAED,IAAI,cAAc,GAAG,SAAS,CAAC;IAC/B,IAAI,eAAe,CAAC,gBAAgB,IAAI,eAAe,CAAC,gBAAgB,CAAC,eAAe,EAAE;QACxF,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,gBAAgB,CAAC,eAAe,CAAC,CAAC;;;AAIrE,QAAA,MAAM,oBAAoB,GAAG,wBAAwB,EAAE,CAAC;QACxD,IAAI,aAAa,CAAC,OAAO,CAAC,oBAAoB,CAAC,KAAK,CAAC,CAAC,EAAE;AACtD,YAAA,aAAa,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AAC1C,SAAA;AAED,QAAA,cAAc,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC1C,KAAA;IAED,MAAM,gBAAgB,mCACjB,uBAAuB,CAAA,EACvB,eAAe,CAAC,gBAAgB,CACpC,CAAC;IAEF,MAAM,YAAY,mCACb,mBAAmB,CAAA,EACnB,eAAe,CAAC,YAAY,CAChC,CAAC;IAEF,MAAM,eAAe,mCAChB,sBAAsB,CAAA,EACtB,eAAe,CAAC,eAAe,CACnC,CAAC;AAEF,IAAA,IAAI,MAAM,EAAE;QACV,sBAAsB,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;AACxE,KAAA;IAED,MAAM,sBAAsB,mCACvB,6BAA6B,CAAA,EAC7B,eAAe,CAAC,sBAAsB,CAC1C,CAAC;AAEF,IAAA,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,eAAe,CAAC,cAAc,CAClC,CAAC;IAEF,sBAAsB,CAAC,IAAI,CACzB,aAAa,CAAC,EAAE,SAAS,EAAE,cAAc,EAAE,CAAC,EAC5C,eAAe,CAAC,gBAAgB,CAAC,EACjC,eAAe,CAAC,EAAE,KAAK,EAAE,cAAc,EAAE,CAAC,EAC1C,6BAA6B,EAAE,EAC/B,qBAAqB,CAAC,sBAAsB,CAAC,oBAAoB,CAAC,EAClE,qBAAqB,EAAE,EACvB,sBAAsB,EAAE,EACxB,sBAAsB,CACpB,YAAY,CAAC,UAAU,EACvB,YAAY,
CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB,CAC/B,CACF,CAAC;IAEF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,sBAAsB,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;AACzE,KAAA;AAED,IAAA,IAAI,iBAAiB,EAAE;AACrB,QAAA,sBAAsB,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;AAChD,KAAA;IAED,sBAAsB,CAAC,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,CAAC;AAEvD,IAAA,IAAI,MAAM,IAAI,eAAe,CAAC,kBAAkB,KAAK,KAAK,EAAE;AAC1D,QAAA,sBAAsB,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;AACnE,KAAA;IAED,OAAO;QACL,UAAU,EAAE,eAAe,CAAC,UAAU;QACtC,sBAAsB;KACvB,CAAC;AACJ,CAAC;AAsBD,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB,EAAA;AAEtB,IAAA,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAEK,SAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB,EAAA;;AAEtB,IAAA,IAAI,KAAU,CAAC;AACf,IAAA,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;AACrC,QAAA,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;AACjC,KAAA;IACD,MAAM,iBAAiB,GAAG,CAAA,EAAA,GAAA,kBAAkB,CAAC,OAAO,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,iBAAiB,CAAC;AACxE,IAAA,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;AAChC,QAAA,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;AAC9B,gBAAA,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;AACtC,aAAA;AAAM,iBAAA;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;AACF,gBAAA,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;AACvC,oBAAA,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;AACnF,iBAAA;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;AAC5B,gBAAA,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;AACb,wBAAA,eAAe,CAAC,QAAQ;AACxB,6BAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;AAClE,iBAAA;AACD,gBAAA,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;AAC7F,aAAA;;YAGD,MAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,EAAE,iBAAiB,CAAC,CAAC;AACtF,SAAA;AACF,KAAA;AAAM,SAAA;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;AACZ,SAAA;AAED,QAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;YACxC,MAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;AACF,YAAA,MAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;AAChE,YAAA,MAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;YAEF,MAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,EAAE,iBAAiB,CAAC,CAAC;AAC3F,YAAA,IAAI,aAAa,KAAK,SAAS,IAAI,aAAa,KAAK,IAAI,EAAE;gBACzD,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;AACZ,iBAAA;AACD,gBAAA,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;AACrC,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB,EAAA;AAEvB,IAAA,MAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;AACpC,QAAA,MAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;QAEnD,IAAI,MAAM,KAAK,SAAS,IAAI,MAAM,KAAK,IAAI,IAAI,iBAAiB,IAAI,MAAM,EAAE;AAC1E,YAAA,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;AACpC,SAAA;AAAM,aAAA;YACL,MAAM;AACP,SAAA;AACF,KAAA;AACD,IAAA,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;AAC9B,QAAA,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;AAC9B,QAAA,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;AAC7B,KAAA;AACD,IAAA,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;AAKG;AACa,SAAA,eAAe,CAC7B,SAAgC,EAChC,YAA2C,EAAA;AAE3C,IAAA,MAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;AAC9C,IAAA,MAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;AAE3D,IAAA,MAAM,oBAAoB,GAAG,CAC3B,GAAM,KAGJ;AACF,QAAA,OAAO,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;AAC7C,YAAA,KAAK,EAAE,SAAS;AACjB,SAAA,CAEA,CAAC;AACJ,KAAC,CAAC;AAEF,IAAA,IAAI,UAAU,EAAE;AACd,QAAA,MAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAA
K,QAAQ,EAAE;AACzB,YAAA,OAAO,oBAAoB,CACtB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,aAAa,CAChB,EAAA,EAAA,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;AACJ,SAAA;AAED,QAAA,MAAM,eAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;QAC3F,MAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,IAAI,CAC1D,CAAC,CAAC,KAAK,eAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,CAChD,CAAC;AACF,QAAA,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;AACjD,YAAA,MAAM,aAAa,GAAG,CAAC,IAAI,SAAS,CAAC,UAAU,IAAI,EAAE,CAAC,CAAyB,CAAC;YAEhF,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE;AAC9C,gBAAA,IAAI,eAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChD,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,aAAa,EAAE;gBACjB,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE;oBAC5C,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AACzC,iBAAA;AACF,aAAA;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;AACpC,YAAA,OAAO,aAAa,CAAC;AACtB,SAAA;AAED,QAAA,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,iCACtB,aAAa,CAAA,EACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACJ,SAAA;AACF,KAAA;AAED,IAAA,IACE,UAAU;AACV,QAAA,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;AACnC,QAAAC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;QAEA,OAAO,oBAAoB,CACtB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,aAAa,CAChB,EAAA,EAAA,IAAI,EAAE,SAAS,CAAC,UAAU,EAAA,CAAA,CAC1B,CAAC;AACJ,KAAA;IAED,OAAO,oBAAoB,iCACtB,aAAa,CAAA,EACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;AAED,SAAS,mBAAmB,CAC1B,OAA8B,EAC9B,OAAgB,EAAA;AAEhB,IAAA,IAAI,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,gBAAgB,EAAE;AAC7B,QAAA,MAAM,MAAM,GAAG,OAAO,CAAC,gBAAgB,CAAC;AACxC,QAAA,OAAO,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC;AAC1B,cAAE,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,KAAK,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,QAAQ,EAAE,CAAC;cAChD,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC,QAAQ,EAAE,CAAC;AAChC,KAAA;AAED,IAAA,IAAI,OAAO,EAAE;QACX,OAAO,CAAA,EAAG,OAAO,CAAA,SAAA,CAAW,CAAC;AAC9B,KAAA;AACD,IAAA,OAAO,SAAS,CAAC;AACnB;;ACrjCA;AA2BA;;;;;;;;AAQG;AACG,SAAU,kBAAkB,CAChC,IAAgB,EAAA;AAKhB,IAAA,OAAOC,8BAA6B,CAAC,IAAI,CAAC,CAAC;AAC7C;;AC3CA;AACA;AAIA;;AAEG;AACI,MAAM,oBAAoB,GAAG,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC;AAqBlD;;;;;;AAMG;MACU,wBAAwB,CAAA;AAInC;;;AAGG;AACH,IAAA,WAAA,CAAY,uBAA+B,oBAAoB,EAAA;QANvD,IAAW,CAAA,WAAA,GAAiB,SAAS,CAAC;AAO5C,QAAA,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;KAClD;AAED;;;AAGG;AACH,IAAA,cAAc,CAAC,WAAoC,EAAA;AACjD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;KAChC;AAED;;AAEG;IACH,cAAc,GAAA;QACZ,IACE,IAAI,CAAC,WAAW;AAChB,YAAA,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,oBAAoB,IAAI,IAAI,CAAC,WAAW,CAAC,kBAAkB,EAC7E;AACA,YAAA,IAAI,CAAC,WAAW,GAAG,SAAS,CAAC;AAC9B,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC;KACzB;AACF;;ACrED;AACA;AAIA;;;;AAIG;MACU,oBAAoB,CAAA;AAI/B,IAAA,WAAA,CACU,UAA2B,EAC3B,MAAyB,EACzB,uCAA+C,KAAK,EAAA;QAFpD,IAAU,CAAA,UAAA,GAAV,UAAU,CAAiB;QAC3B,IAAM,CAAA,MAAA,GAAN,MAAM,CAAmB;QACzB,IAAoC,CAAA,oCAAA,GAApC,oCAAoC,CAAgB;QALtD,IAAU,CAAA,UAAA,GAAG,CAAC,CAAC;KAMnB;AAEJ;;;AAGG;IACI,OAAO,GAAA;;AAEZ,QAAA,QACE,CAAC,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,oCAAoC,EAC5F;KACH;AAED;;;;;AAKG;IACK,MAAM,QAAQ,CAAC,OAAwB,EAAA;AAC7C,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;AAC7B,QAAA,MAAM,KAAK,GAAG,MAAM,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACnE,QAAA,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;QACzB,OAAO,KAAK,IAAI,SAAS,CAAC;KAC3B;AAED;;;AAGG;AACI,IAAA,OAAO,CAAC,OAAwB,EAAA;AACrC,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;AACvC,SAAA;QAED,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AACF;;ACvDD;AASA,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,MAAM,4BA
A4B,GAAG,OAAO,CAAC;AAE7C;;AAEG;MACU,8BAA8B,CAAA;AAiBzC;;;;;;AAMG;AACH,IAAA,WAAA,CACE,QAAgB,EAChB,QAAgB,EAChB,sBAA8B,4BAA4B,EAAA;AAhB5D;;;AAGG;QACH,IAAmB,CAAA,mBAAA,GAAW,4BAA4B,CAAC;AAczD,QAAA,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACzF,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;AACzF,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;AAED;;;;;AAKG;AACH,IAAA,WAAW,CAAC,WAA4B,EAAA;QACtC,MAAM,WAAW,GAAG,CAAA,EAAG,IAAI,CAAC,QAAQ,CAAA,CAAA,EAAI,IAAI,CAAC,QAAQ,CAAA,CAAE,CAAC;AACxD,QAAA,MAAM,kBAAkB,GAAG,CAAG,EAAA,IAAI,CAAC,mBAAmB,CAAA,CAAA,EAAIC,YAAmB,CAAC,WAAW,CAAC,EAAE,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;AAAE,YAAA,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;AAC3E,QAAA,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;AACF;;ACpED;AAqBA;;AAEG;MACU,iBAAiB,CAAA;AAU5B;;AAEG;AACH,IAAA,WAAA,CAAY,OAAgC,EAAA;AAC1C,QAAA,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;AAClE,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,wHAAA,CAA0H,CAC3H,CAAC;AACH,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;AACjC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;KAChC;AAED;;;;;AAKG;AACH,IAAA,WAAW,CAAC,WAA4B,EAAA;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,CAAA,qEAAA,CAAuE,CAAC,CACnF,CAAC;AACH,SAAA;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;AACjB,YAAA,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;AACxB,gBAAA,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;AACzC,aAAA;AACD,YAAA,KAAK,MAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;AACtC,gBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;AAChE,aAAA;AACF,SAAA;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,CAAA,yCAAA,CAA2C,CAAC,CAAC,CAAC;AAC/E,aAAA;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;AACpC,gBAAA,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;AACxB,aAAA;AACD,YAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;AAClC,oBAAA,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;AACxB,iBAAA;AACD,gBAAA,WAAW,CAAC,GAAG,IAAI,CAAA,EAAG,GAAG,CAAA,CAAA,EAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC;AAClD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;AACF;;ACtFD;AAKA;;AAEG;AACG,MAAO,gBAAiB,SAAQ,iBAAiB,CAAA;AACrD;;;;AAIG;AACH,IAAA,WAAA,CAAY,QAAgB,EAAA;QAC1B,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;AAC3D,YAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;AACrF,SAAA;AACD,QAAA,MAAM,OAAO,GAA4B;AACvC,YAAA,QAAQ,EAAE;AACR,gBAAA,aAAa,EAAE,QAAQ;AACxB,aAAA;SACF,CAAC;QACF,KAAK,CAAC,OAAO,CAAC,CAAC;KAChB;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/dom-shim.d.ts b/node_modules/@azure/core-http/dom-shim.d.ts new file mode 100644 index 0000000000..cb4ac4d933 --- /dev/null +++ b/node_modules/@azure/core-http/dom-shim.d.ts @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// d.ts shims provide types for things we use internally but are not part +// of this package's surface area. 
+ +interface Request {} +interface RequestInit {} +interface Response {} +interface Headers {} diff --git a/node_modules/@azure/core-http/node_modules/form-data/License b/node_modules/@azure/core-http/node_modules/form-data/License new file mode 100644 index 0000000000..c7ff12a2f8 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/@azure/core-http/node_modules/form-data/README.md.bak b/node_modules/@azure/core-http/node_modules/form-data/README.md.bak new file mode 100644 index 0000000000..298a1a2405 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/README.md.bak @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. + +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. 
+ +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. + +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. 
This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). +- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. 
Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... +}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. 
+- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@azure/core-http/node_modules/form-data/Readme.md b/node_modules/@azure/core-http/node_modules/form-data/Readme.md new file mode 100644 index 0000000000..298a1a2405 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/Readme.md @@ -0,0 +1,358 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. + +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v4.0.0.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v4.0.0.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
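For example, here is a minimal sketch of that kind of manipulation, assuming a placeholder upload URL and relying only on standard `http.ClientRequest` methods (`setTimeout`, `destroy`) on the request object that `submit()` hands back:

``` javascript
var FormData = require('form-data');
var fs = require('fs');

var form = new FormData();
form.append('my_file', fs.createReadStream('/foo/bar.jpg'));

// submit() returns the underlying http.ClientRequest,
// so it can be adjusted while the upload is in flight.
var req = form.submit('http://example.org/upload', function(err, res) {
  if (err) return console.error('upload failed:', err);
  console.log('status:', res.statusCode);
  res.resume();
});

// e.g. give up if the server takes too long to answer
req.setTimeout(10000, function() {
  req.destroy(new Error('upload timed out'));
});
```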
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- ```getLength(cb)``` will send an error as first parameter of callback if stream length cannot be calculated (e.g. send in custom streams w/o using ```knownLength```). +- ```submit``` will not add `content-length` if form length is unknown or not calculable. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@azure/core-http/node_modules/form-data/index.d.ts b/node_modules/@azure/core-http/node_modules/form-data/index.d.ts new file mode 100644 index 0000000000..295e9e9bc5 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. 
+interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js b/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000000..09e7c70e6e --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js b/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000000..18dc819cdc --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/form_data.js @@ -0,0 +1,501 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var Stream = require('stream').Stream; +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response or not a stream + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) && !(value instanceof Stream))) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err && err !== 'Unknown stream') { + this._error(err); + return; + } + + // add content length + if (length) { + request.setHeader('Content-Length', length); + } + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js b/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000000..4d35738dd5 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/@azure/core-http/node_modules/form-data/package.json b/node_modules/@azure/core-http/node_modules/form-data/package.json new file mode 100644 index 0000000000..0f20240bfb --- /dev/null +++ 
b/node_modules/@azure/core-http/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE b/node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE new file mode 100644 index 0000000000..22204e8758 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/LICENSE @@ -0,0 +1,12 @@ +Copyright (c) 2015, Salesforce.com, Inc. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/README.md b/node_modules/@azure/core-http/node_modules/tough-cookie/README.md new file mode 100644 index 0000000000..2dc9496d82 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/README.md @@ -0,0 +1,582 @@ +[RFC6265](https://tools.ietf.org/html/rfc6265) Cookies and CookieJar for Node.js + +[![npm package](https://nodei.co/npm/tough-cookie.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/tough-cookie/) + +[![Build Status](https://travis-ci.org/salesforce/tough-cookie.svg?branch=master)](https://travis-ci.org/salesforce/tough-cookie) + +# Synopsis + +``` javascript +var tough = require('tough-cookie'); +var Cookie = tough.Cookie; +var cookie = Cookie.parse(header); +cookie.value = 'somethingdifferent'; +header = cookie.toString(); + +var cookiejar = new tough.CookieJar(); +cookiejar.setCookie(cookie, 'http://currentdomain.example.com/path', cb); +// ... +cookiejar.getCookies('http://example.com/otherpath',function(err,cookies) { + res.headers['cookie'] = cookies.join('; '); +}); +``` + +# Installation + +It's _so_ easy! + +`npm install tough-cookie` + +Why the name? NPM modules `cookie`, `cookies` and `cookiejar` were already taken. + +## Version Support + +Support for versions of node.js will follow that of the [request](https://www.npmjs.com/package/request) module. + +# API + +## tough + +Functions on the module you get from `require('tough-cookie')`. All can be used as pure functions and don't need to be "bound". + +**Note**: prior to 1.0.x, several of these functions took a `strict` parameter. This has since been removed from the API as it was no longer necessary. + +### `parseDate(string)` + +Parse a cookie date string into a `Date`. Parses according to RFC6265 Section 5.1.1, not `Date.parse()`. + +### `formatDate(date)` + +Format a Date into a RFC1123 string (the RFC6265-recommended format). + +### `canonicalDomain(str)` + +Transforms a domain-name into a canonical domain-name. The canonical domain-name is a trimmed, lowercased, stripped-of-leading-dot and optionally punycode-encoded domain-name (Section 5.1.2 of RFC6265). For the most part, this function is idempotent (can be run again on its output without ill effects). + +### `domainMatch(str,domStr[,canonicalize=true])` + +Answers "does this real domain match the domain in a cookie?". The `str` is the "current" domain-name and the `domStr` is the "cookie" domain-name. Matches according to RFC6265 Section 5.1.3, but it helps to think of it as a "suffix match". + +The `canonicalize` parameter will run the other two parameters through `canonicalDomain` or not. + +### `defaultPath(path)` + +Given a current request/response path, gives the Path apropriate for storing in a cookie. This is basically the "directory" of a "file" in the path, but is specified by Section 5.1.4 of the RFC. + +The `path` parameter MUST be _only_ the pathname part of a URI (i.e. excludes the hostname, query, fragment, etc.). 
This is the `.pathname` property of node's `uri.parse()` output. + +### `pathMatch(reqPath,cookiePath)` + +Answers "does the request-path path-match a given cookie-path?" as per RFC6265 Section 5.1.4. Returns a boolean. + +This is essentially a prefix-match where `cookiePath` is a prefix of `reqPath`. + +### `parse(cookieString[, options])` + +alias for `Cookie.parse(cookieString[, options])` + +### `fromJSON(string)` + +alias for `Cookie.fromJSON(string)` + +### `getPublicSuffix(hostname)` + +Returns the public suffix of this hostname. The public suffix is the shortest domain-name upon which a cookie can be set. Returns `null` if the hostname cannot have cookies set for it. + +For example: `www.example.com` and `www.subdomain.example.com` both have public suffix `example.com`. + +For further information, see http://publicsuffix.org/. This module derives its list from that site. This call is currently a wrapper around [`psl`](https://www.npmjs.com/package/psl)'s [get() method](https://www.npmjs.com/package/psl#pslgetdomain). + +### `cookieCompare(a,b)` + +For use with `.sort()`, sorts a list of cookies into the recommended order given in the RFC (Section 5.4 step 2). The sort algorithm is, in order of precedence: + +* Longest `.path` +* oldest `.creation` (which has a 1ms precision, same as `Date`) +* lowest `.creationIndex` (to get beyond the 1ms precision) + +``` javascript +var cookies = [ /* unsorted array of Cookie objects */ ]; +cookies = cookies.sort(cookieCompare); +``` + +**Note**: Since JavaScript's `Date` is limited to a 1ms precision, cookies within the same milisecond are entirely possible. This is especially true when using the `now` option to `.setCookie()`. The `.creationIndex` property is a per-process global counter, assigned during construction with `new Cookie()`. This preserves the spirit of the RFC sorting: older cookies go first. This works great for `MemoryCookieStore`, since `Set-Cookie` headers are parsed in order, but may not be so great for distributed systems. Sophisticated `Store`s may wish to set this to some other _logical clock_ such that if cookies A and B are created in the same millisecond, but cookie A is created before cookie B, then `A.creationIndex < B.creationIndex`. If you want to alter the global counter, which you probably _shouldn't_ do, it's stored in `Cookie.cookiesCreated`. + +### `permuteDomain(domain)` + +Generates a list of all possible domains that `domainMatch()` the parameter. May be handy for implementing cookie stores. + +### `permutePath(path)` + +Generates a list of all possible paths that `pathMatch()` the parameter. May be handy for implementing cookie stores. + + +## Cookie + +Exported via `tough.Cookie`. + +### `Cookie.parse(cookieString[, options])` + +Parses a single Cookie or Set-Cookie HTTP header into a `Cookie` object. Returns `undefined` if the string can't be parsed. + +The options parameter is not required and currently has only one property: + + * _loose_ - boolean - if `true` enable parsing of key-less cookies like `=abc` and `=`, which are not RFC-compliant. + +If options is not an object, it is ignored, which means you can use `Array#map` with it. + +Here's how to process the Set-Cookie header(s) on a node HTTP/HTTPS response: + +``` javascript +if (res.headers['set-cookie'] instanceof Array) + cookies = res.headers['set-cookie'].map(Cookie.parse); +else + cookies = [Cookie.parse(res.headers['set-cookie'])]; +``` + +_Note:_ in version 2.3.3, tough-cookie limited the number of spaces before the `=` to 256 characters. 
This limitation has since been removed. +See [Issue 92](https://github.com/salesforce/tough-cookie/issues/92) + +### Properties + +Cookie object properties: + + * _key_ - string - the name or key of the cookie (default "") + * _value_ - string - the value of the cookie (default "") + * _expires_ - `Date` - if set, the `Expires=` attribute of the cookie (defaults to the string `"Infinity"`). See `setExpires()` + * _maxAge_ - seconds - if set, the `Max-Age=` attribute _in seconds_ of the cookie. May also be set to strings `"Infinity"` and `"-Infinity"` for non-expiry and immediate-expiry, respectively. See `setMaxAge()` + * _domain_ - string - the `Domain=` attribute of the cookie + * _path_ - string - the `Path=` of the cookie + * _secure_ - boolean - the `Secure` cookie flag + * _httpOnly_ - boolean - the `HttpOnly` cookie flag + * _sameSite_ - string - the `SameSite` cookie attribute (from [RFC6265bis]); must be one of `none`, `lax`, or `strict` + * _extensions_ - `Array` - any unrecognized cookie attributes as strings (even if equal-signs inside) + * _creation_ - `Date` - when this cookie was constructed + * _creationIndex_ - number - set at construction, used to provide greater sort precision (please see `cookieCompare(a,b)` for a full explanation) + +After a cookie has been passed through `CookieJar.setCookie()` it will have the following additional attributes: + + * _hostOnly_ - boolean - is this a host-only cookie (i.e. no Domain field was set, but was instead implied) + * _pathIsDefault_ - boolean - if true, there was no Path field on the cookie and `defaultPath()` was used to derive one. + * _creation_ - `Date` - **modified** from construction to when the cookie was added to the jar + * _lastAccessed_ - `Date` - last time the cookie got accessed. Will affect cookie cleaning once implemented. Using `cookiejar.getCookies(...)` will update this attribute. + +### `Cookie([{properties}])` + +Receives an options object that can contain any of the above Cookie properties, uses the default for unspecified properties. + +### `.toString()` + +encode to a Set-Cookie header value. The Expires cookie field is set using `formatDate()`, but is omitted entirely if `.expires` is `Infinity`. + +### `.cookieString()` + +encode to a Cookie header value (i.e. the `.key` and `.value` properties joined with '='). + +### `.setExpires(String)` + +sets the expiry based on a date-string passed through `parseDate()`. If parseDate returns `null` (i.e. can't parse this date string), `.expires` is set to `"Infinity"` (a string) is set. + +### `.setMaxAge(number)` + +sets the maxAge in seconds. Coerces `-Infinity` to `"-Infinity"` and `Infinity` to `"Infinity"` so it JSON serializes correctly. + +### `.expiryTime([now=Date.now()])` + +### `.expiryDate([now=Date.now()])` + +expiryTime() Computes the absolute unix-epoch milliseconds that this cookie expires. expiryDate() works similarly, except it returns a `Date` object. Note that in both cases the `now` parameter should be milliseconds. + +Max-Age takes precedence over Expires (as per the RFC). The `.creation` attribute -- or, by default, the `now` parameter -- is used to offset the `.maxAge` attribute. + +If Expires (`.expires`) is set, that's returned. + +Otherwise, `expiryTime()` returns `Infinity` and `expiryDate()` returns a `Date` object for "Tue, 19 Jan 2038 03:14:07 GMT" (latest date that can be expressed by a 32-bit `time_t`; the common limit for most user-agents). + +### `.TTL([now=Date.now()])` + +compute the TTL relative to `now` (milliseconds). 
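A rough sketch of how these expiry helpers interact (the cookie string and times are illustrative):

``` javascript
var Cookie = require('tough-cookie').Cookie;

var cookie = Cookie.parse('id=abc; Expires=Tue, 19 Jan 2038 03:14:07 GMT; Max-Age=3600');

// Max-Age takes precedence over Expires:
cookie.TTL();        // => 3600000 (milliseconds)
cookie.expiryTime(); // => cookie.creation.getTime() + 3600000

// with Max-Age cleared, the parsed Expires date is used instead:
cookie.maxAge = null;
cookie.expiryDate(); // => Date for 'Tue, 19 Jan 2038 03:14:07 GMT'
```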
The same precedence rules as for `expiryTime`/`expiryDate` apply. + +The "number" `Infinity` is returned for cookies without an explicit expiry and `0` is returned if the cookie is expired. Otherwise a time-to-live in milliseconds is returned. + +### `.canonicalizedDomain()` + +### `.cdomain()` + +return the canonicalized `.domain` field. This is lower-cased and punycode (RFC3490) encoded if the domain has any non-ASCII characters. + +### `.toJSON()` + +For convenience in using `JSON.serialize(cookie)`. Returns a plain-old `Object` that can be JSON-serialized. + +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are exported in ISO format (`.toISOString()`). + +**NOTE**: Custom `Cookie` properties will be discarded. In tough-cookie 1.x, since there was no `.toJSON` method explicitly defined, all enumerable properties were captured. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. + +### `Cookie.fromJSON(strOrObj)` + +Does the reverse of `cookie.toJSON()`. If passed a string, will `JSON.parse()` that first. + +Any `Date` properties (i.e., `.expires`, `.creation`, and `.lastAccessed`) are parsed via `Date.parse()`, not the tough-cookie `parseDate`, since it's JavaScript/JSON-y timestamps being handled at this layer. + +Returns `null` upon JSON parsing error. + +### `.clone()` + +Does a deep clone of this cookie, exactly implemented as `Cookie.fromJSON(cookie.toJSON())`. + +### `.validate()` + +Status: *IN PROGRESS*. Works for a few things, but is by no means comprehensive. + +validates cookie attributes for semantic correctness. Useful for "lint" checking any Set-Cookie headers you generate. For now, it returns a boolean, but eventually could return a reason string -- you can future-proof with this construct: + +``` javascript +if (cookie.validate() === true) { + // it's tasty +} else { + // yuck! +} +``` + + +## CookieJar + +Exported via `tough.CookieJar`. + +### `CookieJar([store],[options])` + +Simply use `new CookieJar()`. If you'd like to use a custom store, pass that to the constructor otherwise a `MemoryCookieStore` will be created and used. + +The `options` object can be omitted and can have the following properties: + + * _rejectPublicSuffixes_ - boolean - default `true` - reject cookies with domains like "com" and "co.uk" + * _looseMode_ - boolean - default `false` - accept malformed cookies like `bar` and `=bar`, which have an implied empty name. + * _prefixSecurity_ - string - default `silent` - set to `'unsafe-disabled'`, `'silent'`, or `'strict'`. See [Cookie Prefixes] below. + * _allowSpecialUseDomain_ - boolean - default `false` - accepts special-use domain suffixes, such as `local`. Useful for testing purposes. + This is not in the standard, but is used sometimes on the web and is accepted by (most) browsers. + +Since eventually this module would like to support database/remote/etc. CookieJars, continuation passing style is used for CookieJar methods. + +### `.setCookie(cookieOrString, currentUrl, [{options},] cb(err,cookie))` + +Attempt to set the cookie in the cookie jar. If the operation fails, an error will be given to the callback `cb`, otherwise the cookie is passed through. The cookie will have updated `.creation`, `.lastAccessed` and `.hostOnly` properties. + +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. 
+ * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _ignoreError_ - boolean - default `false` - silently ignore things like parse errors and invalid domains. `Store` errors aren't ignored by this option. + * _sameSiteContext_ - string - default unset - set to `'none'`, `'lax'`, or `'strict'` See [SameSite Cookies] below. + +As per the RFC, the `.hostOnly` property is set if there was no "Domain=" parameter in the cookie string (or `.domain` was null on the Cookie object). The `.domain` property is set to the fully-qualified hostname of `currentUrl` in this case. Matching this cookie requires an exact hostname match (not a `domainMatch` as per usual). + +### `.setCookieSync(cookieOrString, currentUrl, [{options}])` + +Synchronous version of `setCookie`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookies(currentUrl, [{options},] cb(err,cookies))` + +Retrieve the list of cookies that can be sent in a Cookie header for the current url. + +If an error is encountered, that's passed as `err` to the callback, otherwise an `Array` of `Cookie` objects is passed. The array is sorted with `cookieCompare()` unless the `{sort:false}` option is given. + +The `options` object can be omitted and can have the following properties: + + * _http_ - boolean - default `true` - indicates if this is an HTTP or non-HTTP API. Affects HttpOnly cookies. + * _secure_ - boolean - autodetect from url - indicates if this is a "Secure" API. If the currentUrl starts with `https:` or `wss:` then this is defaulted to `true`, otherwise `false`. + * _now_ - Date - default `new Date()` - what to use for the creation/access time of cookies + * _expire_ - boolean - default `true` - perform expiry-time checking of cookies and asynchronously remove expired cookies from the store. Using `false` will return expired cookies and **not** remove them from the store (which is useful for replaying Set-Cookie headers, potentially). + * _allPaths_ - boolean - default `false` - if `true`, do not scope cookies by path. The default uses RFC-compliant path scoping. **Note**: may not be supported by the underlying store (the default `MemoryCookieStore` supports it). + * _sameSiteContext_ - string - default unset - Set this to `'none'`, `'lax'` or `'strict'` to enforce SameSite cookies upon retrival. See [SameSite Cookies] below. + +The `.lastAccessed` property of the returned cookies will have been updated. + +### `.getCookiesSync(currentUrl, [{options}])` + +Synchronous version of `getCookies`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getCookieString(...)` + +Accepts the same options as `.getCookies()` but passes a string suitable for a Cookie header rather than an array to the callback. Simply maps the `Cookie` array via `.cookieString()`. + +### `.getCookieStringSync(...)` + +Synchronous version of `getCookieString`; only works with synchronous stores (e.g. the default `MemoryCookieStore`). + +### `.getSetCookieStrings(...)` + +Returns an array of strings suitable for **Set-Cookie** headers. Accepts the same options as `.getCookies()`. Simply maps the cookie array via `.toString()`. + +### `.getSetCookieStringsSync(...)` + +Synchronous version of `getSetCookieStrings`; only works with synchronous stores (e.g. 
the default `MemoryCookieStore`). + +### `.serialize(cb(err,serializedObject))` + +Serialize the Jar if the underlying store supports `.getAllCookies`. + +**NOTE**: Custom `Cookie` properties will be discarded. If you want a property to be serialized, add the property name to the `Cookie.serializableProperties` Array. + +See [Serialization Format]. + +### `.serializeSync()` + +Sync version of .serialize + +### `.toJSON()` + +Alias of .serializeSync() for the convenience of `JSON.stringify(cookiejar)`. + +### `CookieJar.deserialize(serialized, [store], cb(err,object))` + +A new Jar is created and the serialized Cookies are added to the underlying store. Each `Cookie` is added via `store.putCookie` in the order in which they appear in the serialization. + +The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. + +As a convenience, if `serialized` is a string, it is passed through `JSON.parse` first. If that throws an error, this is passed to the callback. + +### `CookieJar.deserializeSync(serialized, [store])` + +Sync version of `.deserialize`. _Note_ that the `store` must be synchronous for this to work. + +### `CookieJar.fromJSON(string)` + +Alias of `.deserializeSync` to provide consistency with `Cookie.fromJSON()`. + +### `.clone([store,]cb(err,newJar))` + +Produces a deep clone of this jar. Modifications to the original won't affect the clone, and vice versa. + +The `store` argument is optional, but should be an instance of `Store`. By default, a new instance of `MemoryCookieStore` is created. Transferring between store types is supported so long as the source implements `.getAllCookies()` and the destination implements `.putCookie()`. + +### `.cloneSync([store])` + +Synchronous version of `.clone`, returning a new `CookieJar` instance. + +The `store` argument is optional, but must be a _synchronous_ `Store` instance if specified. If not passed, a new instance of `MemoryCookieStore` is used. + +The _source_ and _destination_ must both be synchronous `Store`s. If one or both stores are asynchronous, use `.clone` instead. Recall that `MemoryCookieStore` supports both synchronous and asynchronous API calls. + +### `.removeAllCookies(cb(err))` + +Removes all cookies from the jar. + +This is a new backwards-compatible feature of `tough-cookie` version 2.5, so not all Stores will implement it efficiently. For Stores that do not implement `removeAllCookies`, the fallback is to call `removeCookie` after `getAllCookies`. If `getAllCookies` fails or isn't implemented in the Store, that error is returned. If one or more of the `removeCookie` calls fail, only the first error is returned. + +### `.removeAllCookiesSync()` + +Sync version of `.removeAllCookies()` + +## Store + +Base class for CookieJar stores. Available as `tough.Store`. + +## Store API + +The storage model for each `CookieJar` instance can be replaced with a custom implementation. The default is `MemoryCookieStore` which can be found in the `lib/memstore.js` file. The API uses continuation-passing-style to allow for asynchronous stores. + +Stores should inherit from the base `Store` class, which is available as `require('tough-cookie').Store`. + +Stores are asynchronous by default, but if `store.synchronous` is set to `true`, then the `*Sync` methods on the of the containing `CookieJar` can be used (however, the continuation-passing style + +All `domain` parameters will have been normalized before calling. 
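As a non-authoritative sketch of what a custom store can look like (the class name and its `Map` backing are made up; several method bodies are left as stubs, and the required methods are specified below):

``` javascript
const { Store } = require('tough-cookie');

class MapCookieStore extends Store {
  constructor() {
    super();
    this.synchronous = false;   // callback (CPS) API only; no *Sync support
    this.idx = new Map();       // hypothetical in-memory index
  }
  findCookie(domain, path, key, cb) {
    // exactly one cookie (or null) per domain/path/key
    cb(null, this.idx.get(`${domain};${path};${key}`) || null);
  }
  findCookies(domain, path, cb) {
    cb(null, []); // stub: return an optimistic candidate list (empty array if none)
  }
  putCookie(cookie, cb) {
    this.idx.set(`${cookie.domain};${cookie.path};${cookie.key}`, cookie);
    cb(null);
  }
  removeCookie(domain, path, key, cb) {
    this.idx.delete(`${domain};${path};${key}`);
    cb(null);
  }
  removeCookies(domain, path, cb) {
    cb(null); // stub: remove everything for the domain (all paths if path is null)
  }
  // updateCookie is optional; if missing, a stub that calls putCookie is added for you
}
```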
+ +The Cookie store must have all of the following methods. + +### `store.findCookie(domain, path, key, cb(err,cookie))` + +Retrieve a cookie with the given domain, path and key (a.k.a. name). The RFC maintains that exactly one of these cookies should exist in a store. If the store is using versioning, this means that the latest/newest such cookie should be returned. + +Callback takes an error and the resulting `Cookie` object. If no cookie is found then `null` MUST be passed instead (i.e. not an error). + +### `store.findCookies(domain, path, cb(err,cookies))` + +Locates cookies matching the given domain and path. This is most often called in the context of `cookiejar.getCookies()` above. + +If no cookies are found, the callback MUST be passed an empty array. + +The resulting list will be checked for applicability to the current request according to the RFC (domain-match, path-match, http-only-flag, secure-flag, expiry, etc.), so it's OK to use an optimistic search algorithm when implementing this method. However, the search algorithm used SHOULD try to find cookies that `domainMatch()` the domain and `pathMatch()` the path in order to limit the amount of checking that needs to be done. + +As of version 0.9.12, the `allPaths` option to `cookiejar.getCookies()` above will cause the path here to be `null`. If the path is `null`, path-matching MUST NOT be performed (i.e. domain-matching only). + +### `store.putCookie(cookie, cb(err))` + +Adds a new cookie to the store. The implementation SHOULD replace any existing cookie with the same `.domain`, `.path`, and `.key` properties -- depending on the nature of the implementation, it's possible that between the call to `fetchCookie` and `putCookie` that a duplicate `putCookie` can occur. + +The `cookie` object MUST NOT be modified; the caller will have already updated the `.creation` and `.lastAccessed` properties. + +Pass an error if the cookie cannot be stored. + +### `store.updateCookie(oldCookie, newCookie, cb(err))` + +Update an existing cookie. The implementation MUST update the `.value` for a cookie with the same `domain`, `.path` and `.key`. The implementation SHOULD check that the old value in the store is equivalent to `oldCookie` - how the conflict is resolved is up to the store. + +The `.lastAccessed` property will always be different between the two objects (to the precision possible via JavaScript's clock). Both `.creation` and `.creationIndex` are guaranteed to be the same. Stores MAY ignore or defer the `.lastAccessed` change at the cost of affecting how cookies are selected for automatic deletion (e.g., least-recently-used, which is up to the store to implement). + +Stores may wish to optimize changing the `.value` of the cookie in the store versus storing a new cookie. If the implementation doesn't define this method a stub that calls `putCookie(newCookie,cb)` will be added to the store object. + +The `newCookie` and `oldCookie` objects MUST NOT be modified. + +Pass an error if the newCookie cannot be stored. + +### `store.removeCookie(domain, path, key, cb(err))` + +Remove a cookie from the store (see notes on `findCookie` about the uniqueness constraint). + +The implementation MUST NOT pass an error if the cookie doesn't exist; only pass an error due to the failure to remove an existing cookie. + +### `store.removeCookies(domain, path, cb(err))` + +Removes matching cookies from the store. The `path` parameter is optional, and if missing means all paths in a domain should be removed. 
+ +Pass an error ONLY if removing any existing cookies failed. + +### `store.removeAllCookies(cb(err))` + +_Optional_. Removes all cookies from the store. + +Pass an error if one or more cookies can't be removed. + +**Note**: New method as of `tough-cookie` version 2.5, so not all Stores will implement this, plus some stores may choose not to implement this. + +### `store.getAllCookies(cb(err, cookies))` + +_Optional_. Produces an `Array` of all cookies during `jar.serialize()`. The items in the array can be true `Cookie` objects or generic `Object`s with the [Serialization Format] data structure. + +Cookies SHOULD be returned in creation order to preserve sorting via `compareCookies()`. For reference, `MemoryCookieStore` will sort by `.creationIndex` since it uses true `Cookie` objects internally. If you don't return the cookies in creation order, they'll still be sorted by creation time, but this only has a precision of 1ms. See `compareCookies` for more detail. + +Pass an error if retrieval fails. + +**Note**: not all Stores can implement this due to technical limitations, so it is optional. + +## MemoryCookieStore + +Inherits from `Store`. + +A just-in-memory CookieJar synchronous store implementation, used by default. Despite being a synchronous implementation, it's usable with both the synchronous and asynchronous forms of the `CookieJar` API. Supports serialization, `getAllCookies`, and `removeAllCookies`. + +## Community Cookie Stores + +These are some Store implementations authored and maintained by the community. They aren't official and we don't vouch for them but you may be interested to have a look: + +- [`db-cookie-store`](https://github.com/JSBizon/db-cookie-store): SQL including SQLite-based databases +- [`file-cookie-store`](https://github.com/JSBizon/file-cookie-store): Netscape cookie file format on disk +- [`redis-cookie-store`](https://github.com/benkroeger/redis-cookie-store): Redis +- [`tough-cookie-filestore`](https://github.com/mitsuru/tough-cookie-filestore): JSON on disk +- [`tough-cookie-web-storage-store`](https://github.com/exponentjs/tough-cookie-web-storage-store): DOM localStorage and sessionStorage + + +# Serialization Format + +**NOTE**: if you want to have custom `Cookie` properties serialized, add the property name to `Cookie.serializableProperties`. + +```js + { + // The version of tough-cookie that serialized this jar. + version: 'tough-cookie@1.x.y', + + // add the store type, to make humans happy: + storeType: 'MemoryCookieStore', + + // CookieJar configuration: + rejectPublicSuffixes: true, + // ... future items go here + + // Gets filled from jar.store.getAllCookies(): + cookies: [ + { + key: 'string', + value: 'string', + // ... + /* other Cookie.serializableProperties go here */ + } + ] + } +``` + +# RFC6265bis + +Support for RFC6265bis revision 02 is being developed. Since this is a bit of an omnibus revision to the RFC6252, support is broken up into the functional areas. + +## Leave Secure Cookies Alone + +Not yet supported. + +This change makes it so that if a cookie is sent from the server to the client with a `Secure` attribute, the channel must also be secure or the cookie is ignored. + +## SameSite Cookies + +Supported. + +This change makes it possible for servers, and supporting clients, to mitigate certain types of CSRF attacks by disallowing `SameSite` cookies from being sent cross-origin. + +On the Cookie object itself, you can get/set the `.sameSite` attribute, which will be serialized into the `SameSite=` cookie attribute. 
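For instance (illustrative cookie values):

``` javascript
var Cookie = require('tough-cookie').Cookie;

var cookie = Cookie.parse('session=abc123; SameSite=Lax');
cookie.sameSite;    // => 'lax'

cookie.sameSite = 'strict';
cookie.toString();  // => 'session=abc123; SameSite=Strict'
```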
When unset or `undefined`, no `SameSite=` attribute will be serialized. The valid values of this attribute are `'none'`, `'lax'`, or `'strict'`. Other values will be serialized as-is. + +When parsing cookies with a `SameSite` cookie attribute, values other than `'lax'` or `'strict'` are parsed as `'none'`. For example, `SomeCookie=SomeValue; SameSite=garbage` will parse so that `cookie.sameSite === 'none'`. + +In order to support SameSite cookies, you must provide a `sameSiteContext` option to _both_ `setCookie` and `getCookies`. Valid values for this option are just like for the Cookie object, but have particular meanings: +1. `'strict'` mode - If the request is on the same "site for cookies" (see the RFC draft for what this means), pass this option to add a layer of defense against CSRF. +2. `'lax'` mode - If the request is from another site, _but_ is directly because of navigation by the user, e.g., `` or ``, pass `sameSiteContext: 'lax'`. +3. `'none'` - Otherwise, pass `sameSiteContext: 'none'` (this indicates a cross-origin request). +4. unset/`undefined` - SameSite **will not** be enforced! This can be a valid use-case for when CSRF isn't in the threat model of the system being built. + +It is highly recommended that you read RFC 6265bis for fine details on SameSite cookies. In particular [Section 8.8](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-02#section-8.8) discusses security considerations and defense in depth. + +## Cookie Prefixes + +Supported. + +Cookie prefixes are a way to indicate that a given cookie was set with a set of attributes simply by inspecting the first few characters of the cookie's name. + +Cookie prefixes are defined in [Section 4.1.3 of 6265bis](https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-4.1.3). Two prefixes are defined: + +1. `"__Secure-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "__Secure-", then the cookie will have been set with a "Secure" attribute. +2. `"__Host-" Prefix`: If a cookie's name begins with a case-sensitive match for the string "__Host-", then the cookie will have been set with a "Secure" attribute, a "Path" attribute with a value of "/", and no "Domain" attribute. + +If `prefixSecurity` is enabled for `CookieJar`, then cookies that match the prefixes defined above but do not obey the attribute restrictions will not be added. + +You can define this functionality by passing in `prefixSecurity` option to `CookieJar`. It can be one of 3 values: + +1. `silent`: Enable cookie prefix checking but silently fail to add the cookie if conditions not met. Default. +2. `strict`: Enable cookie prefix checking and error out if conditions not met. +3. `unsafe-disabled`: Disable cookie prefix checking. + +Note that if `ignoreError` is passed in as `true` then the error will be silent regardless of `prefixSecurity` option (assuming it's enabled). + + +# Copyright and License + +BSD-3-Clause: + +```text + Copyright (c) 2015, Salesforce.com, Inc. + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + 3. 
Neither the name of Salesforce.com nor the names of its contributors may + be used to endorse or promote products derived from this software without + specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + POSSIBILITY OF SUCH DAMAGE. +``` diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js new file mode 100644 index 0000000000..a042893e11 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/cookie.js @@ -0,0 +1,1671 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +"use strict"; +const punycode = require("punycode"); +const urlParse = require("url").parse; +const util = require("util"); +const pubsuffix = require("./pubsuffix-psl"); +const Store = require("./store").Store; +const MemoryCookieStore = require("./memstore").MemoryCookieStore; +const pathMatch = require("./pathMatch").pathMatch; +const VERSION = require("./version"); +const { fromCallback } = require("universalify"); + +// From RFC6265 S4.1.1 +// note that it excludes \x3B ";" +const COOKIE_OCTETS = /^[\x21\x23-\x2B\x2D-\x3A\x3C-\x5B\x5D-\x7E]+$/; + +const CONTROL_CHARS = /[\x00-\x1F]/; + +// From Chromium // '\r', '\n' and '\0' should be treated as a terminator in +// the "relaxed" mode, see: +// https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/parsed_cookie.cc#L60 +const TERMINATORS = ["\n", "\r", "\0"]; + +// RFC6265 S4.1.1 defines path value as 'any CHAR except CTLs or ";"' +// Note ';' is \x3B +const PATH_VALUE = /[\x20-\x3A\x3C-\x7E]+/; + +// date-time parsing constants (RFC6265 S5.1.1) + +const DATE_DELIM = /[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]/; + +const MONTH_TO_NUM = { + jan: 0, + feb: 1, + mar: 2, + apr: 3, + may: 4, + jun: 5, + jul: 6, + aug: 7, + sep: 8, + oct: 9, + nov: 10, + dec: 11 +}; + +const MAX_TIME = 2147483647000; // 31-bit max +const MIN_TIME = 0; // 31-bit min +const SAME_SITE_CONTEXT_VAL_ERR = + 'Invalid sameSiteContext option for getCookies(); expected one of "strict", "lax", or "none"'; + +function checkSameSiteContext(value) { + const context = String(value).toLowerCase(); + if (context === "none" || context === "lax" || context === "strict") { + return context; + } else { + return null; + } +} + +const PrefixSecurityEnum = Object.freeze({ + SILENT: "silent", + STRICT: "strict", + DISABLED: "unsafe-disabled" +}); + +// Dumped from ip-regex@4.0.0, with the following changes: +// * all capturing groups converted to non-capturing -- "(?:)" +// * support for IPv6 Scoped Literal ("%eth1") removed +// * lowercase hexadecimal only +var IP_REGEX_LOWERCASE =/(?:^(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}$)|(?:^(?:(?:[a-f\d]{1,4}:){7}(?:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){6}(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|:[a-f\d]{1,4}|:)|(?:[a-f\d]{1,4}:){5}(?::(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,2}|:)|(?:[a-f\d]{1,4}:){4}(?:(?::[a-f\d]{1,4}){0,1}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,3}|:)|(?:[a-f\d]{1,4}:){3}(?:(?::[a-f\d]{1,4}){0,2}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,4}|:)|(?:[a-f\d]{1,4}:){2}(?:(?::[a-f\d]{1,4}){0,3}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,5}|:)|(?:[a-f\d]{1,4}:){1}(?:(?::[a-f\d]{1,4}){0,4}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,6}|:)|(?::(?:(?::[a-f\d]{1,4}){0,5}:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)(?:\.(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]\d|\d)){3}|(?::[a-f\d]{1,4}){1,7}|:)))$)/; + +/* + * Parses a Natural number (i.e., non-negative integer) with either the + * *DIGIT ( non-digit *OCTET ) + * or + * *DIGIT + * grammar (RFC6265 S5.1.1). + * + * The "trailingOK" boolean controls if the grammar accepts a + * "( non-digit *OCTET )" trailer. 
+ */ +function parseDigits(token, minDigits, maxDigits, trailingOK) { + let count = 0; + while (count < token.length) { + const c = token.charCodeAt(count); + // "non-digit = %x00-2F / %x3A-FF" + if (c <= 0x2f || c >= 0x3a) { + break; + } + count++; + } + + // constrain to a minimum and maximum number of digits. + if (count < minDigits || count > maxDigits) { + return null; + } + + if (!trailingOK && count != token.length) { + return null; + } + + return parseInt(token.substr(0, count), 10); +} + +function parseTime(token) { + const parts = token.split(":"); + const result = [0, 0, 0]; + + /* RF6256 S5.1.1: + * time = hms-time ( non-digit *OCTET ) + * hms-time = time-field ":" time-field ":" time-field + * time-field = 1*2DIGIT + */ + + if (parts.length !== 3) { + return null; + } + + for (let i = 0; i < 3; i++) { + // "time-field" must be strictly "1*2DIGIT", HOWEVER, "hms-time" can be + // followed by "( non-digit *OCTET )" so therefore the last time-field can + // have a trailer + const trailingOK = i == 2; + const num = parseDigits(parts[i], 1, 2, trailingOK); + if (num === null) { + return null; + } + result[i] = num; + } + + return result; +} + +function parseMonth(token) { + token = String(token) + .substr(0, 3) + .toLowerCase(); + const num = MONTH_TO_NUM[token]; + return num >= 0 ? num : null; +} + +/* + * RFC6265 S5.1.1 date parser (see RFC for full grammar) + */ +function parseDate(str) { + if (!str) { + return; + } + + /* RFC6265 S5.1.1: + * 2. Process each date-token sequentially in the order the date-tokens + * appear in the cookie-date + */ + const tokens = str.split(DATE_DELIM); + if (!tokens) { + return; + } + + let hour = null; + let minute = null; + let second = null; + let dayOfMonth = null; + let month = null; + let year = null; + + for (let i = 0; i < tokens.length; i++) { + const token = tokens[i].trim(); + if (!token.length) { + continue; + } + + let result; + + /* 2.1. If the found-time flag is not set and the token matches the time + * production, set the found-time flag and set the hour- value, + * minute-value, and second-value to the numbers denoted by the digits in + * the date-token, respectively. Skip the remaining sub-steps and continue + * to the next date-token. + */ + if (second === null) { + result = parseTime(token); + if (result) { + hour = result[0]; + minute = result[1]; + second = result[2]; + continue; + } + } + + /* 2.2. If the found-day-of-month flag is not set and the date-token matches + * the day-of-month production, set the found-day-of- month flag and set + * the day-of-month-value to the number denoted by the date-token. Skip + * the remaining sub-steps and continue to the next date-token. + */ + if (dayOfMonth === null) { + // "day-of-month = 1*2DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 1, 2, true); + if (result !== null) { + dayOfMonth = result; + continue; + } + } + + /* 2.3. If the found-month flag is not set and the date-token matches the + * month production, set the found-month flag and set the month-value to + * the month denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. + */ + if (month === null) { + result = parseMonth(token); + if (result !== null) { + month = result; + continue; + } + } + + /* 2.4. If the found-year flag is not set and the date-token matches the + * year production, set the found-year flag and set the year-value to the + * number denoted by the date-token. Skip the remaining sub-steps and + * continue to the next date-token. 
+ */ + if (year === null) { + // "year = 2*4DIGIT ( non-digit *OCTET )" + result = parseDigits(token, 2, 4, true); + if (result !== null) { + year = result; + /* From S5.1.1: + * 3. If the year-value is greater than or equal to 70 and less + * than or equal to 99, increment the year-value by 1900. + * 4. If the year-value is greater than or equal to 0 and less + * than or equal to 69, increment the year-value by 2000. + */ + if (year >= 70 && year <= 99) { + year += 1900; + } else if (year >= 0 && year <= 69) { + year += 2000; + } + } + } + } + + /* RFC 6265 S5.1.1 + * "5. Abort these steps and fail to parse the cookie-date if: + * * at least one of the found-day-of-month, found-month, found- + * year, or found-time flags is not set, + * * the day-of-month-value is less than 1 or greater than 31, + * * the year-value is less than 1601, + * * the hour-value is greater than 23, + * * the minute-value is greater than 59, or + * * the second-value is greater than 59. + * (Note that leap seconds cannot be represented in this syntax.)" + * + * So, in order as above: + */ + if ( + dayOfMonth === null || + month === null || + year === null || + second === null || + dayOfMonth < 1 || + dayOfMonth > 31 || + year < 1601 || + hour > 23 || + minute > 59 || + second > 59 + ) { + return; + } + + return new Date(Date.UTC(year, month, dayOfMonth, hour, minute, second)); +} + +function formatDate(date) { + return date.toUTCString(); +} + +// S5.1.2 Canonicalized Host Names +function canonicalDomain(str) { + if (str == null) { + return null; + } + str = str.trim().replace(/^\./, ""); // S4.1.2.3 & S5.2.3: ignore leading . + + // convert to IDN if any non-ASCII characters + if (punycode && /[^\u0001-\u007f]/.test(str)) { + str = punycode.toASCII(str); + } + + return str.toLowerCase(); +} + +// S5.1.3 Domain Matching +function domainMatch(str, domStr, canonicalize) { + if (str == null || domStr == null) { + return null; + } + if (canonicalize !== false) { + str = canonicalDomain(str); + domStr = canonicalDomain(domStr); + } + + /* + * S5.1.3: + * "A string domain-matches a given domain string if at least one of the + * following conditions hold:" + * + * " o The domain string and the string are identical. (Note that both the + * domain string and the string will have been canonicalized to lower case at + * this point)" + */ + if (str == domStr) { + return true; + } + + /* " o All of the following [three] conditions hold:" */ + + /* "* The domain string is a suffix of the string" */ + const idx = str.indexOf(domStr); + if (idx <= 0) { + return false; // it's a non-match (-1) or prefix (0) + } + + // next, check it's a proper suffix + // e.g., "a.b.c".indexOf("b.c") === 2 + // 5 === 3+2 + if (str.length !== domStr.length + idx) { + return false; // it's not a suffix + } + + /* " * The last character of the string that is not included in the + * domain string is a %x2E (".") character." */ + if (str.substr(idx-1,1) !== '.') { + return false; // doesn't align on "." + } + + /* " * The string is a host name (i.e., not an IP address)." */ + if (IP_REGEX_LOWERCASE.test(str)) { + return false; // it's an IP address + } + + return true; +} + +// RFC6265 S5.1.4 Paths and Path-Match + +/* + * "The user agent MUST use an algorithm equivalent to the following algorithm + * to compute the default-path of a cookie:" + * + * Assumption: the path (and not query part or absolute uri) is passed in. + */ +function defaultPath(path) { + // "2. 
If the uri-path is empty or if the first character of the uri-path is not + // a %x2F ("/") character, output %x2F ("/") and skip the remaining steps. + if (!path || path.substr(0, 1) !== "/") { + return "/"; + } + + // "3. If the uri-path contains no more than one %x2F ("/") character, output + // %x2F ("/") and skip the remaining step." + if (path === "/") { + return path; + } + + const rightSlash = path.lastIndexOf("/"); + if (rightSlash === 0) { + return "/"; + } + + // "4. Output the characters of the uri-path from the first character up to, + // but not including, the right-most %x2F ("/")." + return path.slice(0, rightSlash); +} + +function trimTerminator(str) { + for (let t = 0; t < TERMINATORS.length; t++) { + const terminatorIdx = str.indexOf(TERMINATORS[t]); + if (terminatorIdx !== -1) { + str = str.substr(0, terminatorIdx); + } + } + + return str; +} + +function parseCookiePair(cookiePair, looseMode) { + cookiePair = trimTerminator(cookiePair); + + let firstEq = cookiePair.indexOf("="); + if (looseMode) { + if (firstEq === 0) { + // '=' is immediately at start + cookiePair = cookiePair.substr(1); + firstEq = cookiePair.indexOf("="); // might still need to split on '=' + } + } else { + // non-loose mode + if (firstEq <= 0) { + // no '=' or is at start + return; // needs to have non-empty "cookie-name" + } + } + + let cookieName, cookieValue; + if (firstEq <= 0) { + cookieName = ""; + cookieValue = cookiePair.trim(); + } else { + cookieName = cookiePair.substr(0, firstEq).trim(); + cookieValue = cookiePair.substr(firstEq + 1).trim(); + } + + if (CONTROL_CHARS.test(cookieName) || CONTROL_CHARS.test(cookieValue)) { + return; + } + + const c = new Cookie(); + c.key = cookieName; + c.value = cookieValue; + return c; +} + +function parse(str, options) { + if (!options || typeof options !== "object") { + options = {}; + } + str = str.trim(); + + // We use a regex to parse the "name-value-pair" part of S5.2 + const firstSemi = str.indexOf(";"); // S5.2 step 1 + const cookiePair = firstSemi === -1 ? str : str.substr(0, firstSemi); + const c = parseCookiePair(cookiePair, !!options.loose); + if (!c) { + return; + } + + if (firstSemi === -1) { + return c; + } + + // S5.2.3 "unparsed-attributes consist of the remainder of the set-cookie-string + // (including the %x3B (";") in question)." plus later on in the same section + // "discard the first ";" and trim". + const unparsed = str.slice(firstSemi + 1).trim(); + + // "If the unparsed-attributes string is empty, skip the rest of these + // steps." + if (unparsed.length === 0) { + return c; + } + + /* + * S5.2 says that when looping over the items "[p]rocess the attribute-name + * and attribute-value according to the requirements in the following + * subsections" for every item. Plus, for many of the individual attributes + * in S5.3 it says to use the "attribute-value of the last attribute in the + * cookie-attribute-list". Therefore, in this implementation, we overwrite + * the previous value. 
+ */ + const cookie_avs = unparsed.split(";"); + while (cookie_avs.length) { + const av = cookie_avs.shift().trim(); + if (av.length === 0) { + // happens if ";;" appears + continue; + } + const av_sep = av.indexOf("="); + let av_key, av_value; + + if (av_sep === -1) { + av_key = av; + av_value = null; + } else { + av_key = av.substr(0, av_sep); + av_value = av.substr(av_sep + 1); + } + + av_key = av_key.trim().toLowerCase(); + + if (av_value) { + av_value = av_value.trim(); + } + + switch (av_key) { + case "expires": // S5.2.1 + if (av_value) { + const exp = parseDate(av_value); + // "If the attribute-value failed to parse as a cookie date, ignore the + // cookie-av." + if (exp) { + // over and underflow not realistically a concern: V8's getTime() seems to + // store something larger than a 32-bit time_t (even with 32-bit node) + c.expires = exp; + } + } + break; + + case "max-age": // S5.2.2 + if (av_value) { + // "If the first character of the attribute-value is not a DIGIT or a "-" + // character ...[or]... If the remainder of attribute-value contains a + // non-DIGIT character, ignore the cookie-av." + if (/^-?[0-9]+$/.test(av_value)) { + const delta = parseInt(av_value, 10); + // "If delta-seconds is less than or equal to zero (0), let expiry-time + // be the earliest representable date and time." + c.setMaxAge(delta); + } + } + break; + + case "domain": // S5.2.3 + // "If the attribute-value is empty, the behavior is undefined. However, + // the user agent SHOULD ignore the cookie-av entirely." + if (av_value) { + // S5.2.3 "Let cookie-domain be the attribute-value without the leading %x2E + // (".") character." + const domain = av_value.trim().replace(/^\./, ""); + if (domain) { + // "Convert the cookie-domain to lower case." + c.domain = domain.toLowerCase(); + } + } + break; + + case "path": // S5.2.4 + /* + * "If the attribute-value is empty or if the first character of the + * attribute-value is not %x2F ("/"): + * Let cookie-path be the default-path. + * Otherwise: + * Let cookie-path be the attribute-value." + * + * We'll represent the default-path as null since it depends on the + * context of the parsing. + */ + c.path = av_value && av_value[0] === "/" ? av_value : null; + break; + + case "secure": // S5.2.5 + /* + * "If the attribute-name case-insensitively matches the string "Secure", + * the user agent MUST append an attribute to the cookie-attribute-list + * with an attribute-name of Secure and an empty attribute-value." + */ + c.secure = true; + break; + + case "httponly": // S5.2.6 -- effectively the same as 'secure' + c.httpOnly = true; + break; + + case "samesite": // RFC6265bis-02 S5.3.7 + const enforcement = av_value ? av_value.toLowerCase() : ""; + switch (enforcement) { + case "strict": + c.sameSite = "strict"; + break; + case "lax": + c.sameSite = "lax"; + break; + default: + // RFC6265bis-02 S5.3.7 step 1: + // "If cookie-av's attribute-value is not a case-insensitive match + // for "Strict" or "Lax", ignore the "cookie-av"." + // This effectively sets it to 'none' from the prototype. + break; + } + break; + + default: + c.extensions = c.extensions || []; + c.extensions.push(av); + break; + } + } + + return c; +} + +/** + * If the cookie-name begins with a case-sensitive match for the + * string "__Secure-", abort these steps and ignore the cookie + * entirely unless the cookie's secure-only-flag is true. 
+ * @param cookie + * @returns boolean + */ +function isSecurePrefixConditionMet(cookie) { + return !cookie.key.startsWith("__Secure-") || cookie.secure; +} + +/** + * If the cookie-name begins with a case-sensitive match for the + * string "__Host-", abort these steps and ignore the cookie + * entirely unless the cookie meets all the following criteria: + * 1. The cookie's secure-only-flag is true. + * 2. The cookie's host-only-flag is true. + * 3. The cookie-attribute-list contains an attribute with an + * attribute-name of "Path", and the cookie's path is "/". + * @param cookie + * @returns boolean + */ +function isHostPrefixConditionMet(cookie) { + return ( + !cookie.key.startsWith("__Host-") || + (cookie.secure && + cookie.hostOnly && + cookie.path != null && + cookie.path === "/") + ); +} + +// avoid the V8 deoptimization monster! +function jsonParse(str) { + let obj; + try { + obj = JSON.parse(str); + } catch (e) { + return e; + } + return obj; +} + +function fromJSON(str) { + if (!str) { + return null; + } + + let obj; + if (typeof str === "string") { + obj = jsonParse(str); + if (obj instanceof Error) { + return null; + } + } else { + // assume it's an Object + obj = str; + } + + const c = new Cookie(); + for (let i = 0; i < Cookie.serializableProperties.length; i++) { + const prop = Cookie.serializableProperties[i]; + if (obj[prop] === undefined || obj[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if (prop === "expires" || prop === "creation" || prop === "lastAccessed") { + if (obj[prop] === null) { + c[prop] = null; + } else { + c[prop] = obj[prop] == "Infinity" ? "Infinity" : new Date(obj[prop]); + } + } else { + c[prop] = obj[prop]; + } + } + + return c; +} + +/* Section 5.4 part 2: + * "* Cookies with longer paths are listed before cookies with + * shorter paths. + * + * * Among cookies that have equal-length path fields, cookies with + * earlier creation-times are listed before cookies with later + * creation-times." + */ + +function cookieCompare(a, b) { + let cmp = 0; + + // descending for length: b CMP a + const aPathLen = a.path ? a.path.length : 0; + const bPathLen = b.path ? b.path.length : 0; + cmp = bPathLen - aPathLen; + if (cmp !== 0) { + return cmp; + } + + // ascending for time: a CMP b + const aTime = a.creation ? a.creation.getTime() : MAX_TIME; + const bTime = b.creation ? b.creation.getTime() : MAX_TIME; + cmp = aTime - bTime; + if (cmp !== 0) { + return cmp; + } + + // break ties for the same millisecond (precision of JavaScript's clock) + cmp = a.creationIndex - b.creationIndex; + + return cmp; +} + +// Gives the permutation of all possible pathMatch()es of a given path. The +// array is in longest-to-shortest order. Handy for indexing. +function permutePath(path) { + if (path === "/") { + return ["/"]; + } + const permutations = [path]; + while (path.length > 1) { + const lindex = path.lastIndexOf("/"); + if (lindex === 0) { + break; + } + path = path.substr(0, lindex); + permutations.push(path); + } + permutations.push("/"); + return permutations; +} + +function getCookieContext(url) { + if (url instanceof Object) { + return url; + } + // NOTE: decodeURI will throw on malformed URIs (see GH-32). + // Therefore, we will just skip decoding for such URIs. 
+ try { + url = decodeURI(url); + } catch (err) { + // Silently swallow error + } + + return urlParse(url); +} + +const cookieDefaults = { + // the order in which the RFC has them: + key: "", + value: "", + expires: "Infinity", + maxAge: null, + domain: null, + path: null, + secure: false, + httpOnly: false, + extensions: null, + // set by the CookieJar: + hostOnly: null, + pathIsDefault: null, + creation: null, + lastAccessed: null, + sameSite: "none" +}; + +class Cookie { + constructor(options = {}) { + if (util.inspect.custom) { + this[util.inspect.custom] = this.inspect; + } + + Object.assign(this, cookieDefaults, options); + this.creation = this.creation || new Date(); + + // used to break creation ties in cookieCompare(): + Object.defineProperty(this, "creationIndex", { + configurable: false, + enumerable: false, // important for assert.deepEqual checks + writable: true, + value: ++Cookie.cookiesCreated + }); + } + + inspect() { + const now = Date.now(); + const hostOnly = this.hostOnly != null ? this.hostOnly : "?"; + const createAge = this.creation + ? `${now - this.creation.getTime()}ms` + : "?"; + const accessAge = this.lastAccessed + ? `${now - this.lastAccessed.getTime()}ms` + : "?"; + return `Cookie="${this.toString()}; hostOnly=${hostOnly}; aAge=${accessAge}; cAge=${createAge}"`; + } + + toJSON() { + const obj = {}; + + for (const prop of Cookie.serializableProperties) { + if (this[prop] === cookieDefaults[prop]) { + continue; // leave as prototype default + } + + if ( + prop === "expires" || + prop === "creation" || + prop === "lastAccessed" + ) { + if (this[prop] === null) { + obj[prop] = null; + } else { + obj[prop] = + this[prop] == "Infinity" // intentionally not === + ? "Infinity" + : this[prop].toISOString(); + } + } else if (prop === "maxAge") { + if (this[prop] !== null) { + // again, intentionally not === + obj[prop] = + this[prop] == Infinity || this[prop] == -Infinity + ? this[prop].toString() + : this[prop]; + } + } else { + if (this[prop] !== cookieDefaults[prop]) { + obj[prop] = this[prop]; + } + } + } + + return obj; + } + + clone() { + return fromJSON(this.toJSON()); + } + + validate() { + if (!COOKIE_OCTETS.test(this.value)) { + return false; + } + if ( + this.expires != Infinity && + !(this.expires instanceof Date) && + !parseDate(this.expires) + ) { + return false; + } + if (this.maxAge != null && this.maxAge <= 0) { + return false; // "Max-Age=" non-zero-digit *DIGIT + } + if (this.path != null && !PATH_VALUE.test(this.path)) { + return false; + } + + const cdomain = this.cdomain(); + if (cdomain) { + if (cdomain.match(/\.$/)) { + return false; // S4.1.2.3 suggests that this is bad. 
domainMatch() tests confirm this + } + const suffix = pubsuffix.getPublicSuffix(cdomain); + if (suffix == null) { + // it's a public suffix + return false; + } + } + return true; + } + + setExpires(exp) { + if (exp instanceof Date) { + this.expires = exp; + } else { + this.expires = parseDate(exp) || "Infinity"; + } + } + + setMaxAge(age) { + if (age === Infinity || age === -Infinity) { + this.maxAge = age.toString(); // so JSON.stringify() works + } else { + this.maxAge = age; + } + } + + cookieString() { + let val = this.value; + if (val == null) { + val = ""; + } + if (this.key === "") { + return val; + } + return `${this.key}=${val}`; + } + + // gives Set-Cookie header format + toString() { + let str = this.cookieString(); + + if (this.expires != Infinity) { + if (this.expires instanceof Date) { + str += `; Expires=${formatDate(this.expires)}`; + } else { + str += `; Expires=${this.expires}`; + } + } + + if (this.maxAge != null && this.maxAge != Infinity) { + str += `; Max-Age=${this.maxAge}`; + } + + if (this.domain && !this.hostOnly) { + str += `; Domain=${this.domain}`; + } + if (this.path) { + str += `; Path=${this.path}`; + } + + if (this.secure) { + str += "; Secure"; + } + if (this.httpOnly) { + str += "; HttpOnly"; + } + if (this.sameSite && this.sameSite !== "none") { + const ssCanon = Cookie.sameSiteCanonical[this.sameSite.toLowerCase()]; + str += `; SameSite=${ssCanon ? ssCanon : this.sameSite}`; + } + if (this.extensions) { + this.extensions.forEach(ext => { + str += `; ${ext}`; + }); + } + + return str; + } + + // TTL() partially replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + // S5.3 says to give the "latest representable date" for which we use Infinity + // For "expired" we use 0 + TTL(now) { + /* RFC6265 S4.1.2.2 If a cookie has both the Max-Age and the Expires + * attribute, the Max-Age attribute has precedence and controls the + * expiration date of the cookie. + * (Concurs with S5.3 step 3) + */ + if (this.maxAge != null) { + return this.maxAge <= 0 ? 0 : this.maxAge * 1000; + } + + let expires = this.expires; + if (expires != Infinity) { + if (!(expires instanceof Date)) { + expires = parseDate(expires) || Infinity; + } + + if (expires == Infinity) { + return Infinity; + } + + return expires.getTime() - (now || Date.now()); + } + + return Infinity; + } + + // expiryTime() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere) + expiryTime(now) { + if (this.maxAge != null) { + const relativeTo = now || this.creation || new Date(); + const age = this.maxAge <= 0 ? 
-Infinity : this.maxAge * 1000; + return relativeTo.getTime() + age; + } + + if (this.expires == Infinity) { + return Infinity; + } + return this.expires.getTime(); + } + + // expiryDate() replaces the "expiry-time" parts of S5.3 step 3 (setCookie() + // elsewhere), except it returns a Date + expiryDate(now) { + const millisec = this.expiryTime(now); + if (millisec == Infinity) { + return new Date(MAX_TIME); + } else if (millisec == -Infinity) { + return new Date(MIN_TIME); + } else { + return new Date(millisec); + } + } + + // This replaces the "persistent-flag" parts of S5.3 step 3 + isPersistent() { + return this.maxAge != null || this.expires != Infinity; + } + + // Mostly S5.1.2 and S5.2.3: + canonicalizedDomain() { + if (this.domain == null) { + return null; + } + return canonicalDomain(this.domain); + } + + cdomain() { + return this.canonicalizedDomain(); + } +} + +Cookie.cookiesCreated = 0; +Cookie.parse = parse; +Cookie.fromJSON = fromJSON; +Cookie.serializableProperties = Object.keys(cookieDefaults); +Cookie.sameSiteLevel = { + strict: 3, + lax: 2, + none: 1 +}; + +Cookie.sameSiteCanonical = { + strict: "Strict", + lax: "Lax" +}; + +function getNormalizedPrefixSecurity(prefixSecurity) { + if (prefixSecurity != null) { + const normalizedPrefixSecurity = prefixSecurity.toLowerCase(); + /* The three supported options */ + switch (normalizedPrefixSecurity) { + case PrefixSecurityEnum.STRICT: + case PrefixSecurityEnum.SILENT: + case PrefixSecurityEnum.DISABLED: + return normalizedPrefixSecurity; + } + } + /* Default is SILENT */ + return PrefixSecurityEnum.SILENT; +} + +class CookieJar { + constructor(store, options = { rejectPublicSuffixes: true }) { + if (typeof options === "boolean") { + options = { rejectPublicSuffixes: options }; + } + this.rejectPublicSuffixes = options.rejectPublicSuffixes; + this.enableLooseMode = !!options.looseMode; + this.allowSpecialUseDomain = !!options.allowSpecialUseDomain; + this.store = store || new MemoryCookieStore(); + this.prefixSecurity = getNormalizedPrefixSecurity(options.prefixSecurity); + this._cloneSync = syncWrap("clone"); + this._importCookiesSync = syncWrap("_importCookies"); + this.getCookiesSync = syncWrap("getCookies"); + this.getCookieStringSync = syncWrap("getCookieString"); + this.getSetCookieStringsSync = syncWrap("getSetCookieStrings"); + this.removeAllCookiesSync = syncWrap("removeAllCookies"); + this.setCookieSync = syncWrap("setCookie"); + this.serializeSync = syncWrap("serialize"); + } + + setCookie(cookie, url, options, cb) { + let err; + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const loose = options.loose || this.enableLooseMode; + + let sameSiteContext = null; + if (options.sameSiteContext) { + sameSiteContext = checkSameSiteContext(options.sameSiteContext); + if (!sameSiteContext) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + // S5.3 step 1 + if (typeof cookie === "string" || cookie instanceof String) { + cookie = Cookie.parse(cookie, { loose: loose }); + if (!cookie) { + err = new Error("Cookie failed to parse"); + return cb(options.ignoreError ? null : err); + } + } else if (!(cookie instanceof Cookie)) { + // If you're seeing this error, and are passing in a Cookie object, + // it *might* be a Cookie object from another loaded version of tough-cookie. + err = new Error( + "First argument to setCookie must be a Cookie object or string" + ); + return cb(options.ignoreError ? 
null : err); + } + + // S5.3 step 2 + const now = options.now || new Date(); // will assign later to save effort in the face of errors + + // S5.3 step 3: NOOP; persistent-flag and expiry-time is handled by getCookie() + + // S5.3 step 4: NOOP; domain is null by default + + // S5.3 step 5: public suffixes + if (this.rejectPublicSuffixes && cookie.domain) { + const suffix = pubsuffix.getPublicSuffix(cookie.cdomain()); + if (suffix == null) { + // e.g. "com" + err = new Error("Cookie has domain set to a public suffix"); + return cb(options.ignoreError ? null : err); + } + } + + // S5.3 step 6: + if (cookie.domain) { + if (!domainMatch(host, cookie.cdomain(), false)) { + err = new Error( + `Cookie not in this host's domain. Cookie:${cookie.cdomain()} Request:${host}` + ); + return cb(options.ignoreError ? null : err); + } + + if (cookie.hostOnly == null) { + // don't reset if already set + cookie.hostOnly = false; + } + } else { + cookie.hostOnly = true; + cookie.domain = host; + } + + //S5.2.4 If the attribute-value is empty or if the first character of the + //attribute-value is not %x2F ("/"): + //Let cookie-path be the default-path. + if (!cookie.path || cookie.path[0] !== "/") { + cookie.path = defaultPath(context.pathname); + cookie.pathIsDefault = true; + } + + // S5.3 step 8: NOOP; secure attribute + // S5.3 step 9: NOOP; httpOnly attribute + + // S5.3 step 10 + if (options.http === false && cookie.httpOnly) { + err = new Error("Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + + // 6252bis-02 S5.4 Step 13 & 14: + if (cookie.sameSite !== "none" && sameSiteContext) { + // "If the cookie's "same-site-flag" is not "None", and the cookie + // is being set from a context whose "site for cookies" is not an + // exact match for request-uri's host's registered domain, then + // abort these steps and ignore the newly created cookie entirely." + if (sameSiteContext === "none") { + err = new Error( + "Cookie is SameSite but this is a cross-origin request" + ); + return cb(options.ignoreError ? null : err); + } + } + + /* 6265bis-02 S5.4 Steps 15 & 16 */ + const ignoreErrorForPrefixSecurity = + this.prefixSecurity === PrefixSecurityEnum.SILENT; + const prefixSecurityDisabled = + this.prefixSecurity === PrefixSecurityEnum.DISABLED; + /* If prefix checking is not disabled ...*/ + if (!prefixSecurityDisabled) { + let errorFound = false; + let errorMsg; + /* Check secure prefix condition */ + if (!isSecurePrefixConditionMet(cookie)) { + errorFound = true; + errorMsg = "Cookie has __Secure prefix but Secure attribute is not set"; + } else if (!isHostPrefixConditionMet(cookie)) { + /* Check host prefix condition */ + errorFound = true; + errorMsg = + "Cookie has __Host prefix but either Secure or HostOnly attribute is not set or Path is not '/'"; + } + if (errorFound) { + return cb( + options.ignoreError || ignoreErrorForPrefixSecurity + ? 
null + : new Error(errorMsg) + ); + } + } + + const store = this.store; + + if (!store.updateCookie) { + store.updateCookie = function(oldCookie, newCookie, cb) { + this.putCookie(newCookie, cb); + }; + } + + function withCookie(err, oldCookie) { + if (err) { + return cb(err); + } + + const next = function(err) { + if (err) { + return cb(err); + } else { + cb(null, cookie); + } + }; + + if (oldCookie) { + // S5.3 step 11 - "If the cookie store contains a cookie with the same name, + // domain, and path as the newly created cookie:" + if (options.http === false && oldCookie.httpOnly) { + // step 11.2 + err = new Error("old Cookie is HttpOnly and this isn't an HTTP API"); + return cb(options.ignoreError ? null : err); + } + cookie.creation = oldCookie.creation; // step 11.3 + cookie.creationIndex = oldCookie.creationIndex; // preserve tie-breaker + cookie.lastAccessed = now; + // Step 11.4 (delete cookie) is implied by just setting the new one: + store.updateCookie(oldCookie, cookie, next); // step 12 + } else { + cookie.creation = cookie.lastAccessed = now; + store.putCookie(cookie, next); // step 12 + } + } + + store.findCookie(cookie.domain, cookie.path, cookie.key, withCookie); + } + + // RFC6365 S5.4 + getCookies(url, options, cb) { + const context = getCookieContext(url); + if (typeof options === "function") { + cb = options; + options = {}; + } + + const host = canonicalDomain(context.hostname); + const path = context.pathname || "/"; + + let secure = options.secure; + if ( + secure == null && + context.protocol && + (context.protocol == "https:" || context.protocol == "wss:") + ) { + secure = true; + } + + let sameSiteLevel = 0; + if (options.sameSiteContext) { + const sameSiteContext = checkSameSiteContext(options.sameSiteContext); + sameSiteLevel = Cookie.sameSiteLevel[sameSiteContext]; + if (!sameSiteLevel) { + return cb(new Error(SAME_SITE_CONTEXT_VAL_ERR)); + } + } + + let http = options.http; + if (http == null) { + http = true; + } + + const now = options.now || Date.now(); + const expireCheck = options.expire !== false; + const allPaths = !!options.allPaths; + const store = this.store; + + function matchingCookie(c) { + // "Either: + // The cookie's host-only-flag is true and the canonicalized + // request-host is identical to the cookie's domain. + // Or: + // The cookie's host-only-flag is false and the canonicalized + // request-host domain-matches the cookie's domain." + if (c.hostOnly) { + if (c.domain != host) { + return false; + } + } else { + if (!domainMatch(host, c.domain, false)) { + return false; + } + } + + // "The request-uri's path path-matches the cookie's path." 
+ if (!allPaths && !pathMatch(path, c.path)) { + return false; + } + + // "If the cookie's secure-only-flag is true, then the request-uri's + // scheme must denote a "secure" protocol" + if (c.secure && !secure) { + return false; + } + + // "If the cookie's http-only-flag is true, then exclude the cookie if the + // cookie-string is being generated for a "non-HTTP" API" + if (c.httpOnly && !http) { + return false; + } + + // RFC6265bis-02 S5.3.7 + if (sameSiteLevel) { + const cookieLevel = Cookie.sameSiteLevel[c.sameSite || "none"]; + if (cookieLevel > sameSiteLevel) { + // only allow cookies at or below the request level + return false; + } + } + + // deferred from S5.3 + // non-RFC: allow retention of expired cookies by choice + if (expireCheck && c.expiryTime() <= now) { + store.removeCookie(c.domain, c.path, c.key, () => {}); // result ignored + return false; + } + + return true; + } + + store.findCookies( + host, + allPaths ? null : path, + this.allowSpecialUseDomain, + (err, cookies) => { + if (err) { + return cb(err); + } + + cookies = cookies.filter(matchingCookie); + + // sorting of S5.4 part 2 + if (options.sort !== false) { + cookies = cookies.sort(cookieCompare); + } + + // S5.4 part 3 + const now = new Date(); + for (const cookie of cookies) { + cookie.lastAccessed = now; + } + // TODO persist lastAccessed + + cb(null, cookies); + } + ); + } + + getCookieString(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies + .sort(cookieCompare) + .map(c => c.cookieString()) + .join("; ") + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + getSetCookieStrings(...args) { + const cb = args.pop(); + const next = function(err, cookies) { + if (err) { + cb(err); + } else { + cb( + null, + cookies.map(c => { + return c.toString(); + }) + ); + } + }; + args.push(next); + this.getCookies.apply(this, args); + } + + serialize(cb) { + let type = this.store.constructor.name; + if (type === "Object") { + type = null; + } + + // update README.md "Serialization Format" if you change this, please! + const serialized = { + // The version of tough-cookie that serialized this jar. Generally a good + // practice since future versions can make data import decisions based on + // known past behavior. When/if this matters, use `semver`. + version: `tough-cookie@${VERSION}`, + + // add the store type, to make humans happy: + storeType: type, + + // CookieJar configuration: + rejectPublicSuffixes: !!this.rejectPublicSuffixes, + + // this gets filled from getAllCookies: + cookies: [] + }; + + if ( + !( + this.store.getAllCookies && + typeof this.store.getAllCookies === "function" + ) + ) { + return cb( + new Error( + "store does not support getAllCookies and cannot be serialized" + ) + ); + } + + this.store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + serialized.cookies = cookies.map(cookie => { + // convert to serialized 'raw' cookies + cookie = cookie instanceof Cookie ? 
cookie.toJSON() : cookie; + + // Remove the index so new ones get assigned during deserialization + delete cookie.creationIndex; + + return cookie; + }); + + return cb(null, serialized); + }); + } + + toJSON() { + return this.serializeSync(); + } + + // use the class method CookieJar.deserialize instead of calling this directly + _importCookies(serialized, cb) { + let cookies = serialized.cookies; + if (!cookies || !Array.isArray(cookies)) { + return cb(new Error("serialized jar has no cookies array")); + } + cookies = cookies.slice(); // do not modify the original + + const putNext = err => { + if (err) { + return cb(err); + } + + if (!cookies.length) { + return cb(err, this); + } + + let cookie; + try { + cookie = fromJSON(cookies.shift()); + } catch (e) { + return cb(e); + } + + if (cookie === null) { + return putNext(null); // skip this cookie + } + + this.store.putCookie(cookie, putNext); + }; + + putNext(); + } + + clone(newStore, cb) { + if (arguments.length === 1) { + cb = newStore; + newStore = null; + } + + this.serialize((err, serialized) => { + if (err) { + return cb(err); + } + CookieJar.deserialize(serialized, newStore, cb); + }); + } + + cloneSync(newStore) { + if (arguments.length === 0) { + return this._cloneSync(); + } + if (!newStore.synchronous) { + throw new Error( + "CookieJar clone destination store is not synchronous; use async API instead." + ); + } + return this._cloneSync(newStore); + } + + removeAllCookies(cb) { + const store = this.store; + + // Check that the store implements its own removeAllCookies(). The default + // implementation in Store will immediately call the callback with a "not + // implemented" Error. + if ( + typeof store.removeAllCookies === "function" && + store.removeAllCookies !== Store.prototype.removeAllCookies + ) { + return store.removeAllCookies(cb); + } + + store.getAllCookies((err, cookies) => { + if (err) { + return cb(err); + } + + if (cookies.length === 0) { + return cb(null); + } + + let completedCount = 0; + const removeErrors = []; + + function removeCookieCb(removeErr) { + if (removeErr) { + removeErrors.push(removeErr); + } + + completedCount++; + + if (completedCount === cookies.length) { + return cb(removeErrors.length ? removeErrors[0] : null); + } + } + + cookies.forEach(cookie => { + store.removeCookie( + cookie.domain, + cookie.path, + cookie.key, + removeCookieCb + ); + }); + }); + } + + static deserialize(strOrObj, store, cb) { + if (arguments.length !== 3) { + // store is optional + cb = store; + store = null; + } + + let serialized; + if (typeof strOrObj === "string") { + serialized = jsonParse(strOrObj); + if (serialized instanceof Error) { + return cb(serialized); + } + } else { + serialized = strOrObj; + } + + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + jar._importCookies(serialized, err => { + if (err) { + return cb(err); + } + cb(null, jar); + }); + } + + static deserializeSync(strOrObj, store) { + const serialized = + typeof strOrObj === "string" ? JSON.parse(strOrObj) : strOrObj; + const jar = new CookieJar(store, serialized.rejectPublicSuffixes); + + // catch this mistake early: + if (!jar.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." 
+ ); + } + + jar._importCookiesSync(serialized); + return jar; + } +} +CookieJar.fromJSON = CookieJar.deserializeSync; + +[ + "_importCookies", + "clone", + "getCookies", + "getCookieString", + "getSetCookieStrings", + "removeAllCookies", + "serialize", + "setCookie" +].forEach(name => { + CookieJar.prototype[name] = fromCallback(CookieJar.prototype[name]); +}); +CookieJar.deserialize = fromCallback(CookieJar.deserialize); + +// Use a closure to provide a true imperative API for synchronous stores. +function syncWrap(method) { + return function(...args) { + if (!this.store.synchronous) { + throw new Error( + "CookieJar store is not synchronous; use async API instead." + ); + } + + let syncErr, syncResult; + this[method](...args, (err, result) => { + syncErr = err; + syncResult = result; + }); + + if (syncErr) { + throw syncErr; + } + return syncResult; + }; +} + +exports.version = VERSION; +exports.CookieJar = CookieJar; +exports.Cookie = Cookie; +exports.Store = Store; +exports.MemoryCookieStore = MemoryCookieStore; +exports.parseDate = parseDate; +exports.formatDate = formatDate; +exports.parse = parse; +exports.fromJSON = fromJSON; +exports.domainMatch = domainMatch; +exports.defaultPath = defaultPath; +exports.pathMatch = pathMatch; +exports.getPublicSuffix = pubsuffix.getPublicSuffix; +exports.cookieCompare = cookieCompare; +exports.permuteDomain = require("./permuteDomain").permuteDomain; +exports.permutePath = permutePath; +exports.canonicalDomain = canonicalDomain; +exports.PrefixSecurityEnum = PrefixSecurityEnum; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js new file mode 100644 index 0000000000..912eead354 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/memstore.js @@ -0,0 +1,190 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ +"use strict"; +const { fromCallback } = require("universalify"); +const Store = require("./store").Store; +const permuteDomain = require("./permuteDomain").permuteDomain; +const pathMatch = require("./pathMatch").pathMatch; +const util = require("util"); + +class MemoryCookieStore extends Store { + constructor() { + super(); + this.synchronous = true; + this.idx = {}; + if (util.inspect.custom) { + this[util.inspect.custom] = this.inspect; + } + } + + inspect() { + return `{ idx: ${util.inspect(this.idx, false, 2)} }`; + } + + findCookie(domain, path, key, cb) { + if (!this.idx[domain]) { + return cb(null, undefined); + } + if (!this.idx[domain][path]) { + return cb(null, undefined); + } + return cb(null, this.idx[domain][path][key] || null); + } + findCookies(domain, path, allowSpecialUseDomain, cb) { + const results = []; + if (typeof allowSpecialUseDomain === "function") { + cb = allowSpecialUseDomain; + allowSpecialUseDomain = false; + } + if (!domain) { + return cb(null, []); + } + + let pathMatcher; + if (!path) { + // null means "all paths" + pathMatcher = function matchAll(domainIndex) { + for (const curPath in domainIndex) { + const pathIndex = domainIndex[curPath]; + for (const key in pathIndex) { + results.push(pathIndex[key]); + } + } + }; + } else { + pathMatcher = function matchRFC(domainIndex) { + //NOTE: we should use path-match algorithm from S5.1.4 here + //(see : https://github.com/ChromiumWebApps/chromium/blob/b3d3b4da8bb94c1b2e061600df106d590fda3620/net/cookies/canonical_cookie.cc#L299) + Object.keys(domainIndex).forEach(cookiePath => { + if (pathMatch(path, cookiePath)) { + const pathIndex = domainIndex[cookiePath]; + for (const key in pathIndex) { + results.push(pathIndex[key]); + } + } + }); + }; + } + + const domains = permuteDomain(domain, allowSpecialUseDomain) || [domain]; + const idx = this.idx; + domains.forEach(curDomain => { + const domainIndex = idx[curDomain]; + if (!domainIndex) { + return; + } + pathMatcher(domainIndex); + }); + + cb(null, results); + } + + putCookie(cookie, cb) { + if (!this.idx[cookie.domain]) { + this.idx[cookie.domain] = {}; + } + if (!this.idx[cookie.domain][cookie.path]) { + this.idx[cookie.domain][cookie.path] = {}; + } + this.idx[cookie.domain][cookie.path][cookie.key] = cookie; + cb(null); + } + updateCookie(oldCookie, newCookie, cb) { + // updateCookie() may avoid updating cookies that are identical. For example, + // lastAccessed may not be important to some stores and an equality + // comparison could exclude that field. + this.putCookie(newCookie, cb); + } + removeCookie(domain, path, key, cb) { + if ( + this.idx[domain] && + this.idx[domain][path] && + this.idx[domain][path][key] + ) { + delete this.idx[domain][path][key]; + } + cb(null); + } + removeCookies(domain, path, cb) { + if (this.idx[domain]) { + if (path) { + delete this.idx[domain][path]; + } else { + delete this.idx[domain]; + } + } + return cb(null); + } + removeAllCookies(cb) { + this.idx = {}; + return cb(null); + } + getAllCookies(cb) { + const cookies = []; + const idx = this.idx; + + const domains = Object.keys(idx); + domains.forEach(domain => { + const paths = Object.keys(idx[domain]); + paths.forEach(path => { + const keys = Object.keys(idx[domain][path]); + keys.forEach(key => { + if (key !== null) { + cookies.push(idx[domain][path][key]); + } + }); + }); + }); + + // Sort by creationIndex so deserializing retains the creation order. 
+ // When implementing your own store, this SHOULD retain the order too + cookies.sort((a, b) => { + return (a.creationIndex || 0) - (b.creationIndex || 0); + }); + + cb(null, cookies); + } +} + +[ + "findCookie", + "findCookies", + "putCookie", + "updateCookie", + "removeCookie", + "removeCookies", + "removeAllCookies", + "getAllCookies" +].forEach(name => { + MemoryCookieStore[name] = fromCallback(MemoryCookieStore.prototype[name]); +}); + +exports.MemoryCookieStore = MemoryCookieStore; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js new file mode 100644 index 0000000000..16ff63eea6 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pathMatch.js @@ -0,0 +1,61 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +/* + * "A request-path path-matches a given cookie-path if at least one of the + * following conditions holds:" + */ +function pathMatch(reqPath, cookiePath) { + // "o The cookie-path and the request-path are identical." + if (cookiePath === reqPath) { + return true; + } + + const idx = reqPath.indexOf(cookiePath); + if (idx === 0) { + // "o The cookie-path is a prefix of the request-path, and the last + // character of the cookie-path is %x2F ("/")." + if (cookiePath.substr(-1) === "/") { + return true; + } + + // " o The cookie-path is a prefix of the request-path, and the first + // character of the request-path that is not included in the cookie- path + // is a %x2F ("/") character." 
+ if (reqPath.substr(cookiePath.length, 1) === "/") { + return true; + } + } + + return false; +} + +exports.pathMatch = pathMatch; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js new file mode 100644 index 0000000000..78e6cad568 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/permuteDomain.js @@ -0,0 +1,70 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +const pubsuffix = require("./pubsuffix-psl"); + +// Gives the permutation of all possible domainMatch()es of a given domain. The +// array is in shortest-to-longest order. Handy for indexing. +const SPECIAL_USE_DOMAINS = ["local"]; // RFC 6761 +function permuteDomain(domain, allowSpecialUseDomain) { + let pubSuf = null; + if (allowSpecialUseDomain) { + const domainParts = domain.split("."); + if (SPECIAL_USE_DOMAINS.includes(domainParts[domainParts.length - 1])) { + pubSuf = `${domainParts[domainParts.length - 2]}.${ + domainParts[domainParts.length - 1] + }`; + } else { + pubSuf = pubsuffix.getPublicSuffix(domain); + } + } else { + pubSuf = pubsuffix.getPublicSuffix(domain); + } + + if (!pubSuf) { + return null; + } + if (pubSuf == domain) { + return [domain]; + } + + const prefix = domain.slice(0, -(pubSuf.length + 1)); // ".example.com" + const parts = prefix.split(".").reverse(); + let cur = pubSuf; + const permutations = [cur]; + while (parts.length) { + cur = `${parts.shift()}.${cur}`; + permutations.push(cur); + } + return permutations; +} + +exports.permuteDomain = permuteDomain; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js new file mode 100644 index 0000000000..93a8577c6b --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/pubsuffix-psl.js @@ -0,0 +1,38 @@ +/*! 
+ * Copyright (c) 2018, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +const psl = require("psl"); + +function getPublicSuffix(domain) { + return psl.get(domain); +} + +exports.getPublicSuffix = getPublicSuffix; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js new file mode 100644 index 0000000000..2ed0259e02 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/store.js @@ -0,0 +1,76 @@ +/*! + * Copyright (c) 2015, Salesforce.com, Inc. + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the documentation + * and/or other materials provided with the distribution. + * + * 3. Neither the name of Salesforce.com nor the names of its contributors may + * be used to endorse or promote products derived from this software without + * specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" + * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS + * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN + * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + */ +"use strict"; +/*jshint unused:false */ + +class Store { + constructor() { + this.synchronous = false; + } + + findCookie(domain, path, key, cb) { + throw new Error("findCookie is not implemented"); + } + + findCookies(domain, path, allowSpecialUseDomain, cb) { + throw new Error("findCookies is not implemented"); + } + + putCookie(cookie, cb) { + throw new Error("putCookie is not implemented"); + } + + updateCookie(oldCookie, newCookie, cb) { + // recommended default implementation: + // return this.putCookie(newCookie, cb); + throw new Error("updateCookie is not implemented"); + } + + removeCookie(domain, path, key, cb) { + throw new Error("removeCookie is not implemented"); + } + + removeCookies(domain, path, cb) { + throw new Error("removeCookies is not implemented"); + } + + removeAllCookies(cb) { + throw new Error("removeAllCookies is not implemented"); + } + + getAllCookies(cb) { + throw new Error( + "getAllCookies is not implemented (therefore jar cannot be serialized)" + ); + } +} + +exports.Store = Store; diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js new file mode 100644 index 0000000000..e52f25be72 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/lib/version.js @@ -0,0 +1,2 @@ +// generated by genversion +module.exports = '4.0.0' diff --git a/node_modules/@azure/core-http/node_modules/tough-cookie/package.json b/node_modules/@azure/core-http/node_modules/tough-cookie/package.json new file mode 100644 index 0000000000..e07dcb7cbf --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tough-cookie/package.json @@ -0,0 +1,109 @@ +{ + "author": { + "name": "Jeremy Stashewsky", + "email": "jstash@gmail.com", + "website": "https://github.com/stash" + }, + "contributors": [ + { + "name": "Ivan Nikulin", + "website": "https://github.com/inikulin" + }, + { + "name": "Shivan Kaul Sahib", + "website": "https://github.com/ShivanKaul" + }, + { + "name": "Clint Ruoho", + "website": "https://github.com/ruoho" + }, + { + "name": "Ian Livingstone", + "website": "https://github.com/ianlivingstone" + }, + { + "name": "Andrew Waterman", + "website": "https://github.com/awaterma" + }, + { + "name": "Michael de Libero ", + "website": "https://github.com/medelibero-sfdc" + }, + { + "name": "Jonathan Stewmon", + "website": "https://github.com/jstewmon" + }, + { + "name": "Miguel Roncancio", + "website": "https://github.com/miggs125" + }, + { + "name": "Sebastian Mayr", + "website": "https://github.com/Sebmaster" + }, + { + "name": "Alexander Savin", + "website": "https://github.com/apsavin" + }, + { + "name": "Lalit Kapoor", + "website": "https://github.com/lalitkapoor" + }, + { + "name": "Sam Thompson", + "website": "https://github.com/sambthompson" + } + ], + "license": "BSD-3-Clause", + "name": "tough-cookie", + "description": "RFC6265 Cookies and Cookie Jar for node.js", + "keywords": [ + "HTTP", + "cookie", + 
"cookies", + "set-cookie", + "cookiejar", + "jar", + "RFC6265", + "RFC2965" + ], + "version": "4.0.0", + "homepage": "https://github.com/salesforce/tough-cookie", + "repository": { + "type": "git", + "url": "git://github.com/salesforce/tough-cookie.git" + }, + "bugs": { + "url": "https://github.com/salesforce/tough-cookie/issues" + }, + "main": "./lib/cookie", + "files": [ + "lib" + ], + "scripts": { + "version": "genversion lib/version.js && git add lib/version.js", + "test": "vows test/*_test.js", + "cover": "nyc --reporter=lcov --reporter=html vows test/*_test.js", + "eslint": "eslint --env node --ext .js .", + "prettier": "prettier '**/*.{json,ts,yaml,md}'", + "format": "npm run eslint -- --fix" + }, + "engines": { + "node": ">=6" + }, + "devDependencies": { + "async": "^2.6.2", + "eslint": "^5.16.0", + "eslint-config-prettier": "^4.2.0", + "eslint-plugin-prettier": "^3.0.1", + "genversion": "^2.1.0", + "nyc": "^14.0.0", + "prettier": "^1.17.0", + "vows": "^0.8.2" + }, + "dependencies": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.1.2" + } +} diff --git a/node_modules/@azure/core-http/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-http/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-http/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-http/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/README.md b/node_modules/@azure/core-http/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. 
+ +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-http/node_modules/tslib/modules/index.js b/node_modules/@azure/core-http/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-http/node_modules/tslib/modules/package.json b/node_modules/@azure/core-http/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/package.json b/node_modules/@azure/core-http/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-http/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ 
b/node_modules/@azure/core-http/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. 
+ * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. 
+ * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar<T>(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault<T>(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. 
+ * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. 
+ */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? 
target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? 
m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.html b/node_modules/@azure/core-http/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/tslib/tslib.js b/node_modules/@azure/core-http/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/core-http/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-http/package.json b/node_modules/@azure/core-http/package.json new file mode 100644 index 0000000000..908db98811 --- /dev/null +++ b/node_modules/@azure/core-http/package.json @@ -0,0 +1,178 @@ +{ + "name": "@azure/core-http", + "sdk-type": "client", + "author": "Microsoft Corporation", + "version": "2.2.5", + "description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "engines": { + "node": ">=12.0.0" + }, + "keywords": [ + 
"isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime", + "azure", + "cloud" + ], + "main": "dist/index.js", + "module": "./dist-esm/src/coreHttp.js", + "types": "./types/latest/src/coreHttp.d.ts", + "typesVersions": { + "<3.6": { + "types/latest/src/*": [ + "types/3.1/src/*" + ] + } + }, + "files": [ + "dist/", + "dist-esm/src/", + "dom-shim.d.ts", + "types/*/src/**/*.d.ts", + "types/*/src/**/*.d.ts.map", + "README.md", + "LICENSE" + ], + "browser": { + "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.browser.js", + "./dist-esm/src/policies/disableResponseDecompressionPolicy.js": "./dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js", + "./dist-esm/src/policies/proxyPolicy.js": "./dist-esm/src/policies/proxyPolicy.browser.js", + "./dist-esm/src/util/base64.js": "./dist-esm/src/util/base64.browser.js", + "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.browser.js", + "./dist-esm/src/defaultHttpClient.js": "./dist-esm/src/defaultHttpClient.browser.js", + "./dist-esm/src/util/inspect.js": "./dist-esm/src/util/inspect.browser.js" + }, + "react-native": { + "./dist/index.js": "./dist-esm/src/coreHttp.js", + "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.js", + "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.native.js" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p tsconfig.es.json && dev-tool run bundle", + "build:types": "downlevel-dts types/latest/ types/3.1/", + "build": "npm run clean && tsc -p tsconfig.es.json && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p tsconfig.es.json && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . 
&& npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "cross-env TS_NODE_FILES=true mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 500000 --full-trace --exclude \"test/**/*.browser.ts\" \"test/**/*.ts\"" + }, + "sideEffects": false, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + }, + "//metadata": { + "constantPaths": [ + { + "path": "src/util/constants.ts", + "prefix": "coreHttpVersion" + } + ] + }, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.3.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "@types/node-fetch": "^2.5.0", + "@types/tunnel": "^0.0.3", + "form-data": "^4.0.0", + "node-fetch": "^2.6.7", + "process": "^0.11.10", + "tough-cookie": "^4.0.0", + "tslib": "^2.2.0", + "tunnel": "^0.0.6", + "uuid": "^8.3.0", + "xml2js": "^0.4.19" + }, + "devDependencies": { + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/dev-tool": "^1.0.0", + "@azure/logger-js": "^1.0.2", + "@microsoft/api-extractor": "7.18.11", + "@opentelemetry/api": "^1.0.1", + "@types/chai": "^4.1.6", + "@types/express": "^4.16.0", + "@types/glob": "^7.1.1", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "@types/sinon": "^9.0.4", + "@types/tough-cookie": "^4.0.0", + "@types/uuid": "^8.0.0", + "@types/xml2js": "^0.4.11", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "downlevel-dts": "^0.8.0", + "eslint": "^7.15.0", + "express": "^4.16.3", + "fetch-mock": "^9.10.1", + "glob": "^7.1.2", + "karma": "^6.2.0", + "karma-chai": "^0.1.0", + "karma-chrome-launcher": "^3.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-firefox-launcher": "^1.1.0", + "karma-mocha": "^2.0.1", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "npm-run-all": "^4.1.5", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "puppeteer": "^13.5.1", + "regenerator-runtime": "^0.13.3", + "rimraf": "^3.0.0", + "shx": "^0.3.2", + "sinon": "^9.0.2", + "ts-node": "^10.0.0", + "typescript": "~4.6.0", + "uglify-js": "^3.4.9", + "xhr-mock": "^2.4.1" + } +} diff --git a/node_modules/@azure/core-http/types/3.1/src/coreHttp.d.ts b/node_modules/@azure/core-http/types/3.1/src/coreHttp.d.ts new file mode 100644 index 0000000000..f25460c6a0 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/coreHttp.d.ts @@ -0,0 +1,51 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, } from "./webResource"; +export { CommonResponse, CommonRequestInit, CommonRequestInfo } from "./nodeFetchHttpClient"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationOptions, OperationRequestOptions, 
operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { ServiceClient, ServiceClientOptions, flattenResponse, createPipelineFromOptions, ProxySettings, ProxyOptions, } from "./serviceClient"; +export { PipelineOptions, InternalPipelineOptions } from "./pipelineOptions"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { LogPolicyOptions, logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryOptions, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy, RedirectOptions } from "./policies/redirectPolicy"; +export { keepAlivePolicy, KeepAliveOptions } from "./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, UserAgentOptions, TelemetryInfo, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, DeserializationOptions, deserializeResponseBody, DeserializationContentTypes, } from "./policies/deserializationPolicy"; +export { tracingPolicy, TracingPolicyOptions } from "./policies/tracingPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { AbortSignalLike } from "@azure/abort-controller"; +export { delay } from "./util/delay"; +export { createSpanFunction, SpanConfig } from "./createSpanLegacy"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from "@azure/core-auth"; +export { AccessTokenCache, ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { 
XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from "./util/serializer.common"; +//# sourceMappingURL=coreHttp.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts b/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts new file mode 100644 index 0000000000..68a4bdd22f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/createSpanLegacy.d.ts @@ -0,0 +1,32 @@ +import { Span } from "@azure/core-tracing"; +import { OperationOptions } from "./operationOptions"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use core-tracing instead. + * @hidden + */ +export interface SpanConfig { + /** + * Package name prefix + */ + packagePrefix: string; + /** + * Service namespace + */ + namespace: string; +} +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export declare function createSpanFunction(args: SpanConfig): (operationName: string, operationOptions: T) => { + span: Span; + updatedOptions: T; +}; +//# sourceMappingURL=createSpanLegacy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts new file mode 100644 index 0000000000..7ea7d81763 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenCache.d.ts @@ -0,0 +1,48 @@ +import { AccessToken } from "@azure/core-auth"; +/** + * Defines the default token refresh buffer duration. + */ +export declare const TokenRefreshBufferMs: number; +/** + * Provides a cache for an AccessToken that was that + * was returned from a TokenCredential. + */ +export interface AccessTokenCache { + /** + * Sets the cached token. + * + * @param accessToken - The {@link AccessToken} to be cached or null to + * clear the cached token. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached {@link AccessToken} or undefined if nothing is cached. + */ + getCachedToken(): AccessToken | undefined; +} +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class ExpiringAccessTokenCache implements AccessTokenCache { + private tokenRefreshBufferMs; + private cachedToken?; + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs?: number); + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ + getCachedToken(): AccessToken | undefined; +} +//# sourceMappingURL=accessTokenCache.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts new file mode 100644 index 0000000000..38451d52fe --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/accessTokenRefresher.d.ts @@ -0,0 +1,32 @@ +import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth"; +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class AccessTokenRefresher { + private credential; + private scopes; + private requiredMillisecondsBeforeNewRefresh; + private promise; + private lastCalled; + constructor(credential: TokenCredential, scopes: string | string[], requiredMillisecondsBeforeNewRefresh?: number); + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady(): boolean; + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + private getToken; + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. + */ + refresh(options: GetTokenOptions): Promise; +} +//# sourceMappingURL=accessTokenRefresher.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts new file mode 100644 index 0000000000..9da89224ce --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,44 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+     */
+    signRequest(webResource: WebResourceLike): Promise<WebResourceLike>;
+}
+//# sourceMappingURL=apiKeyCredentials.d.ts.map
diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts
new file mode 100644
index 0000000000..8ba0538fa5
--- /dev/null
+++ b/node_modules/@azure/core-http/types/3.1/src/credentials/basicAuthenticationCredentials.d.ts
@@ -0,0 +1,36 @@
+import { ServiceClientCredentials } from "./serviceClientCredentials";
+import { WebResourceLike } from "../webResource";
+/**
+ * A simple {@link ServiceClientCredential} that authenticates with a username and a password.
+ */
+export declare class BasicAuthenticationCredentials implements ServiceClientCredentials {
+    /**
+     * Username
+     */
+    userName: string;
+    /**
+     * Password
+     */
+    password: string;
+    /**
+     * Authorization scheme. Defaults to "Basic".
+     * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes
+     */
+    authorizationScheme: string;
+    /**
+     * Creates a new BasicAuthenticationCredentials object.
+     *
+     * @param userName - User name.
+     * @param password - Password.
+     * @param authorizationScheme - The authorization scheme.
+     */
+    constructor(userName: string, password: string, authorizationScheme?: string);
+    /**
+     * Signs a request with the Authentication header.
+     *
+     * @param webResource - The WebResourceLike to be signed.
+     * @returns The signed request object.
+     */
+    signRequest(webResource: WebResourceLike): Promise<WebResourceLike>;
+}
+//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map
diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts
new file mode 100644
index 0000000000..4be8a9f580
--- /dev/null
+++ b/node_modules/@azure/core-http/types/3.1/src/credentials/credentials.d.ts
@@ -0,0 +1,6 @@
+/**
+ * A function that receives a challenge and resolves a promise with a string token.
+ * @deprecated The Authenticator type is not currently in use.
+ */
+export declare type Authenticator = (challenge: unknown) => Promise<string>;
+//# sourceMappingURL=credentials.d.ts.map
diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts
new file mode 100644
index 0000000000..5897a2eff1
--- /dev/null
+++ b/node_modules/@azure/core-http/types/3.1/src/credentials/serviceClientCredentials.d.ts
@@ -0,0 +1,14 @@
+import { WebResourceLike } from "../webResource";
+/**
+ * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header).
+ */
+export interface ServiceClientCredentials {
+    /**
+     * Signs a request with the Authentication header.
+     *
+     * @param webResource - The WebResourceLike/request to be signed.
+ * @returns The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts b/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts new file mode 100644 index 0000000000..a96bb0f038 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/credentials/topicCredentials.d.ts @@ -0,0 +1,13 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts new file mode 100644 index 0000000000..26fcaefa27 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts new file mode 100644 index 0000000000..c46685882c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts new file mode 100644 index 0000000000..6f853022c7 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts new file mode 100644 index 0000000000..40c4cf1370 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpClientCache.d.ts @@ -0,0 +1,3 @@ +import { HttpClient } from "./httpClient"; +export declare function getCachedDefaultHttpClient(): HttpClient; +//# sourceMappingURL=httpClientCache.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts new file mode 100644 index 0000000000..1c033c65e8 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpHeaders.d.ts @@ -0,0 +1,136 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. 
+ */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders implements HttpHeadersLike { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts new file mode 100644 index 0000000000..70acd5599c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpOperationResponse.d.ts @@ -0,0 +1,78 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; +} +/** + * The flattened response to a REST call. + * Contains the underlying {@link HttpOperationResponse} as well as + * the merged properties of the `parsedBody`, `parsedHeaders`, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + /** + * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body. + */ + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts new file mode 100644 index 0000000000..26520c4663 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts new file mode 100644 index 0000000000..1cf52847fc --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/log.d.ts b/node_modules/@azure/core-http/types/3.1/src/log.d.ts new file mode 100644 index 0000000000..c757fef42c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/log.d.ts @@ -0,0 +1,2 @@ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=log.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts new file mode 100644 index 0000000000..40bef94057 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/nodeFetchHttpClient.d.ts @@ -0,0 +1,64 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { Transform } from "stream"; +import { TransferProgressEvent, WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * String URLs used when calling to `fetch()`. + */ +export declare type CommonRequestInfo = string; +/** + * An object containing information about the outgoing HTTP request. + */ +export declare type CommonRequestInit = Pick> & { + body?: any; + headers?: any; + signal?: any; +}; +/** + * An object containing information about the incoming HTTP response. + */ +export declare type CommonResponse = Pick> & { + body: any; + trailer: any; + formData: any; +}; +export declare class ReportTransform extends Transform { + private progressCallback; + private loadedBytes; + _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void; + constructor(progressCallback: (progress: TransferProgressEvent) => void); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +/** + * An HTTP client that uses `node-fetch`. 
+ */ +export declare class NodeFetchHttpClient implements HttpClient { + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ + sendRequest(httpRequest: WebResourceLike): Promise; + private proxyAgentMap; + private keepAliveAgents; + private readonly cookieJar; + private getOrCreateAgent; + /** + * Uses `node-fetch` to perform the request. + */ + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + /** + * Prepares a request based on the provided web resource. + */ + prepareRequest(httpRequest: WebResourceLike): Promise>; + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + processRequest(operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts new file mode 100644 index 0000000000..88327d290d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts new file mode 100644 index 0000000000..d193c984b9 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationOptions.d.ts @@ -0,0 +1,56 @@ +import { RequestOptionsBase, TransferProgressEvent } from "./webResource"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationTracingOptions } from "@azure/core-tracing"; +/** + * The base options type for all operations. + */ +export interface OperationOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: OperationRequestOptions; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options that allow configuring the handling of HTTP requests made by an SDK operation. + */ +export interface OperationRequestOptions { + /** + * User defined custom request headers that will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. 
+ */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); +} +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export declare function operationOptionsToRequestOptionsBase(opts: T): RequestOptionsBase; +//# sourceMappingURL=operationOptions.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts new file mode 100644 index 0000000000..4b79aaee67 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationParameter.d.ts @@ -0,0 +1,54 @@ +import { Mapper } from "./serializer"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +/** + * A path which describes how to access a particular property in a given object data source. May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values. + */ +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts new file mode 100644 index 0000000000..bfe4a1f624 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationResponse.d.ts @@ -0,0 +1,19 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. 
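As a quick illustration of the OperationOptions shape declared above and of operationOptionsToRequestOptionsBase, here is a hedged sketch; it assumes both names are exported from the @azure/core-http package root, and the timeout value is arbitrary:

    import { OperationOptions, operationOptionsToRequestOptionsBase } from "@azure/core-http";

    const options: OperationOptions = {
      requestOptions: {
        timeout: 30000, // terminate the request after 30 seconds
        onDownloadProgress: (progress) => console.log(`received ${progress.loadedBytes} bytes`),
      },
    };

    // Flattens the nested OperationOptions shape into the RequestOptionsBase
    // shape used by the older WebResource-based code paths.
    const requestOptions = operationOptionsToRequestOptionsBase(options);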
+ */ + bodyMapper?: Mapper; + /** + * Indicates if this is an error response + */ + isError?: boolean; +} +//# sourceMappingURL=operationResponse.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts b/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts new file mode 100644 index 0000000000..ff8bd2a2e6 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/operationSpec.d.ts @@ -0,0 +1,77 @@ +import { Serializer } from "./serializer"; +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { HttpMethods } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +/** + * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The media type of the request body. + * This value can be used to aide in serialization if it is provided. + */ + readonly mediaType?: "json" | "xml" | "form" | "binary" | "multipart" | "text" | "unknown" | string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. + */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +/** + * Gets the list of status codes for streaming responses. 
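OperationSpec values are normally produced by code generators, but a small hand-written sketch makes the required fields (serializer, httpMethod, responses) concrete. The Widget names below are hypothetical, and the example assumes OperationSpec and Serializer are exported from the package root:

    import { OperationSpec, Serializer } from "@azure/core-http";

    const serializer = new Serializer();

    // A GET operation with one URL template parameter and a JSON response body.
    const getWidgetOperationSpec: OperationSpec = {
      httpMethod: "GET",
      path: "widgets/{widgetId}",
      urlParameters: [
        {
          parameterPath: "widgetId",
          mapper: { serializedName: "widgetId", required: true, type: { name: "String" } },
        },
      ],
      responses: {
        200: { bodyMapper: { serializedName: "Widget", type: { name: "Object" } } },
        default: {},
      },
      serializer,
    };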
+ * @internal + */ +export declare function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set; +//# sourceMappingURL=operationSpec.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts b/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts new file mode 100644 index 0000000000..edc81c5de2 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/pipelineOptions.d.ts @@ -0,0 +1,63 @@ +import { DeserializationOptions } from "./policies/deserializationPolicy"; +import { HttpClient } from "./httpClient"; +import { KeepAliveOptions } from "./policies/keepAlivePolicy"; +import { LogPolicyOptions } from "./policies/logPolicy"; +import { ProxyOptions } from "./serviceClient"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RetryOptions } from "./policies/exponentialRetryPolicy"; +import { UserAgentOptions } from "./policies/userAgentPolicy"; +/** + * Defines options that are used to configure the HTTP pipeline for + * an SDK client. + */ +export interface PipelineOptions { + /** + * The HttpClient implementation to use for outgoing HTTP requests. Defaults + * to DefaultHttpClient. + */ + httpClient?: HttpClient; + /** + * Options that control how to retry failed requests. + */ + retryOptions?: RetryOptions; + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for how HTTP connections should be maintained for future + * requests. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; +} +/** + * Defines options that are used to configure internal options of + * the HTTP pipeline for an SDK client. + */ +export interface InternalPipelineOptions extends PipelineOptions { + /** + * Options to configure API response deserialization. + */ + deserializationOptions?: DeserializationOptions; + /** + * Options to configure request/response logging. + */ + loggingOptions?: LogPolicyOptions; + /** + * Configure whether to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Send JSON Array payloads as NDJSON. + */ + sendStreamingJson?: boolean; +} +//# sourceMappingURL=pipelineOptions.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts new file mode 100644 index 0000000000..2f06728a05 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/bearerTokenAuthenticationPolicy.d.ts @@ -0,0 +1,33 @@ +import { TokenCredential } from "@azure/core-auth"; +import { RequestPolicyFactory } from "../policies/requestPolicy"; +interface TokenCyclerOptions { + /** + * The window of time before token expiration during which the token will be + * considered unusable due to risk of the token expiring before sending the + * request. + * + * This will only become meaningful if the refresh fails for over + * (refreshWindow - forcedRefreshWindow) milliseconds. + */ + forcedRefreshWindowInMs: number; + /** + * Interval in milliseconds to retry failed token refreshes. + */ + retryIntervalInMs: number; + /** + * The window of time before token expiration during which + * we will attempt to refresh the token. 
+ */ + refreshWindowInMs: number; +} +export declare const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions; +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export declare function bearerTokenAuthenticationPolicy(credential: TokenCredential, scopes: string | string[]): RequestPolicyFactory; +export {}; +//# sourceMappingURL=bearerTokenAuthenticationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts new file mode 100644 index 0000000000..e088109e4c --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/deserializationPolicy.d.ts @@ -0,0 +1,59 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { SerializerOptions } from "../util/serializer.common"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to configure API response deserialization. + */ +export interface DeserializationOptions { + /** + * Configures the expected content types for the deserialization of + * JSON and XML response bodies. + */ + expectedContentTypes: DeserializationContentTypes; +} +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +export declare const DefaultDeserializationOptions: DeserializationOptions; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + readonly xmlCharKey: string; + constructor(nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions, deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions); + sendRequest(request: WebResourceLike): Promise; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. 
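To show how the bearerTokenAuthenticationPolicy factory declared above is wired up, here is an illustrative sketch; the credential is a hand-rolled stub (a real application would use an implementation such as one from @azure/identity), and the scope string is only an example:

    import { bearerTokenAuthenticationPolicy } from "@azure/core-http";
    import { AccessToken, TokenCredential } from "@azure/core-auth";

    // Stub credential for illustration only.
    const stubCredential: TokenCredential = {
      async getToken(): Promise<AccessToken | null> {
        return { token: "<opaque bearer token>", expiresOnTimestamp: Date.now() + 3600 * 1000 };
      },
    };

    // Produces a RequestPolicyFactory whose policy adds
    // "Authorization: Bearer <token>" to each outgoing request for this scope.
    const authPolicyFactory = bearerTokenAuthenticationPolicy(
      stubCredential,
      "https://storage.azure.com/.default"
    );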
+ * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse, options?: SerializerOptions): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts new file mode 100644 index 0000000000..72def6e711 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.browser.d.ts @@ -0,0 +1,13 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts new file mode 100644 index 0000000000..73710e6507 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/disableResponseDecompressionPolicy.d.ts @@ -0,0 +1,29 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. 
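A short sketch of the deserializationPolicy factory declared above, assuming it is exported from the package root; the vendor-specific content type is made up for illustration:

    import { deserializationPolicy } from "@azure/core-http";

    // Treat an additional vendor-specific media type as JSON when deserializing
    // response bodies; XML handling keeps the documented defaults.
    const deserializationFactory = deserializationPolicy({
      json: ["application/json", "text/json", "application/vnd.contoso+json"],
      xml: ["application/xml", "application/atom+xml"],
    });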
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 0000000000..2662d91710 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,73 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export declare enum RetryMode { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + Exponential = 0 +} +/** + * Options that control how to retry failed requests. + */ +export interface RetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; + /** + * The amount of delay in milliseconds between retry attempts. Defaults to 30000 + * (30 seconds). The delay increases exponentially with each retry up to a maximum + * specified by maxRetryDelayInMs. + */ + retryDelayInMs?: number; + /** + * The maximum delay in milliseconds allowed before retrying an operation. Defaults + * to 90000 (90 seconds). + */ + maxRetryDelayInMs?: number; + /** + * Currently supporting only Exponential mode. + */ + mode?: RetryMode; +} +export declare const DefaultRetryOptions: RetryOptions; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. 
+ */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 0000000000..62203ad7b1 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,14 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts new file mode 100644 index 0000000000..dd195c48fc --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/keepAlivePolicy.d.ts @@ -0,0 +1,46 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how HTTP connections should be maintained for future + * requests. + */ +export interface KeepAliveOptions { + /** + * When true, connections will be kept alive for multiple requests. + * Defaults to true. + */ + enable: boolean; +} +/** + * By default, HTTP connections are maintained for future requests. + */ +export declare const DefaultKeepAliveOptions: KeepAliveOptions; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export declare function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory; +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export declare class KeepAlivePolicy extends BaseRequestPolicy { + private readonly keepAliveOptions; + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, keepAliveOptions: KeepAliveOptions); + /** + * Sends out request. 
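The retry and keep-alive factories declared above compose as follows (the numeric values are illustrative, and both functions are assumed to be exported from the @azure/core-http package root):

    import { exponentialRetryPolicy, keepAlivePolicy, RequestPolicyFactory } from "@azure/core-http";

    // Up to 4 retries, starting at a 1 s delay and capped at 20 s between attempts.
    const retryFactory = exponentialRetryPolicy(4, 1000, 20000);

    // Keep sockets open between requests (the documented default).
    const keepAliveFactory = keepAlivePolicy({ enable: true });

    // Both are RequestPolicyFactory values a pipeline can instantiate per request.
    const factories: RequestPolicyFactory[] = [retryFactory, keepAliveFactory];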
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=keepAlivePolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts new file mode 100644 index 0000000000..c6561afabe --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/logPolicy.d.ts @@ -0,0 +1,77 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Debugger } from "@azure/logger"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Sanitizer } from "../util/sanitizer"; +import { WebResourceLike } from "../webResource"; +/** + * Options to pass to the {@link logPolicy}. + * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged. + */ +export interface LogPolicyOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to: + * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id, + * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials, + * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers, + * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding, + * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match, + * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent. + * + * Any headers specified in this field will be added to that list. + * Any other values will be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; + /** + * The Debugger (logger) instance to use for writing pipeline logs. + */ + logger?: Debugger; +} +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +export declare function logPolicy(loggingOptions?: LogPolicyOptions): RequestPolicyFactory; +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export declare class LogPolicy extends BaseRequestPolicy { + logger: Debugger; + sanitizer: Sanitizer; + /* + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + + + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + allowedHeaderNames: Set; + /* + * Query string names whose values will be logged when logging is enabled. 
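For the logPolicy options being described here, a minimal sketch, assuming logPolicy is exported from the package root and using @azure/logger's createClientLogger to obtain a Debugger; the client name and query parameter are examples:

    import { logPolicy } from "@azure/core-http";
    import { createClientLogger } from "@azure/logger";

    const logger = createClientLogger("sample-client");

    // Log the api-version query parameter in addition to the default header
    // allow list; any other header or query value is written as "REDACTED".
    const logFactory = logPolicy({
      logger: logger.info,
      allowedQueryParameters: ["api-version"],
    });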
By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + + + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + allowedQueryParameters: Set; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, { logger, allowedHeaderNames, allowedQueryParameters, }?: LogPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + private logRequest; + private logResponse; +} +//# sourceMappingURL=logPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 0000000000..09a257fb73 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 0000000000..2e50b8b452 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts new file mode 100644 index 0000000000..57c6b438e9 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/msRestUserAgentPolicy.native.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.native.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts new file mode 100644 index 0000000000..72e4ed6d1a --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/ndJsonPolicy.d.ts @@ -0,0 +1,3 @@ +import { RequestPolicyFactory } from "./requestPolicy"; +export declare function ndJsonPolicy(): RequestPolicyFactory; +//# sourceMappingURL=ndJsonPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts new file mode 100644 index 0000000000..963560df65 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +export declare function 
getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts new file mode 100644 index 0000000000..f202890ee2 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/proxyPolicy.d.ts @@ -0,0 +1,37 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export declare const globalNoProxyList: string[]; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export declare function proxyPolicy(proxySettings?: ProxySettings, options?: { + /** a list of patterns to override those loaded from NO_PROXY environment variable. */ + customNoProxyList?: string[]; +}): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + private customNoProxyList?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, proxySettings: ProxySettings, customNoProxyList?: string[] | undefined); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts new file mode 100644 index 0000000000..dcf0aa9495 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/redirectPolicy.d.ts @@ -0,0 +1,33 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /** + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + /** + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. 
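The proxy helpers declared above can be combined like this (hostnames are placeholders; getDefaultProxySettings and proxyPolicy are assumed to be exported from the package root):

    import { getDefaultProxySettings, proxyPolicy } from "@azure/core-http";

    // Build ProxySettings from an explicit URL; calling this with no argument
    // falls back to the HTTPS_PROXY / HTTP_PROXY environment variables.
    const proxySettings = getDefaultProxySettings("http://proxy.example.test:8080");

    // Bypass the proxy for selected hosts, overriding the NO_PROXY-derived list.
    const proxyFactory = proxyPolicy(proxySettings, {
      customNoProxyList: ["localhost", "*.internal.example.test"],
    });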
+ */ + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts new file mode 100644 index 0000000000..3fb5d15e80 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/requestPolicy.d.ts @@ -0,0 +1,102 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. + */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +/** + * The underlying structure of a request policy. + */ +export interface RequestPolicy { + /** + * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made. + * @param httpRequest - {@link WebResourceLike} describing the request to be made. + */ + sendRequest(httpRequest: WebResourceLike): Promise; +} +/** + * The base class from which all request policies derive. + */ +export declare abstract class BaseRequestPolicy implements RequestPolicy { + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + readonly _nextPolicy: RequestPolicy; + /** + * The options that can be passed to a given request policy. + */ + readonly _options: RequestPolicyOptionsLike; + /** + * The main method to implement that manipulates a request/response. + */ + protected constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy: RequestPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options: RequestPolicyOptionsLike); + /** + * Sends a network request based on the given web resource. + * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made. + */ + abstract sendRequest(webResource: WebResourceLike): Promise; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. 
If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log.
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 0000000000..f17bc67d31 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts new file mode 100644 index 0000000000..577d72f0c7 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/signingPolicy.d.ts @@ -0,0 +1,20 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 0000000000..cdb8a54915 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. 
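A sketch of a hand-written policy built on the BaseRequestPolicy / RequestPolicyFactory contracts declared above. The header name is hypothetical, and the example assumes these types are re-exported from the @azure/core-http package root:

    import {
      BaseRequestPolicy,
      HttpOperationResponse,
      RequestPolicy,
      RequestPolicyFactory,
      RequestPolicyOptionsLike,
      WebResourceLike,
    } from "@azure/core-http";

    // Adds a custom header to every request, then delegates to the next policy.
    class CustomHeaderPolicy extends BaseRequestPolicy {
      constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {
        super(nextPolicy, options);
      }

      sendRequest(request: WebResourceLike): Promise<HttpOperationResponse> {
        request.headers.set("x-sample-header", "enabled"); // header name is illustrative
        return this._nextPolicy.sendRequest(request);
      }
    }

    // The factory shape the pipeline expects: create(nextPolicy, options).
    const customHeaderPolicyFactory: RequestPolicyFactory = {
      create: (nextPolicy, options) => new CustomHeaderPolicy(nextPolicy, options),
    };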
+ * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 0000000000..193dc2573f --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,35 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +declare type ResponseHandler = (httpRequest: WebResourceLike, response: HttpOperationResponse) => Promise; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export declare function throttlingRetryPolicy(): RequestPolicyFactory; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private _handleResponse; + private numberOfRetries; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _handleResponse?: ResponseHandler); + sendRequest(httpRequest: WebResourceLike): Promise; + private _defaultResponseHandler; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +export {}; +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts new file mode 100644 index 0000000000..793ef1c54e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/tracingPolicy.d.ts @@ -0,0 +1,31 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Span } from "@azure/core-tracing"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to customize the tracing policy. + */ +export interface TracingPolicyOptions { + /** + * User agent used to better identify the outgoing requests traced by the tracing policy. + */ + userAgent?: string; +} +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export declare function tracingPolicy(tracingOptions?: TracingPolicyOptions): RequestPolicyFactory; +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export declare class TracingPolicy extends BaseRequestPolicy { + private userAgent?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, tracingOptions: TracingPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + tryCreateSpan(request: WebResourceLike): Span | undefined; + private tryProcessError; + private tryProcessResponse; +} +//# sourceMappingURL=tracingPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts b/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts new file mode 100644 index 0000000000..6a4b2f7efe --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/policies/userAgentPolicy.d.ts @@ -0,0 +1,49 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Telemetry information. Key/value pairs to include inside the User-Agent string. + */ +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +/** + * Options for adding user agent details to outgoing requests. + */ +export interface UserAgentOptions { + /** + * String prefix to add to the user agent for outgoing requests. + * Defaults to an empty string. 
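To make the system-error, throttling, and tracing factories above concrete, a small sketch (argument values and the user agent string are illustrative; the functions are assumed to be exported from the package root):

    import { systemErrorRetryPolicy, throttlingRetryPolicy, tracingPolicy } from "@azure/core-http";

    // Retry on low-level socket errors such as ETIMEDOUT or ECONNRESET.
    const systemErrorRetries = systemErrorRetryPolicy(3, 1000, 1000, 15000);

    // Honour Retry-After headers on throttled (429-style) responses.
    const throttlingRetries = throttlingRetryPolicy();

    // Wrap each request in a tracing span, tagged with a custom user agent.
    const tracing = tracingPolicy({ userAgent: "sample-app/1.0.0" });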
+ */ + userAgentPrefix?: string; +} +export declare const getDefaultUserAgentHeaderName: typeof getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export declare function getDefaultUserAgentValue(): string; +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptions; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts b/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts new file mode 100644 index 0000000000..0d1cf99cfa --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/proxyAgent.d.ts @@ -0,0 +1,14 @@ +/// +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; +import { HttpHeadersLike } from "./httpHeaders"; +import { ProxySettings } from "./serviceClient"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export declare function isUrlHttps(url: string): boolean; +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: tunnel.HttpsOverHttpsOptions): http.Agent | https.Agent; +//# sourceMappingURL=proxyAgent.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts b/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts new file mode 100644 index 0000000000..b68b15d904 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/queryCollectionFormat.d.ts @@ -0,0 +1,26 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + /** + * CSV: Each pair of segments joined by a single comma. + */ + Csv = ",", + /** + * SSV: Each pair of segments joined by a single space character. + */ + Ssv = " ", + /** + * TSV: Each pair of segments joined by a single tab character. + */ + Tsv = "\t", + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + Pipes = "|", + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. 
`?queryParam=value1&queryParam=value2` + */ + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/restError.d.ts b/node_modules/@azure/core-http/types/3.1/src/restError.d.ts new file mode 100644 index 0000000000..960d2ff8a9 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/restError.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +/** + * An error resulting from an HTTP request to a service endpoint. + */ +export declare class RestError extends Error { + /** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ + static readonly REQUEST_SEND_ERROR: string; + /** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ + static readonly PARSE_ERROR: string; + /** + * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT). + */ + code?: string; + /** + * The HTTP status code of the response, if one was returned. + */ + statusCode?: number; + /** + * Outgoing request. + */ + request?: WebResourceLike; + /** + * Incoming response. + */ + response?: HttpOperationResponse; + /** + * Any additional details. In the case of deserialization errors, can be the processed response. + */ + details?: unknown; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse); +} +//# sourceMappingURL=restError.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts b/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts new file mode 100644 index 0000000000..30eb1b598a --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/serializer.d.ts @@ -0,0 +1,356 @@ +import { SerializerOptions } from "./util/serializer.common"; +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export declare class Serializer { + /** + * The provided model mapper. + */ + readonly modelMappers: { + [key: string]: any; + }; + /** + * Whether the contents are XML or not. + */ + readonly isXML?: boolean | undefined; + constructor( + /** + * The provided model mapper. + */ + modelMappers?: { + [key: string]: any; + }, + /** + * Whether the contents are XML or not. + */ + isXML?: boolean | undefined); + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + */ + validateConstraints(mapper: Mapper, value: unknown, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. 
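The RestError declared above carries the transport and HTTP context of a failure. A small sketch of inspecting one, constructed directly here purely for illustration since the pipeline normally throws it:

    import { RestError } from "@azure/core-http";

    // Constructed directly only to show the shape; in practice the HTTP pipeline
    // throws RestError for transport failures and non-success responses.
    const err = new RestError("getaddrinfo ENOTFOUND example.invalid", RestError.REQUEST_SEND_ERROR);

    if (err.code === RestError.REQUEST_SEND_ERROR) {
      // Transport-level failure (e.g. DNS); statusCode and response are undefined.
      console.error(err.message);
    } else if (err.statusCode !== undefined && err.statusCode >= 400) {
      console.error(`HTTP ${err.statusCode}:`, err.response?.bodyAsText);
    }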
+ * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper: Mapper, object: unknown, objectName?: string, options?: SerializerOptions): any; + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. + * @returns A valid deserialized Javascript object. + */ + deserialize(mapper: Mapper, responseBody: unknown, objectName: string, options?: SerializerOptions): any; +} +/** + * Description of various value constraints such as integer ranges and string regex. + */ +export interface MapperConstraints { + /** + * The value should be less than or equal to the `InclusiveMaximum` value. + */ + InclusiveMaximum?: number; + /** + * The value should be less than the `ExclusiveMaximum` value. + */ + ExclusiveMaximum?: number; + /** + * The value should be greater than or equal to the `InclusiveMinimum` value. + */ + InclusiveMinimum?: number; + /** + * The value should be greater than the `InclusiveMinimum` value. + */ + ExclusiveMinimum?: number; + /** + * The length should be smaller than the `MaxLength`. + */ + MaxLength?: number; + /** + * The length should be bigger than the `MinLength`. + */ + MinLength?: number; + /** + * The value must match the pattern. + */ + Pattern?: RegExp; + /** + * The value must contain fewer items than the MaxItems value. + */ + MaxItems?: number; + /** + * The value must contain more items than the `MinItems` value. + */ + MinItems?: number; + /** + * The value must contain only unique items. + */ + UniqueItems?: true; + /** + * The value should be exactly divisible by the `MultipleOf` value. + */ + MultipleOf?: number; +} +/** + * Type of the mapper. Includes known mappers. + */ +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +/** + * The type of a simple mapper. + */ +export interface SimpleMapperType { + /** + * Name of the type of the property. + */ + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +/** + * Helps build a mapper that describes how to map a set of properties of an object based on other mappers. + * + * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`. + */ +export interface CompositeMapperType { + /** + * Name of the composite mapper type. + */ + name: "Composite"; + /** + * Use `className` to reference another type definition. + */ + className?: string; + /** + * Use `modelProperties` when the reference to the other type has been resolved. + */ + modelProperties?: { + [propertyName: string]: Mapper; + }; + /** + * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object. + */ + additionalProperties?: Mapper; + /** + * The name of the top-most parent scheme, the one that has no parents. + */ + uberParent?: string; + /** + * A polymorphic discriminator. + */ + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +/** + * Helps build a mapper that describes how to parse a sequence of mapped values. 
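A sketch of the pack/unpack round trip performed by the Serializer declared above, using a hand-written DateTime mapper (the property name is illustrative; assumes `@azure/core-http` is installed):

```ts
import { Serializer, Mapper } from "@azure/core-http";

// Mapper for a single DateTime property; constraints and XML options omitted.
const createdOnMapper: Mapper = {
  serializedName: "createdOn",
  required: true,
  type: { name: "DateTime" },
};

const serializer = new Serializer();

// serialize() packs the Date into its wire form (an ISO 8601 string)...
const wire = serializer.serialize(createdOnMapper, new Date("2024-01-01T00:00:00Z"), "createdOn");

// ...and deserialize() unpacks the wire form back into a Date instance.
const roundTripped: Date = serializer.deserialize(createdOnMapper, wire, "createdOn");
```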
+ */ +export interface SequenceMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Sequence"; + /** + * The mapper to use to map each one of the properties of the sequence. + */ + element: Mapper; +} +/** + * Helps build a mapper that describes how to parse a dictionary of mapped values. + */ +export interface DictionaryMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Dictionary"; + /** + * The mapper to use to map the value of each property in the dictionary. + */ + value: Mapper; +} +/** + * Helps build a mapper that describes how to parse an enum value. + */ +export interface EnumMapperType { + /** + * Name of the enum type mapper. + */ + name: "Enum"; + /** + * Values allowed by this mapper. + */ + allowedValues: any[]; +} +/** + * The base definition of a mapper. Can be used for XML and plain JavaScript objects. + */ +export interface BaseMapper { + /** + * Name for the xml element + */ + xmlName?: string; + /** + * Xml element namespace + */ + xmlNamespace?: string; + /** + * Xml element namespace prefix + */ + xmlNamespacePrefix?: string; + /** + * Determines if the current property should be serialized as an attribute of the parent xml element + */ + xmlIsAttribute?: boolean; + /** + * Name for the xml elements when serializing an array + */ + xmlElementName?: string; + /** + * Whether or not the current property should have a wrapping XML element + */ + xmlIsWrapped?: boolean; + /** + * Whether or not the current property is readonly + */ + readOnly?: boolean; + /** + * Whether or not the current property is a constant + */ + isConstant?: boolean; + /** + * Whether or not the current property is required + */ + required?: boolean; + /** + * Whether or not the current property allows mull as a value + */ + nullable?: boolean; + /** + * The name to use when serializing + */ + serializedName?: string; + /** + * Type of the mapper + */ + type: MapperType; + /** + * Default value when one is not explicitly provided + */ + defaultValue?: any; + /** + * Constraints to test the current value against + */ + constraints?: MapperConstraints; +} +/** + * Mappers are definitions of the data models used in the library. + * These data models are part of the Operation or Client definitions in the responses or parameters. + */ +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +/** + * Used to disambiguate discriminated type unions. + * For example, if response can have many shapes but also includes a 'kind' field (or similar), + * that field can be used to determine how to deserialize the response to the correct type. + */ +export interface PolymorphicDiscriminator { + /** + * Name of the discriminant property in the original JSON payload, e.g. `@odata.kind`. + */ + serializedName: string; + /** + * Name to use on the resulting object instead of the original property name. + * Useful since the JSON property could be difficult to work with. + * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`. + */ + clientName: string; + /** + * It may contain any other property. + */ + [key: string]: string; +} +/** + * A mapper composed of other mappers. + */ +export interface CompositeMapper extends BaseMapper { + /** + * The type descriptor of the `CompositeMapper`. + */ + type: CompositeMapperType; +} +/** + * A mapper describing arrays. 
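How the composite and sequence mapper types declared above combine in practice, for a hypothetical `Widget` model that is not taken from this diff (assumes `@azure/core-http` is installed):

```ts
import { CompositeMapper } from "@azure/core-http";

// Describes a hypothetical { id: string; tags?: string[] } payload.
export const widgetMapper: CompositeMapper = {
  serializedName: "Widget",
  type: {
    name: "Composite",
    className: "Widget",
    modelProperties: {
      id: {
        serializedName: "id",
        required: true,
        type: { name: "String" },
      },
      tags: {
        serializedName: "tags",
        type: {
          name: "Sequence",
          // The element mapper is applied to every entry of the array.
          element: { serializedName: "tags", type: { name: "String" } },
        },
      },
    },
  },
};
```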
+ */ +export interface SequenceMapper extends BaseMapper { + /** + * The type descriptor of the `SequenceMapper`. + */ + type: SequenceMapperType; +} +/** + * A mapper describing plain JavaScript objects used as key/value pairs. + */ +export interface DictionaryMapper extends BaseMapper { + /** + * The type descriptor of the `DictionaryMapper`. + */ + type: DictionaryMapperType; + /** + * Optionally, a prefix to add to the header collection. + */ + headerCollectionPrefix?: string; +} +/** + * A mapper describing an enum value. + */ +export interface EnumMapper extends BaseMapper { + /** + * The type descriptor of the `EnumMapper`. + */ + type: EnumMapperType; +} +/** + * An interface representing an URL parameter value. + */ +export interface UrlParameterValue { + /** + * The URL value. + */ + value: string; + /** + * Whether to keep or skip URL encoding. + */ + skipUrlEncoding: boolean; +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export declare function serializeObject(toSerialize: unknown): any; +/** + * String enum containing the string types of property mappers. + */ +export declare const MapperType: { + Date: "Date"; + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + DateTime: "DateTime"; + DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts new file mode 100644 index 0000000000..f77862cd3a --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/serviceClient.d.ts @@ -0,0 +1,168 @@ +import { Mapper, Serializer } from "./serializer"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { ServiceCallback } from "./util/utils"; +import { TokenCredential } from "@azure/core-auth"; +import { HttpClient } from "./httpClient"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { InternalPipelineOptions } from "./pipelineOptions"; +import { OperationArguments } from "./operationArguments"; +import { OperationResponse } from "./operationResponse"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +/** + * Options to configure a proxy for outgoing requests (Node.js only). + */ +export interface ProxySettings { + /** + * The proxy's host address. + */ + host: string; + /** + * The proxy host's port. + */ + port: number; + /** + * The user name to authenticate with the proxy, if required. + */ + username?: string; + /** + * The password to authenticate with the proxy, if required. + */ + password?: string; +} +/** + * An alias of {@link ProxySettings} for future use. + */ +export declare type ProxyOptions = ProxySettings; +/** + * Options to be provided while creating the client. 
+ */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-useragent" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * If specified, will be used to build the BearerTokenAuthenticationPolicy. + */ + credentialScopes?: string | string[]; +} +/** + * ServiceClient sends service requests and receives responses. + */ +export declare class ServiceClient { + /** + * If specified, this is the base URI that requests will be made against for this ServiceClient. + * If it is not specified, then all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. 
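A sketch of wiring the ServiceClientOptions above into a client; every value is a placeholder (assumes `@azure/core-http` is installed):

```ts
import { ServiceClient, ServiceClientOptions } from "@azure/core-http";

// Placeholder options: prefix the default user agent, skip cookies, and
// route traffic through a local debugging proxy.
const options: ServiceClientOptions = {
  userAgent: (defaultUserAgent) => `my-app/1.0 ${defaultUserAgent}`,
  withCredentials: false,
  proxySettings: { host: "http://127.0.0.1", port: 8888 },
};

// Credentials are optional; an anonymous client simply passes undefined.
export const client = new ServiceClient(undefined, options);
```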
+ */ + constructor(credentials?: TokenCredential | ServiceClientCredentials, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export declare function createPipelineFromOptions(pipelineOptions: InternalPipelineOptions, authPolicyFactory?: RequestPolicyFactory): ServiceClientOptions; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/url.d.ts b/node_modules/@azure/core-http/types/3.1/src/url.d.ts new file mode 100644 index 0000000000..986a955d9d --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/url.d.ts @@ -0,0 +1,160 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Get the keys of the query string. + */ + keys(): string[]; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. 
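Once constructed, the sendRequest overload that accepts RequestPrepareOptions can issue ad-hoc calls; a minimal sketch with a placeholder endpoint (assumes `@azure/core-http` is installed):

```ts
import { ServiceClient } from "@azure/core-http";

const client = new ServiceClient();

// Issues a GET and surfaces the status code and raw body text.
async function fetchStatus(): Promise<number> {
  const response = await client.sendRequest({
    method: "GET",
    url: "https://example.com/health", // placeholder endpoint
  });
  console.log(response.bodyAsText);
  return response.status;
}
```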
+ */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. + */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + /** + * Parses a given string URL into a new {@link URLBuilder}. 
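A small sketch of the URLBuilder API declared above, with a placeholder URL (assumes `@azure/core-http` is installed):

```ts
import { URLBuilder } from "@azure/core-http";

const url = URLBuilder.parse("https://example.com/base");

// appendPath() extends the existing path; setQueryParameter() adds or
// replaces a single query-string entry.
url.appendPath("items");
url.setQueryParameter("api-version", "2021-04-01");

console.log(url.toString()); // https://example.com/base/items?api-version=2021-04-01
```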
+ */ + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts new file mode 100644 index 0000000000..94d313ee1e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts new file mode 100644 index 0000000000..1b7a4de7b0 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts new file mode 100644 index 0000000000..cfa1b24c77 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/constants.d.ts @@ -0,0 +1,72 @@ +/** + * A set of constants used internally when processing requests. 
+ */ +export declare const Constants: { + /** + * The core-http version + */ + coreHttpVersion: string; + /** + * Specifies HTTP. + */ + HTTP: string; + /** + * Specifies HTTPS. + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + ServiceUnavailable: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: string; + /** + * The UserAgent header. + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/delay.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/delay.d.ts new file mode 100644 index 0000000000..fd55daecd0 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/delay.d.ts @@ -0,0 +1,15 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * @param abortSignal - The abortSignal associated with containing operation. + * @param abortErrorMsg - The abort error message associated with containing operation. + * @returns - Resolved promise + */ +export declare function delay(delayInMs: number, value?: T, options?: { + abortSignal?: AbortSignalLike; + abortErrorMsg?: string; +}): Promise; +//# sourceMappingURL=delay.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts new file mode 100644 index 0000000000..f683083aac --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/exponentialBackoffStrategy.d.ts @@ -0,0 +1,40 @@ +import { HttpOperationResponse } from "../coreHttp"; +export declare const DEFAULT_CLIENT_RETRY_COUNT = 3; +export declare const DEFAULT_CLIENT_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; +export declare function isNumber(n: unknown): n is number; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial chekck on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. 
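The delay helper above is an abortable setTimeout wrapper; a short sketch (assumes `@azure/core-http` and `@azure/abort-controller` are installed):

```ts
import { delay } from "@azure/core-http";
import { AbortController } from "@azure/abort-controller";

// Resolves with "done" after 500 ms, unless the controller aborts first,
// in which case the promise rejects with the given abort message.
async function waitBriefly(): Promise<string> {
  const controller = new AbortController();
  return delay(500, "done", {
    abortSignal: controller.signal,
    abortErrorMsg: "wait was cancelled",
  });
}
```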
+ */ +export declare function shouldRetry(retryLimit: number, predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean, retryData: RetryData, response?: HttpOperationResponse, error?: RetryError): boolean; +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation"s error, if any. + */ +export declare function updateRetryData(retryOptions: { + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; +}, retryData?: RetryData, err?: RetryError): RetryData; +//# sourceMappingURL=exponentialBackoffStrategy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts new file mode 100644 index 0000000000..9f3d211006 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/inspect.browser.d.ts @@ -0,0 +1,2 @@ +export declare const custom: {}; +//# sourceMappingURL=inspect.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts new file mode 100644 index 0000000000..d203786d63 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/inspect.d.ts @@ -0,0 +1,2 @@ +export declare const custom: symbol; +//# sourceMappingURL=inspect.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts new file mode 100644 index 0000000000..972d5452d1 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/sanitizer.d.ts @@ -0,0 +1,25 @@ +export interface SanitizerOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; +} +export declare class Sanitizer { + allowedHeaderNames: Set; + allowedQueryParameters: Set; + constructor({ allowedHeaderNames, allowedQueryParameters }?: SanitizerOptions); + sanitize(obj: unknown): string; + private sanitizeHeaders; + private sanitizeQuery; + private sanitizeObject; + private sanitizeUrl; +} +//# sourceMappingURL=sanitizer.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts new file mode 100644 index 0000000000..e3e4e2d2af --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/serializer.common.d.ts @@ -0,0 +1,26 @@ +/** + * Default key used to access the XML attributes. + */ +export declare const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export declare const XML_CHARKEY = "_"; +/** + * Options to govern behavior of xml parser and builder. + */ +export interface SerializerOptions { + /** + * indicates the name of the root element in the resulting XML when building XML. + */ + rootName?: string; + /** + * indicates whether the root element is to be included or not in the output when parsing XML. 
+ */ + includeRoot?: boolean; + /** + * key used to access the XML value content when parsing XML. + */ + xmlCharKey?: string; +} +//# sourceMappingURL=serializer.common.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts new file mode 100644 index 0000000000..544c3a3d14 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/throttlingRetryStrategy.d.ts @@ -0,0 +1,5 @@ +/** + * Maximum number of retries for the throttling retry policy + */ +export declare const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/typeguards.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/typeguards.d.ts new file mode 100644 index 0000000000..f6b32c664e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/typeguards.d.ts @@ -0,0 +1,7 @@ +/** + * Helper TypeGuard that checks if the value is not null or undefined. + * @param thing - Anything + * @internal + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +//# sourceMappingURL=typeguards.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts new file mode 100644 index 0000000000..abffcd57ee --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/utils.d.ts @@ -0,0 +1,131 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export declare const isNode: boolean; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. 
+ * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: unknown): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param err - The error occurred if any, while executing the request; otherwise null. + * @param result - The deserialized response body if an error did not occur. + * @param request - The raw/actual request sent to the server if an error did not occur. + * @param response - The raw/actual response from the server if an error did not occur. + */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise): (cb: Function) => void; +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback(promise: Promise): (cb: ServiceCallback) => void; +export declare function prepareXMLRootList(obj: unknown, elementName: string, xmlNamespaceKey?: string, xmlNamespace?: string): { + [s: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replacedValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true is it is primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: unknown): boolean; +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare type UnknownObject = { + [s: string]: unknown; +}; +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date. 
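A few of the exported utility helpers above in use; the annotated return values follow the documented behavior (assumes `@azure/core-http` is installed):

```ts
import { generateUuid, isValidUuid, isDuration, encodeUri } from "@azure/core-http";

const requestId = generateUuid();      // fresh RFC 4122 v4 UUID string
console.log(isValidUuid(requestId));   // true
console.log(isDuration("P1DT2H"));     // true: a valid ISO 8601 duration
console.log(encodeUri("a b/c"));       // percent-encoded URI component, e.g. "a%20b%2Fc"
```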
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=utils.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts new file mode 100644 index 0000000000..dfba9cd358 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/xml.browser.d.ts @@ -0,0 +1,4 @@ +import { SerializerOptions } from "./serializer.common"; +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +export declare function stringifyXML(content: unknown, opts?: SerializerOptions): string; +//# sourceMappingURL=xml.browser.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts b/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts new file mode 100644 index 0000000000..42c905689b --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/util/xml.d.ts @@ -0,0 +1,14 @@ +import { SerializerOptions } from "./serializer.common"; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export declare function stringifyXML(obj: unknown, opts?: SerializerOptions): string; +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +//# sourceMappingURL=xml.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts b/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts new file mode 100644 index 0000000000..d5bb52923e --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/webResource.d.ts @@ -0,0 +1,445 @@ +/// +import { Context, SpanOptions } from "@azure/core-tracing"; +import { HttpHeadersLike } from "./httpHeaders"; +import { Mapper } from "./serializer"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { OperationSpec } from "./operationSpec"; +import { ProxySettings } from "./serviceClient"; +import { SerializerOptions } from "./util/serializer.common"; +/** + * List of supported HTTP methods. + */ +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +/** + * Possible HTTP request body types + */ +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * A description of a HTTP request to be made to a remote server. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. 
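A sketch of the exported XML helpers above used with the SerializerOptions keys; the sample payload is illustrative (assumes `@azure/core-http` is installed):

```ts
import { parseXML, stringifyXML } from "@azure/core-http";

async function xmlRoundTrip(): Promise<string> {
  // Parse a small XML payload; per the SerializerOptions docs, the root
  // element is stripped from the result unless includeRoot is set.
  const parsed = await parseXML("<note><to>dev</to></note>");
  console.log(JSON.stringify(parsed));

  // Build an XML string back from a plain object, naming the root explicitly.
  return stringifyXML({ to: "dev" }, { rootName: "note" });
}
```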
+ */ + streamResponseBody?: boolean; + /** + * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * A unique identifier for the request. Used for logging and tracing. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: unknown): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export declare class WebResource implements WebResourceLike { + /** + * URL of the outgoing request. + */ + url: string; + /** + * HTTP method to use. + */ + method: HttpMethods; + /** + * Request body. + */ + body?: any; + /** + * HTTP headers. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream. 
+ */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * Query added to the URL. + */ + query?: { + [key: string]: any; + }; + /** + * Specification of the HTTP request. + */ + operationSpec?: OperationSpec; + /** + * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination. + */ + withCredentials: boolean; + /** + * How long to wait in milliseconds before aborting the request. + */ + timeout: number; + /** + * What proxy to use, if necessary. + */ + proxySettings?: ProxySettings; + /** + * Whether to keep the HTTP connections alive throughout requests. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Unique identifier of the outgoing request. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating Spans. + */ + tracingContext?: Context; + constructor(url?: string, method?: HttpMethods, body?: unknown, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, decompressResponse?: boolean, streamResponseStatusCodes?: Set); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +/** + * Options to prepare an outgoing HTTP request. + */ +export interface RequestPrepareOptions { + /** + * The HTTP request method. 
Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: `{ "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } }` + * - query-parameter-value in "string" format: `{ "query-parameter-name": "query-parameter-value"}`. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}` + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: `{ "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } }` + * - path-parameter-value in "string" format: `{ "path-parameter-name": "path-parameter-value" }`. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * Form data, used to build the request body. + */ + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. 
To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: Record; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Allows keeping track of the progress of uploading the outgoing request. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Allows keeping track of the progress of downloading the incoming response. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + /** + * Value of the parameter. + */ + value: any; + /** + * Disables URL encoding if set to true. + */ + skipUrlEncoding: boolean; + /** + * Parameter values may contain any other property. + */ + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * May contain other properties. + */ + [key: string]: any; + /** + * Options to override XML parsing/building behavior. 
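How the RequestPrepareOptions above are typically consumed through WebResource.prepare; all identifiers and versions are placeholders (assumes `@azure/core-http` is installed):

```ts
import { WebResource } from "@azure/core-http";

const request = new WebResource().prepare({
  method: "GET",
  baseUrl: "https://management.azure.com",
  pathTemplate: "/subscriptions/{subscriptionId}/providers",
  // Path parameters are substituted into the template; query parameters are
  // appended to the resulting URL.
  pathParameters: { subscriptionId: "00000000-0000-0000-0000-000000000000" },
  queryParameters: { "api-version": "2021-04-01" },
  headers: { "accept-language": "en-US" },
});

console.log(request.url); // fully expanded request URL, query string included
```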
+ */ + serializerOptions?: SerializerOptions; +} +//# sourceMappingURL=webResource.d.ts.map diff --git a/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts b/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts new file mode 100644 index 0000000000..f08fd80f61 --- /dev/null +++ b/node_modules/@azure/core-http/types/3.1/src/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike; +//# sourceMappingURL=xhrHttpClient.d.ts.map diff --git a/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts new file mode 100644 index 0000000000..b1aeeffb02 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts @@ -0,0 +1,51 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, } from "./webResource"; +export { CommonResponse, CommonRequestInit, CommonRequestInfo } from "./nodeFetchHttpClient"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationOptions, OperationRequestOptions, operationOptionsToRequestOptionsBase, } from "./operationOptions"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { ServiceClient, ServiceClientOptions, flattenResponse, createPipelineFromOptions, ProxySettings, ProxyOptions, } from "./serviceClient"; +export { PipelineOptions, InternalPipelineOptions } from "./pipelineOptions"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; +export { LogPolicyOptions, logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy, RetryOptions, RetryMode } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy, RedirectOptions } from "./policies/redirectPolicy"; +export { keepAlivePolicy, KeepAliveOptions } from 
"./policies/keepAlivePolicy"; +export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, UserAgentOptions, TelemetryInfo, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, DeserializationOptions, deserializeResponseBody, DeserializationContentTypes, } from "./policies/deserializationPolicy"; +export { tracingPolicy, TracingPolicyOptions } from "./policies/tracingPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { AbortSignalLike } from "@azure/abort-controller"; +export { delay } from "./util/delay"; +export { createSpanFunction, SpanConfig } from "./createSpanLegacy"; +export { TokenCredential, GetTokenOptions, AccessToken, isTokenCredential } from "@azure/core-auth"; +export { AccessTokenCache, ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; +export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { parseXML, stringifyXML } from "./util/xml"; +export { XML_ATTRKEY, XML_CHARKEY, SerializerOptions } from "./util/serializer.common"; +//# sourceMappingURL=coreHttp.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts.map b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts.map new file mode 100644 index 0000000000..4f169704a0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/coreHttp.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"coreHttp.d.ts","sourceRoot":"","sources":["../../../src/coreHttp.ts"],"names":[],"mappings":";AAMA,OAAO,EACL,WAAW,EACX,eAAe,EACf,eAAe,EACf,qBAAqB,EACrB,WAAW,EACX,cAAc,EACd,kBAAkB,EAClB,qBAAqB,GACtB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,cAAc,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,MAAM,uBAAuB,CAAC;AAC7F,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AACzF,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC5F,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EACL,gBAAgB,EAChB,uBAAuB,EACvB,oCAAoC,GACrC,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACrB,aAAa,GACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EACL,aAAa,EACb,oBAAoB,EACpB,eAAe,EACf,yBAAyB,EACzB,aAAa,EACb,YAAY,GACb,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,MAAM,mBAAmB,CAAC;AAC7E,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,+BAA+B,EAAE,MAAM,4CAA4C,CAAC;AAC7F,OAAO,EAAE,gBAAgB,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACpB,wBAAwB,GACzB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,YAAY,EAAE,SAAS,EAAE,MAAM,mCAAmC,CAAC;AACpG,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,eAAe,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAC/E,OAAO,EAAE,kCAAkC,EAAE,MAAM,+CAA+C,CAAC;AACnG,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,eAAe,EACf,wBAAwB,EACxB,gBAAgB,EAChB,aAAa,GACd,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,qBAAqB,EACrB,sBAAsB,EACtB,uBAAuB,EACvB,2BAA2B,GAC5B,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAC/E,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,mBAAmB,EACnB,oBAAoB,EACpB,kBAAkB,EAClB,cAAc,EACd,MAAM,EACN,UAAU,EACV,eAAe,EACf,cAAc,EACd,gBAAgB,EAChB,UAAU,EACV,iBAAiB,EACjB,wBAAwB,EACxB,UAAU,EACV,iBAAiB,EACjB,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EACT,eAAe,EACf,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAC7C,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,KAAK,EAAE,MAAM,cAAc,CAAC;AAErC,OAAO,EAAE,kBAAkB,EAAE,UAAU,EAAE,MAAM,oBAAoB,CAAC;AAGpE,OAAO,EAAE,eAAe,EAAE,eAAe,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpG,OAAO,EAAE,gBAAgB,EAAE,wBAAwB,EAAE,MAAM,gCAAgC,CAAC;AAC5F,OAAO,EAAE,oBAAoB,EAAE,MAAM,oCAAoC,CAAC;AAC1E,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAAE,uBAAuB,EAAE,MAAM,iCAAiC,CAAC;AAC7F,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAClF,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAE1D,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AACpD,OAAO,EAAE,WAAW,EAAE,WAAW,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts new file mode 100644 index 0000000000..9b87f7cffc --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts @@ -0,0 +1,32 @@ +import { Span } from "@azure/core-tracing"; +import { 
OperationOptions } from "./operationOptions"; +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use core-tracing instead. + * @hidden + */ +export interface SpanConfig { + /** + * Package name prefix + */ + packagePrefix: string; + /** + * Service namespace + */ + namespace: string; +} +/** + * This function is only here for compatibility. Use createSpanFunction in core-tracing. + * + * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. + * @hidden + + * @param spanConfig - The name of the operation being performed. + * @param tracingOptions - The options for the underlying http request. + */ +export declare function createSpanFunction(args: SpanConfig): (operationName: string, operationOptions: T) => { + span: Span; + updatedOptions: T; +}; +//# sourceMappingURL=createSpanLegacy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map new file mode 100644 index 0000000000..3e83ac3b3e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/createSpanLegacy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpanLegacy.d.ts","sourceRoot":"","sources":["../../../src/createSpanLegacy.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,IAAI,EAAuD,MAAM,qBAAqB,CAAC;AAChG,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;;;;GAKG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;CACnB;AAED;;;;;;;;GAQG;AACH,wBAAgB,kBAAkB,CAChC,IAAI,EAAE,UAAU,GACf,CAAC,CAAC,SAAS,gBAAgB,EAC5B,aAAa,EAAE,MAAM,EACrB,gBAAgB,EAAE,CAAC,KAChB;IAAE,IAAI,EAAE,IAAI,CAAC;IAAC,cAAc,EAAE,CAAC,CAAA;CAAE,CAErC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts new file mode 100644 index 0000000000..f57bdba3d5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts @@ -0,0 +1,48 @@ +import { AccessToken } from "@azure/core-auth"; +/** + * Defines the default token refresh buffer duration. + */ +export declare const TokenRefreshBufferMs: number; +/** + * Provides a cache for an AccessToken that was that + * was returned from a TokenCredential. + */ +export interface AccessTokenCache { + /** + * Sets the cached token. + * + * @param accessToken - The {@link AccessToken} to be cached or null to + * clear the cached token. + */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached {@link AccessToken} or undefined if nothing is cached. + */ + getCachedToken(): AccessToken | undefined; +} +/** + * Provides an {@link AccessTokenCache} implementation which clears + * the cached {@link AccessToken}'s after the expiresOnTimestamp has + * passed. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class ExpiringAccessTokenCache implements AccessTokenCache { + private tokenRefreshBufferMs; + private cachedToken?; + /** + * Constructs an instance of {@link ExpiringAccessTokenCache} with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs?: number); + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. 
+ */ + setCachedToken(accessToken: AccessToken | undefined): void; + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. + */ + getCachedToken(): AccessToken | undefined; +} +//# sourceMappingURL=accessTokenCache.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map new file mode 100644 index 0000000000..15d69fb3ce --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenCache.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenCache.d.ts","sourceRoot":"","sources":["../../../../src/credentials/accessTokenCache.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAE/C;;GAEG;AACH,eAAO,MAAM,oBAAoB,QAAgB,CAAC;AAElD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,cAAc,CAAC,WAAW,EAAE,WAAW,GAAG,SAAS,GAAG,IAAI,CAAC;IAE3D;;OAEG;IACH,cAAc,IAAI,WAAW,GAAG,SAAS,CAAC;CAC3C;AAED;;;;;;GAMG;AACH,qBAAa,wBAAyB,YAAW,gBAAgB;IAC/D,OAAO,CAAC,oBAAoB,CAAS;IACrC,OAAO,CAAC,WAAW,CAAC,CAA0B;IAE9C;;;OAGG;gBACS,oBAAoB,GAAE,MAA6B;IAI/D;;;OAGG;IACH,cAAc,CAAC,WAAW,EAAE,WAAW,GAAG,SAAS,GAAG,IAAI;IAI1D;;OAEG;IACH,cAAc,IAAI,WAAW,GAAG,SAAS;CAU1C"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts new file mode 100644 index 0000000000..e3c8d38767 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts @@ -0,0 +1,32 @@ +import { AccessToken, GetTokenOptions, TokenCredential } from "@azure/core-auth"; +/** + * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. + * + * @deprecated No longer used in the bearer authorization policy. + */ +export declare class AccessTokenRefresher { + private credential; + private scopes; + private requiredMillisecondsBeforeNewRefresh; + private promise; + private lastCalled; + constructor(credential: TokenCredential, scopes: string | string[], requiredMillisecondsBeforeNewRefresh?: number); + /** + * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying + * that we are ready for a new refresh. + */ + isReady(): boolean; + /** + * Stores the time in which it is called, + * then requests a new token, + * then sets this.promise to undefined, + * then returns the token. + */ + private getToken; + /** + * Requests a new token if we're not currently waiting for a new token. + * Returns null if the required time between each call hasn't been reached. 
+ */ + refresh(options: GetTokenOptions): Promise; +} +//# sourceMappingURL=accessTokenRefresher.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map new file mode 100644 index 0000000000..5cf3f0f038 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/accessTokenRefresher.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"accessTokenRefresher.d.ts","sourceRoot":"","sources":["../../../../src/credentials/accessTokenRefresher.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AAEjF;;;;GAIG;AACH,qBAAa,oBAAoB;IAK7B,OAAO,CAAC,UAAU;IAClB,OAAO,CAAC,MAAM;IACd,OAAO,CAAC,oCAAoC;IAN9C,OAAO,CAAC,OAAO,CAA+C;IAC9D,OAAO,CAAC,UAAU,CAAK;gBAGb,UAAU,EAAE,eAAe,EAC3B,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,EACzB,oCAAoC,GAAE,MAAc;IAG9D;;;OAGG;IACI,OAAO,IAAI,OAAO;IAOzB;;;;;OAKG;YACW,QAAQ;IAOtB;;;OAGG;IACI,OAAO,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,WAAW,GAAG,SAAS,CAAC;CAO3E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts new file mode 100644 index 0000000000..a1b9a6409e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,44 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map new file mode 100644 index 0000000000..e32be59821 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/apiKeyCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAChC;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;CAChC;AAED;;GAEG;AACH,qBAAa,iBAAkB,YAAW,wBAAwB;IAChE;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAuB;IACjD;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAuB;IAEhD;;OAEG;gBACS,OAAO,EAAE,uBAAuB;IAU5C;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAiCpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 0000000000..1210598280 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,36 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + /** + * Username + */ + userName: string; + /** + * Password + */ + password: string; + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @param userName - User name. + * @param password - Password. + * @param authorizationScheme - The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike to be signed. + * @returns The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map new file mode 100644 index 0000000000..c2c5c3ef9c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/basicAuthenticationCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AACtE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAKjD;;GAEG;AACH,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,QAAQ,EAAE,MAAM,CAAC;IAEjB;;;OAGG;IACH,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;OAMG;gBAED,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,EAChB,mBAAmB,GAAE,MAAqC;IAa5D;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAOpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts new file mode 100644 index 0000000000..d8d1c5c1a6 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts @@ -0,0 +1,6 @@ +/** + * A function that receives a challenge and resolves a promise with a string token. + * @deprecated The Authenticator type is not currently in use. + */ +export declare type Authenticator = (challenge: unknown) => Promise; +//# sourceMappingURL=credentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map new file mode 100644 index 0000000000..8617d1b52e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/credentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/credentials.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,oBAAY,aAAa,GAAG,CAAC,SAAS,EAAE,OAAO,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts new file mode 100644 index 0000000000..a65d0470de --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,14 @@ +import { WebResourceLike } from "../webResource"; +/** + * Represents an object or class with a `signRequest` method which will sign outgoing requests (for example, by setting the `Authorization` header). + */ +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param webResource - The WebResourceLike/request to be signed. 
+ * @returns The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map new file mode 100644 index 0000000000..8a9416e148 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/serviceClientCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;CACrE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts new file mode 100644 index 0000000000..5e939fccf9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts @@ -0,0 +1,13 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @param topicKey - The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map new file mode 100644 index 0000000000..3adcd9d2ba --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/credentials/topicCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.d.ts","sourceRoot":"","sources":["../../../../src/credentials/topicCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAA2B,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAEjF;;GAEG;AACH,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD;;;;OAIG;gBACS,QAAQ,EAAE,MAAM;CAW7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts new file mode 100644 index 0000000000..883145a45b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map new file mode 100644 index 0000000000..2613c9052f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.d.ts","sourceRoot":"","sources":["../../../src/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts 
b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts new file mode 100644 index 0000000000..a7381a249b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map new file mode 100644 index 0000000000..c15b8dd5a9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/defaultHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.d.ts","sourceRoot":"","sources":["../../../src/defaultHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts new file mode 100644 index 0000000000..5f5f00e974 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map new file mode 100644 index 0000000000..060dbe014d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.d.ts","sourceRoot":"","sources":["../../../src/httpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAEzD;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,aAAa;CAAG"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts new file mode 100644 index 0000000000..67ac61be3f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts @@ -0,0 +1,3 @@ +import { HttpClient } from "./httpClient"; +export declare function getCachedDefaultHttpClient(): HttpClient; +//# sourceMappingURL=httpClientCache.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map new file mode 100644 index 0000000000..09a6518b80 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpClientCache.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClientCache.d.ts","sourceRoot":"","sources":["../../../src/httpClientCache.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C,wBAAgB,0BAA0B,IAAI,UAAU,CAMvD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts new file mode 100644 index 0000000000..7e487d9415 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts @@ -0,0 +1,136 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. 
+ */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: unknown): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders implements HttpHeadersLike { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. 
+ */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options?: { + preserveCase?: boolean; + }): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map new file mode 100644 index 0000000000..e0631334c4 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpHeaders.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.d.ts","sourceRoot":"","sources":["../../../src/httpHeaders.ts"],"names":[],"mappings":"AAUA;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,oBAAY,cAAc,GAAG;IAAE,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;OAKG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC5D;;;;OAIG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;IAC5C;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACtC;;;;OAIG;IACH,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACpC;;OAEG;IACH,UAAU,IAAI,cAAc,CAAC;IAC7B;;OAEG;IACH,YAAY,IAAI,UAAU,EAAE,CAAC;IAC7B;;OAEG;IACH,WAAW,IAAI,MAAM,EAAE,CAAC;IACxB;;OAEG;IACH,YAAY,IAAI,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;IACzB;;;OAGG;IACH,MAAM,CAAC,OAAO,CAAC,EAAE;QAAE,YAAY,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,cAAc,CAAC;CAC9D;AAED,wBAAgB,iBAAiB,CAAC,MAAM,CAAC,EAAE,OAAO,GAAG,MAAM,IAAI,eAAe,CA+B7E;AAED;;GAEG;AACH,qBAAa,WAAY,YAAW,eAAe;IACjD,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAsC;gBAEtD,UAAU,CAAC,EAAE,cAAc;IASvC;;;;;OAKG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAOlE;;;;OAIG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKlD;;OAEG;IACI,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAI5C;;;;OAIG;IACI,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAM1C;;OAEG;IACI,UAAU,IAAI,cAAc;IAInC;;OAEG;IACI,YAAY,IAAI,UAAU,EAAE;IAQnC;;OAEG;IACI,WAAW,IAAI,MAAM,EAAE;IAS9B;;OAEG;IACI,YAAY,IAAI,MAAM,EAAE;IAS/B;;OAEG;IACI,MAAM,CAAC,OAAO,GAAE;QAAE,YAAY,CAAC,EAAE,OAAO,CAAA;KAAO,GAAG,cAAc;IAgBvE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAIzB;;OAEG;IACI,KAAK,IAAI,WAAW;CAQ5B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts new file mode 100644 index 0000000000..5e3613d0da --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts @@ -0,0 +1,78 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. 
Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; +} +/** + * The flattened response to a REST call. + * Contains the underlying {@link HttpOperationResponse} as well as + * the merged properties of the `parsedBody`, `parsedHeaders`, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + /** + * The flattened properties described by the `OperationSpec`, deserialized from headers and the HTTP body. + */ + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map new file mode 100644 index 0000000000..96af4ccfe5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpOperationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.d.ts","sourceRoot":"","sources":["../../../src/httpOperationResponse.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;CAC1B;AAED,OAAO,CAAC,MAAM,CAAC;IACb;;;OAGG;IACH,UAAU,IAAI;KAAG;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,qBAAsB,SAAQ,YAAY;IACzD;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAEvC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;OAEG;IACH,UAAU,CAAC,EAAE,GAAG,CAAC;IAEjB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEzB;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;CAC5C;AAED;;;;GAIG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,SAAS,EAAE,qBAAqB,CAAC;IAEjC;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts new file mode 100644 index 0000000000..7a13ab2918 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map new file mode 100644 index 0000000000..879719da40 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogLevel.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.d.ts","sourceRoot":"","sources":["../../../src/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,oBAAoB;IAC9B;;OAEG;IACH,GAAG,IAAA;IAEH;;OAEG;IACH,KAAK,IAAA;IAEL;;OAEG;IACH,OAAO,IAAA;IAEP;;OAEG;IACH,IAAI,IAAA;CACL"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts new file mode 100644 index 0000000000..cd26a55593 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel - The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel - The HttpLogDetailLevel associated with this message. + * @param message - The message to log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map new file mode 100644 index 0000000000..22fa885871 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/httpPipelineLogger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.d.ts","sourceRoot":"","sources":["../../../src/httpPipelineLogger.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,eAAe,EAAE,oBAAoB,CAAC;IAEtC;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,yBAA0B,YAAW,kBAAkB;IAK/C,eAAe,EAAE,oBAAoB;IAJxD;;;OAGG;gBACgB,eAAe,EAAE,oBAAoB;IAExD;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAgB3D"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/log.d.ts b/node_modules/@azure/core-http/types/latest/src/log.d.ts new file mode 100644 index 0000000000..d9ad771b89 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/log.d.ts @@ -0,0 +1,2 @@ +export declare const logger: import("@azure/logger").AzureLogger; +//# sourceMappingURL=log.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/log.d.ts.map b/node_modules/@azure/core-http/types/latest/src/log.d.ts.map new file mode 100644 index 0000000000..0a002fa3d5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/log.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"log.d.ts","sourceRoot":"","sources":["../../../src/log.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,MAAM,qCAAkC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts new file mode 100644 index 0000000000..4a844e8694 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts @@ -0,0 +1,64 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { Transform } from "stream"; +import { TransferProgressEvent, WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * String URLs used when calling to `fetch()`. + */ +export declare type CommonRequestInfo = string; +/** + * An object containing information about the outgoing HTTP request. + */ +export declare type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; +/** + * An object containing information about the incoming HTTP response. + */ +export declare type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; +export declare class ReportTransform extends Transform { + private progressCallback; + private loadedBytes; + _transform(chunk: string | Buffer, _encoding: string, callback: (arg: any) => void): void; + constructor(progressCallback: (progress: TransferProgressEvent) => void); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +/** + * An HTTP client that uses `node-fetch`. 
+ */ +export declare class NodeFetchHttpClient implements HttpClient { + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ + sendRequest(httpRequest: WebResourceLike): Promise; + private proxyAgentMap; + private keepAliveAgents; + private readonly cookieJar; + private getOrCreateAgent; + /** + * Uses `node-fetch` to perform the request. + */ + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + /** + * Prepares a request based on the provided web resource. + */ + prepareRequest(httpRequest: WebResourceLike): Promise>; + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ + processRequest(operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map new file mode 100644 index 0000000000..28ab8ba37a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/nodeFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../../src/nodeFetchHttpClient.ts"],"names":[],"mappings":";AAOA,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAE7D,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAC7C,OAAO,EAAE,qBAAqB,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEvE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAuBhE;;GAEG;AACH,oBAAY,iBAAiB,GAAG,MAAM,CAAC;AAEvC;;GAEG;AACH,oBAAY,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,GAAG,QAAQ,CAAC,GAAG;IACjF,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,GAAG,CAAC;CACd,CAAC;AAEF;;GAEG;AACH,oBAAY,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,CAAC,GAAG;IAC7E,IAAI,EAAE,GAAG,CAAC;IACV,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;CACf,CAAC;AAEF,qBAAa,eAAgB,SAAQ,SAAS;IAShC,OAAO,CAAC,gBAAgB;IARpC,OAAO,CAAC,WAAW,CAAa;IAChC,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG,KAAK,IAAI,GAAG,IAAI;gBAOrE,gBAAgB,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI;CAGhF;AAiBD;;GAEG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,GAAG,eAAe,CAQ9D;AAED;;GAEG;AACH,qBAAa,mBAAoB,YAAW,UAAU;IACpD;;;;OAIG;IACG,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAyL/E,OAAO,CAAC,aAAa,CAAsC;IAC3D,OAAO,CAAC,eAAe,CAAkB;IAEzC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAuD;IAEjF,OAAO,CAAC,gBAAgB;IAqDxB;;OAEG;IAEG,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIxF;;OAEG;IACG,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAyBjF;;OAEG;IACG,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAqB9E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts new file mode 100644 index 0000000000..7eadcda917 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. 
+ */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map new file mode 100644 index 0000000000..341dc04c28 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationArguments.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.d.ts","sourceRoot":"","sources":["../../../src/operationArguments.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,CAAC,aAAa,EAAE,MAAM,GAAG,GAAG,CAAC;IAE7B;;OAEG;IACH,OAAO,CAAC,EAAE,kBAAkB,CAAC;CAC9B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts new file mode 100644 index 0000000000..6ea66e624e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts @@ -0,0 +1,56 @@ +import { RequestOptionsBase, TransferProgressEvent } from "./webResource"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationTracingOptions } from "@azure/core-tracing"; +/** + * The base options type for all operations. + */ +export interface OperationOptions { + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * Options used when creating and sending HTTP requests for this operation. + */ + requestOptions?: OperationRequestOptions; + /** + * Options used when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options that allow configuring the handling of HTTP requests made by an SDK operation. + */ +export interface OperationRequestOptions { + /** + * User defined custom request headers that will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. 
+ */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); +} +/** + * Converts an OperationOptions to a RequestOptionsBase + * + * @param opts - OperationOptions object to convert to RequestOptionsBase + */ +export declare function operationOptionsToRequestOptionsBase(opts: T): RequestOptionsBase; +//# sourceMappingURL=operationOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map new file mode 100644 index 0000000000..31a30b9eba --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationOptions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationOptions.d.ts","sourceRoot":"","sources":["../../../src/operationOptions.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAC;AAC1E,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,uBAAuB,EAAE,MAAM,qBAAqB,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,cAAc,CAAC,EAAE,uBAAuB,CAAC;IACzC;;OAEG;IACH,cAAc,CAAC,EAAE,uBAAuB,CAAC;CAC1C;AAED;;GAEG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;CAC9E;AAED;;;;GAIG;AACH,wBAAgB,oCAAoC,CAAC,CAAC,SAAS,gBAAgB,EAC7E,IAAI,EAAE,CAAC,GACN,kBAAkB,CAgBpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts new file mode 100644 index 0000000000..4f0aa660e0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts @@ -0,0 +1,54 @@ +import { Mapper } from "./serializer"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +/** + * A path which describes how to access a particular property in a given object data source. May be a single property name, an array that denotes nested property names, or a set of multiple named properties with paths in the case of complex object values. + */ +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. 
+ */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map new file mode 100644 index 0000000000..70d5e72c9d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationParameter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.d.ts","sourceRoot":"","sources":["../../../src/operationParameter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE;;GAEG;AACH,oBAAY,aAAa,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,aAAa,CAAA;CAAE,CAAC;AAE1F;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,EAAE,aAAa,CAAC;IAE7B;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,kBAAkB;IAC/D;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,kBAAkB;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,qBAAqB,CAAC;CAC1C;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,SAAS,EAAE,kBAAkB,GAAG,MAAM,CAEhF;AAED,wBAAgB,8BAA8B,CAC5C,aAAa,EAAE,aAAa,EAC5B,MAAM,EAAE,MAAM,GACb,MAAM,CAUR"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts new file mode 100644 index 0000000000..7e3d06bd89 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts @@ -0,0 +1,19 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. 
+ */ + bodyMapper?: Mapper; + /** + * Indicates if this is an error response + */ + isError?: boolean; +} +//# sourceMappingURL=operationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map new file mode 100644 index 0000000000..c6c21cfdeb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.d.ts","sourceRoot":"","sources":["../../../src/operationResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;CACnB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts new file mode 100644 index 0000000000..3bc59281e1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts @@ -0,0 +1,77 @@ +import { Serializer } from "./serializer"; +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { HttpMethods } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +/** + * A specification that defines how to perform a particular service operation over HTTP, including how to properly serialize request information into and deserialize response information into an object payload returnable by the {@link ServiceClient}. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The media type of the request body. + * This value can be used to aide in serialization if it is provided. + */ + readonly mediaType?: "json" | "xml" | "form" | "binary" | "multipart" | "text" | "unknown" | string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. 
+ */ + readonly headerParameters?: ReadonlyArray; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray; + /** + * The different types of responses that this operation can return based on what status code is + * returned. + */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export declare function getStreamResponseStatusCodes(operationSpec: OperationSpec): Set; +//# sourceMappingURL=operationSpec.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map new file mode 100644 index 0000000000..45e867887e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/operationSpec.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.d.ts","sourceRoot":"","sources":["../../../src/operationSpec.ts"],"names":[],"mappings":"AAGA,OAAO,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;IAEhC;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,WAAW,CAAC;IAEjC;;;;OAIG;IACH,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE1B;;;OAGG;IACH,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAEvB;;;OAGG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAE9B;;;OAGG;IACH,QAAQ,CAAC,SAAS,CAAC,EACf,MAAM,GACN,KAAK,GACL,MAAM,GACN,QAAQ,GACR,WAAW,GACX,MAAM,GACN,SAAS,GACT,MAAM,CAAC;IACX;;OAEG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,kBAAkB,CAAC;IAE1C;;OAEG;IACH,QAAQ,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;IAE9D;;OAEG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,aAAa,CAAC,uBAAuB,CAAC,CAAC;IAElE;;;OAGG;IACH,QAAQ,CAAC,gBAAgB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAE9D;;;OAGG;IACH,QAAQ,CAAC,kBAAkB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAEhE;;;OAGG;IACH,QAAQ,CAAC,SAAS,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,iBAAiB,CAAA;KAAE,CAAC;CACnE;AAED;;;GAGG;AACH,wBAAgB,4BAA4B,CAAC,aAAa,EAAE,aAAa,GAAG,GAAG,CAAC,MAAM,CAAC,CAYtF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts new file mode 100644 index 0000000000..790b1b05b7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts @@ -0,0 +1,63 @@ +import { DeserializationOptions } from "./policies/deserializationPolicy"; +import { HttpClient } from "./httpClient"; +import { KeepAliveOptions } from "./policies/keepAlivePolicy"; +import { LogPolicyOptions } from "./policies/logPolicy"; +import { ProxyOptions } from "./serviceClient"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RetryOptions } from "./policies/exponentialRetryPolicy"; +import { UserAgentOptions } from "./policies/userAgentPolicy"; +/** + * Defines options that are used to configure the HTTP pipeline for + * an SDK client. + */ +export interface PipelineOptions { + /** + * The HttpClient implementation to use for outgoing HTTP requests. Defaults + * to DefaultHttpClient. + */ + httpClient?: HttpClient; + /** + * Options that control how to retry failed requests. + */ + retryOptions?: RetryOptions; + /** + * Options to configure a proxy for outgoing requests. 
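For reference, a minimal sketch of how an `OperationSpec` of the shape declared above is typically assembled. This is illustrative only and not part of the diff; the operation name, path, and mapper contents are hypothetical, and real generated SDK code passes its own model mappers to `Serializer`.

```ts
import { OperationSpec, Serializer } from "@azure/core-http";

// Shared serializer; generated clients pass their model mappers here.
const serializer = new Serializer({}, /* isXML */ false);

// "getWidget" is a made-up operation; responses maps status codes to the
// OperationResponse shape declared above.
const getWidgetOperationSpec: OperationSpec = {
  httpMethod: "GET",
  path: "/widgets/{widgetName}",
  urlParameters: [
    {
      parameterPath: "widgetName",
      mapper: { serializedName: "widgetName", required: true, type: { name: "String" } },
    },
  ],
  responses: {
    200: { bodyMapper: { type: { name: "Object" } } },
    default: { isError: true },
  },
  serializer,
};
```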
+ */ + proxyOptions?: ProxyOptions; + /** + * Options for how HTTP connections should be maintained for future + * requests. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; +} +/** + * Defines options that are used to configure internal options of + * the HTTP pipeline for an SDK client. + */ +export interface InternalPipelineOptions extends PipelineOptions { + /** + * Options to configure API response deserialization. + */ + deserializationOptions?: DeserializationOptions; + /** + * Options to configure request/response logging. + */ + loggingOptions?: LogPolicyOptions; + /** + * Configure whether to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Send JSON Array payloads as NDJSON. + */ + sendStreamingJson?: boolean; +} +//# sourceMappingURL=pipelineOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map new file mode 100644 index 0000000000..3a23655e3d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/pipelineOptions.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"pipelineOptions.d.ts","sourceRoot":"","sources":["../../../src/pipelineOptions.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,sBAAsB,EAAE,MAAM,kCAAkC,CAAC;AAC1E,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAC9D,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACxD,OAAO,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAC/C,OAAO,EAAE,eAAe,EAAE,MAAM,2BAA2B,CAAC;AAC5D,OAAO,EAAE,YAAY,EAAE,MAAM,mCAAmC,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,4BAA4B,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B;;;OAGG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IAExB;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAE5B;;OAEG;IACH,YAAY,CAAC,EAAE,YAAY,CAAC;IAE5B;;;OAGG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;IAEpC;;OAEG;IACH,eAAe,CAAC,EAAE,eAAe,CAAC;IAElC;;OAEG;IACH,gBAAgB,CAAC,EAAE,gBAAgB,CAAC;CACrC;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,eAAe;IAC9D;;OAEG;IACH,sBAAsB,CAAC,EAAE,sBAAsB,CAAC;IAEhD;;OAEG;IACH,cAAc,CAAC,EAAE,gBAAgB,CAAC;IAElC;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAE7B;;OAEG;IACH,iBAAiB,CAAC,EAAE,OAAO,CAAC;CAC7B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts new file mode 100644 index 0000000000..d9c549a494 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts @@ -0,0 +1,33 @@ +import { TokenCredential } from "@azure/core-auth"; +import { RequestPolicyFactory } from "../policies/requestPolicy"; +interface TokenCyclerOptions { + /** + * The window of time before token expiration during which the token will be + * considered unusable due to risk of the token expiring before sending the + * request. + * + * This will only become meaningful if the refresh fails for over + * (refreshWindow - forcedRefreshWindow) milliseconds. + */ + forcedRefreshWindowInMs: number; + /** + * Interval in milliseconds to retry failed token refreshes. + */ + retryIntervalInMs: number; + /** + * The window of time before token expiration during which + * we will attempt to refresh the token. 
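A small sketch of configuring the pipeline options declared above (illustrative only, not part of the diff; it assumes `InternalPipelineOptions` is re-exported from the package root like the other types, and the values are arbitrary examples).

```ts
import { InternalPipelineOptions } from "@azure/core-http";

const pipelineOptions: InternalPipelineOptions = {
  retryOptions: { maxRetries: 5, retryDelayInMs: 1000, maxRetryDelayInMs: 30000 },
  keepAliveOptions: { enable: true },
  redirectOptions: { handleRedirects: true, maxRetries: 10 },
  loggingOptions: { allowedQueryParameters: ["api-version"] },
  // Node-only switch declared on InternalPipelineOptions above.
  decompressResponse: true,
};
```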
+ */ + refreshWindowInMs: number; +} +export declare const DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions; +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export declare function bearerTokenAuthenticationPolicy(credential: TokenCredential, scopes: string | string[]): RequestPolicyFactory; +export {}; +//# sourceMappingURL=bearerTokenAuthenticationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map new file mode 100644 index 0000000000..9bdd1cdd6d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/bearerTokenAuthenticationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bearerTokenAuthenticationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/bearerTokenAuthenticationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAgC,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACjF,OAAO,EAGL,oBAAoB,EAErB,MAAM,2BAA2B,CAAC;AAgBnC,UAAU,kBAAkB;IAC1B;;;;;;;OAOG;IACH,uBAAuB,EAAE,MAAM,CAAC;IAChC;;OAEG;IACH,iBAAiB,EAAE,MAAM,CAAC;IAC1B;;;OAGG;IACH,iBAAiB,EAAE,MAAM,CAAC;CAC3B;AAGD,eAAO,MAAM,sBAAsB,EAAE,kBAIpC,CAAC;AA2KF;;;;;;GAMG;AACH,wBAAgB,+BAA+B,CAC7C,UAAU,EAAE,eAAe,EAC3B,MAAM,EAAE,MAAM,GAAG,MAAM,EAAE,GACxB,oBAAoB,CAgCtB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts new file mode 100644 index 0000000000..2290bed1c4 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts @@ -0,0 +1,59 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { SerializerOptions } from "../util/serializer.common"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to configure API response deserialization. + */ +export interface DeserializationOptions { + /** + * Configures the expected content types for the deserialization of + * JSON and XML response bodies. + */ + expectedContentTypes: DeserializationContentTypes; +} +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. 
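A sketch of using the `bearerTokenAuthenticationPolicy` factory declared above (illustrative only, not part of the diff). The credential here is a toy static-token credential; real code would use a `TokenCredential` from `@azure/identity`, and the scope value is just an example.

```ts
import { bearerTokenAuthenticationPolicy } from "@azure/core-http";
import { AccessToken, TokenCredential } from "@azure/core-auth";

// Toy credential that always returns a fixed token (for illustration only).
const staticCredential: TokenCredential = {
  getToken: async (): Promise<AccessToken> => ({
    token: "<redacted>",
    expiresOnTimestamp: Date.now() + 60 * 60 * 1000,
  }),
};

// The resulting RequestPolicyFactory is handed to the pipeline alongside
// the other policy factories.
const authPolicyFactory = bearerTokenAuthenticationPolicy(
  staticCredential,
  "https://management.azure.com/.default"
);
```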
+ */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +export declare const DefaultDeserializationOptions: DeserializationOptions; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + readonly xmlCharKey: string; + constructor(nextPolicy: RequestPolicy, requestPolicyOptions: RequestPolicyOptions, deserializationContentTypes?: DeserializationContentTypes, parsingOptions?: SerializerOptions); + sendRequest(request: WebResourceLike): Promise; +} +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse, options?: SerializerOptions): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map new file mode 100644 index 0000000000..f026254d94 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/deserializationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,iBAAiB,EAAe,MAAM,2BAA2B,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAKjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC;;;OAGG;IACH,oBAAoB,EAAE,2BAA2B,CAAC;CACnD;AAED;;;GAGG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC;IAEhB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC;CAChB;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CACnC,2BAA2B,CAAC,EAAE,2BAA2B,EACzD,cAAc,CAAC,EAAE,iBAAiB,GACjC,oBAAoB,CAWtB;AAED,eAAO,MAAM,uBAAuB,UAAoC,CAAC;AACzE,eAAO,MAAM,sBAAsB,UAA8C,CAAC;AAElF,eAAO,MAAM,6BAA6B,EAAE,sBAK3C,CAAC;AAEF;;;GAGG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,SAAgB,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3C,SAAgB,eAAe,EAAE,MAAM,EAAE,CAAC;IAC1C,SAAgB,UAAU,EAAE,MAAM,CAAC;gBAGjC,UAAU,EAAE,aAAa,EACzB,oBAAoB,EAAE,oBAAoB,EAC1C,2BAA2B,CAAC,EAAE,2BAA2B,EACzD,cAAc,GAAE,iBAAsB;IAW3B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAOnF;AAsCD;;;;;;;GAOG;AACH,wBAAgB,uBAAuB,CACrC,gBAAgB,EAAE,MAAM,EAAE,EAC1B,eAAe,EAAE,MAAM,EAAE,EACzB,QAAQ,EAAE,qBAAqB,EAC/B,OAAO,GAAE,iBAAsB,GAC9B,OAAO,CAAC,qBAAqB,CAAC,CA4EhC"} \ No newline at end of file diff --git 
a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts new file mode 100644 index 0000000000..f071e6e0a5 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts @@ -0,0 +1,13 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting + * to use it will results in error being thrown. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map new file mode 100644 index 0000000000..45df97bb74 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/disableResponseDecompressionPolicy.browser.ts"],"names":[],"mappings":"AAMA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAM7C;;;GAGG;AACH,wBAAgB,kCAAkC,IAAI,oBAAoB,CAMzE;AAED,qBAAa,kCAAmC,SAAQ,iBAAiB;gBAC3D,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAKvD,WAAW,CAAC,QAAQ,EAAE,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAGhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts new file mode 100644 index 0000000000..949f8e2f6c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts @@ -0,0 +1,29 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResource } from "../webResource"; +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +export declare function disableResponseDecompressionPolicy(): RequestPolicyFactory; +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export declare class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResource): Promise; +} +//# sourceMappingURL=disableResponseDecompressionPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map new file mode 100644 index 0000000000..364460e029 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/disableResponseDecompressionPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"disableResponseDecompressionPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/disableResponseDecompressionPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C;;;GAGG;AACH,wBAAgB,kCAAkC,IAAI,oBAAoB,CAMzE;AAED;;;GAGG;AACH,qBAAa,kCAAmC,SAAQ,iBAAiB;IACvE;;;;;OAKG;gBAGS,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAIpE;;;;;OAKG;IACU,WAAW,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAI/E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 0000000000..10fcb37161 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,73 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +export declare enum RetryMode { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ + Exponential = 0 +} +/** + * Options that control how to retry failed requests. + */ +export interface RetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; + /** + * The amount of delay in milliseconds between retry attempts. Defaults to 30000 + * (30 seconds). The delay increases exponentially with each retry up to a maximum + * specified by maxRetryDelayInMs. + */ + retryDelayInMs?: number; + /** + * The maximum delay in milliseconds allowed before retrying an operation. Defaults + * to 90000 (90 seconds). + */ + maxRetryDelayInMs?: number; + /** + * Currently supporting only Exponential mode. + */ + mode?: RetryMode; +} +export declare const DefaultRetryOptions: RetryOptions; +/** + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. 
+ */ + retryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @param nextPolicy - The next RequestPolicy in the pipeline chain. + * @param options - The options for this RequestPolicy. + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map new file mode 100644 index 0000000000..8d1cca62ba --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/exponentialRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAYzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD;;;;;GAKG;AACH,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAYtB;AAED;;GAEG;AACH,oBAAY,SAAS;IACnB;;;OAGG;IACH,WAAW,IAAA;CACZ;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;;;OAIG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IAExB;;;OAGG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAE3B;;OAEG;IACH,IAAI,CAAC,EAAE,SAAS,CAAC;CAClB;AAED,eAAO,MAAM,mBAAmB,EAAE,YAIjC,CAAC;AAEF;;GAEG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IAEzB;;;;;;;OAOG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM;IAUpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 0000000000..827fd04651 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,14 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. 
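A sketch of the two ways the retry surface declared above is typically used: the `exponentialRetryPolicy` factory for hand-built pipelines, and `RetryOptions` for clients configured through `PipelineOptions.retryOptions` (illustrative only, not part of the diff; the numeric values are arbitrary).

```ts
import { exponentialRetryPolicy, RetryMode, RetryOptions } from "@azure/core-http";

// Factory form, for pipelines assembled from RequestPolicyFactory entries:
const retryFactory = exponentialRetryPolicy(3 /* retries */, 1000 /* ms */, 30000 /* max ms */);

// Options form, for clients configured via PipelineOptions.retryOptions:
const retryOptions: RetryOptions = {
  maxRetries: 3,
  retryDelayInMs: 1000,
  maxRetryDelayInMs: 30000,
  mode: RetryMode.Exponential,
};
```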
+ */ +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map new file mode 100644 index 0000000000..6c50dfe577 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/generateClientRequestIdPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;GAGG;AACH,wBAAgB,6BAA6B,CAC3C,mBAAmB,SAA2B,GAC7C,oBAAoB,CAMtB;AAED,qBAAa,6BAA8B,SAAQ,iBAAiB;IAIhE,OAAO,CAAC,oBAAoB;gBAF5B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACrB,oBAAoB,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts new file mode 100644 index 0000000000..dc137ca5fb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts @@ -0,0 +1,46 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how HTTP connections should be maintained for future + * requests. + */ +export interface KeepAliveOptions { + /** + * When true, connections will be kept alive for multiple requests. + * Defaults to true. + */ + enable: boolean; +} +/** + * By default, HTTP connections are maintained for future requests. + */ +export declare const DefaultKeepAliveOptions: KeepAliveOptions; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. + * @returns An instance of the {@link KeepAlivePolicy} + */ +export declare function keepAlivePolicy(keepAliveOptions?: KeepAliveOptions): RequestPolicyFactory; +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +export declare class KeepAlivePolicy extends BaseRequestPolicy { + private readonly keepAliveOptions; + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, keepAliveOptions: KeepAliveOptions); + /** + * Sends out request. 
+ * + * @param request - + * @returns + */ + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=keepAlivePolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map new file mode 100644 index 0000000000..0852850b16 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/keepAlivePolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"keepAlivePolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/keepAlivePolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;OAGG;IACH,MAAM,EAAE,OAAO,CAAC;CACjB;AAED;;GAEG;AACH,eAAO,MAAM,uBAAuB,EAAE,gBAErC,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,gBAAgB,CAAC,EAAE,gBAAgB,GAAG,oBAAoB,CAMzF;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,iBAAiB;IAWlD,OAAO,CAAC,QAAQ,CAAC,gBAAgB;IAVnC;;;;;;OAMG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACZ,gBAAgB,EAAE,gBAAgB;IAKrD;;;;;OAKG;IACU,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAInF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts new file mode 100644 index 0000000000..98130d58f9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts @@ -0,0 +1,79 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Debugger } from "@azure/logger"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Sanitizer } from "../util/sanitizer"; +import { WebResourceLike } from "../webResource"; +/** + * Options to pass to the {@link logPolicy}. + * By default only a set list of headers are logged, though this can be configured. Request and response bodies are never logged. + */ +export interface LogPolicyOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to: + * x-ms-client-request-id, x-ms-return-client-request-id, x-ms-useragent, x-ms-correlation-request-id, + * x-ms-request-id, client-request-id, ms-cv, return-client-request-id, traceparent, Access-Control-Allow-Credentials, + * Access-Control-Allow-Headers, Access-Control-Allow-Methods, Access-Control-Allow-Origin, Access-Control-Expose-Headers, + * Access-Control-Max-Age, Access-Control-Request-Headers, Access-Control-Request-Method, Origin, Accept, Accept-Encoding, + * Cache-Control, Connection, Content-Length, Content-Type, Date, ETag, Expires, If-Match, If-Modified-Since, If-None-Match, + * If-Unmodified-Since, Last-Modified, Pragma, Request-Id, Retry-After, Server, Transfer-Encoding, and User-Agent. + * + * Any headers specified in this field will be added to that list. + * Any other values will be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; + /** + * The Debugger (logger) instance to use for writing pipeline logs. + */ + logger?: Debugger; +} +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. 
+ * @returns An instance of the {@link LogPolicy} + */ +export declare function logPolicy(loggingOptions?: LogPolicyOptions): RequestPolicyFactory; +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +export declare class LogPolicy extends BaseRequestPolicy { + logger: Debugger; + sanitizer: Sanitizer; + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames(): Set; + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames: Set); + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters(): Set; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + set allowedQueryParameters(allowedQueryParameters: Set); + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, { logger, allowedHeaderNames, allowedQueryParameters, }?: LogPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + private logRequest; + private logResponse; +} +//# sourceMappingURL=logPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map new file mode 100644 index 0000000000..c79f751d3a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/logPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/logPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAC;AACzC,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;GAGG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;;;;;;;;;OAWG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAE9B;;;OAGG;IACH,sBAAsB,CAAC,EAAE,MAAM,EAAE,CAAC;IAElC;;OAEG;IACH,MAAM,CAAC,EAAE,QAAQ,CAAC;CACnB;AAED;;;;GAIG;AACH,wBAAgB,SAAS,CAAC,cAAc,GAAE,gBAAqB,GAAG,oBAAoB,CAMrF;AAED;;GAEG;AACH,qBAAa,SAAU,SAAQ,iBAAiB;IAC9C,MAAM,EAAE,QAAQ,CAAC;IACjB,SAAS,EAAE,SAAS,CAAC;IAErB;;;;;;OAMG;IACH,IAAW,kBAAkB,IAAI,GAAG,CAAC,MAAM,CAAC,CAE3C;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB,CAAC,kBAAkB,EAAE,GAAG,CAAC,MAAM,CAAC,EAE5D;IAED;;;;OAIG;IACH,IAAW,sBAAsB,IAAI,GAAG,CAAC,MAAM,CAAC,CAE/C;IAED;;;;OAIG;IACH,IAAW,sBAAsB,CAAC,sBAAsB,EAAE,GAAG,CAAC,MAAM,CAAC,EAEpE;gBAGC,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,EACE,MAAwB,EACxB,kBAAuB,EACvB,sBAA2B,GAC5B,GAAE,gBAAqB;IAOnB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAO5E,OAAO,CAAC,UAAU;IAIlB,OAAO,CAAC,WAAW;CAKpB"} \ No newline at end of file diff --git 
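A sketch of configuring the `logPolicy` declared above (illustrative only, not part of the diff; the logger name and header/query names are examples, and it assumes `createClientLogger` from `@azure/logger` returning `Debugger` channels).

```ts
import { logPolicy } from "@azure/core-http";
import { createClientLogger } from "@azure/logger";

const logger = createClientLogger("example-client"); // hypothetical logger name

const logFactory = logPolicy({
  logger: logger.info,                          // a Debugger, as LogPolicyOptions expects
  allowedHeaderNames: ["x-ms-example-header"],  // appended to the default allow-list
  allowedQueryParameters: ["api-version"],      // query values logged un-redacted
});
```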
a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 0000000000..3ff8522d3a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map new file mode 100644 index 0000000000..8cdd338108 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAOlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAQzD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 0000000000..1fac88842b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map new file mode 100644 index 0000000000..303309040e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAElD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAYzD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts new file mode 100644 index 0000000000..83cec2016a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.native.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map new file mode 100644 index 0000000000..f1b906f768 --- /dev/null +++ 
b/node_modules/@azure/core-http/types/latest/src/policies/msRestUserAgentPolicy.native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.native.d.ts","sourceRoot":"","sources":["../../../../src/policies/msRestUserAgentPolicy.native.ts"],"names":[],"mappings":"AAOA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAGlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAazD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts new file mode 100644 index 0000000000..4993bca543 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts @@ -0,0 +1,3 @@ +import { RequestPolicyFactory } from "./requestPolicy"; +export declare function ndJsonPolicy(): RequestPolicyFactory; +//# sourceMappingURL=ndJsonPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map new file mode 100644 index 0000000000..cad74b01c7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/ndJsonPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ndJsonPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/ndJsonPolicy.ts"],"names":[],"mappings":"AAMA,OAAO,EAGL,oBAAoB,EAErB,MAAM,iBAAiB,CAAC;AAIzB,wBAAgB,YAAY,IAAI,oBAAoB,CAMnD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts new file mode 100644 index 0000000000..41dab70930 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map new file mode 100644 index 0000000000..54a4ec004b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.browser.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"proxyPolicy.browser.d.ts","sourceRoot":"","sources":["../../../../src/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAErF;AAED,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB;IAK7D,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts new file mode 100644 index 0000000000..77d535e779 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts @@ -0,0 +1,37 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export declare const globalNoProxyList: string[]; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export declare function proxyPolicy(proxySettings?: ProxySettings, options?: { + /** a list of patterns to override those loaded from NO_PROXY environment variable. 
*/ + customNoProxyList?: string[]; +}): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + private customNoProxyList?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, proxySettings: ProxySettings, customNoProxyList?: string[] | undefined); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map new file mode 100644 index 0000000000..37f48a0a7e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/proxyPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/proxyPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAEzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AAEjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;GAGG;AACH,eAAO,MAAM,iBAAiB,EAAE,MAAM,EAAO,CAAC;AAyD9C;;GAEG;AACH,wBAAgB,WAAW,IAAI,MAAM,EAAE,CAWtC;AAED;;;;GAIG;AACH,wBAAgB,uBAAuB,CAAC,QAAQ,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAiBpF;AAED;;;;;;GAMG;AACH,wBAAgB,WAAW,CACzB,aAAa,CAAC,EAAE,aAAa,EAC7B,OAAO,CAAC,EAAE;IACR,sFAAsF;IACtF,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;CAC9B,GACA,oBAAoB,CAiBtB;AA2BD,qBAAa,WAAY,SAAQ,iBAAiB;IAIvC,aAAa,EAAE,aAAa;IACnC,OAAO,CAAC,iBAAiB,CAAC;gBAH1B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACtB,aAAa,EAAE,aAAa,EAC3B,iBAAiB,CAAC,sBAAU;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAa7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts new file mode 100644 index 0000000000..0ffbe3755b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts @@ -0,0 +1,33 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /** + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + /** + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. + */ + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. 
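A sketch of the proxy helpers declared above (illustrative only, not part of the diff; the proxy URL and NO_PROXY patterns are placeholders).

```ts
import { getDefaultProxySettings, proxyPolicy } from "@azure/core-http";

// Pass a proxy URL explicitly, or call getDefaultProxySettings() with no
// argument to pick up HTTPS_PROXY / HTTP_PROXY from the environment.
const proxySettings = getDefaultProxySettings("http://proxy.internal.example:8080");

const proxyFactory = proxyPolicy(proxySettings, {
  // Overrides the patterns loaded from NO_PROXY, per the declaration above.
  customNoProxyList: ["localhost", ".internal.example"],
});
```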
+ */ +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map new file mode 100644 index 0000000000..86218f92f1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/redirectPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"redirectPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/redirectPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAOjD;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IAEzB;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,eAAO,MAAM,sBAAsB,EAAE,eAGpC,CAAC;AAEF;;;;GAIG;AACH,wBAAgB,cAAc,CAAC,cAAc,SAAK,GAAG,oBAAoB,CAMxE;AAED;;GAEG;AACH,qBAAa,cAAe,SAAQ,iBAAiB;IACmB,QAAQ,CAAC,UAAU;gBAA7E,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,oBAAoB,EAAW,UAAU,SAAK;IAIvF,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts new file mode 100644 index 0000000000..71b4ac9180 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts @@ -0,0 +1,102 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. + */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +/** + * The underlying structure of a request policy. + */ +export interface RequestPolicy { + /** + * A method that retrieves an {@link HttpOperationResponse} given a {@link WebResourceLike} describing the request to be made. + * @param httpRequest - {@link WebResourceLike} describing the request to be made. + */ + sendRequest(httpRequest: WebResourceLike): Promise; +} +/** + * The base class from which all request policies derive. + */ +export declare abstract class BaseRequestPolicy implements RequestPolicy { + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + readonly _nextPolicy: RequestPolicy; + /** + * The options that can be passed to a given request policy. + */ + readonly _options: RequestPolicyOptionsLike; + /** + * The main method to implement that manipulates a request/response. + */ + protected constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy: RequestPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options: RequestPolicyOptionsLike); + /** + * Sends a network request based on the given web resource. 
+ * @param webResource - A {@link WebResourceLike} that describes a HTTP request to be made. + */ + abstract sendRequest(webResource: WebResourceLike): Promise; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map new file mode 100644 index 0000000000..80d5c3928a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/requestPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/requestPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAC/D,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,oBAAoB,GAAG;IACjC,MAAM,CAAC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,GAAG,aAAa,CAAC;CACrF,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;;OAGG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC,CAAC;CAC3E;AAED;;GAEG;AACH,8BAAsB,iBAAkB,YAAW,aAAa;IAK5D;;OAEG;IACH,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC;;OAEG;IACH,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAX7C;;OAEG;IACH,SAAS;IACP;;OAEG;IACM,WAAW,EAAE,aAAa;IACnC;;OAEG;IACM,QAAQ,EAAE,wBAAwB;IAG7C;;;OAGG;aACa,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAEzF;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAIzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAGlE;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;OAIG;IACH,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO,CAAC;IAEnD;;;;;OAKG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,oBAAoB;IACnB,OAAO,CAAC,OAAO,CAAC;gBAAR,OAAO,CAAC,gCAAoB;IAEhD;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAQzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAKlE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 0000000000..8c70c79570 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map new file mode 100644 index 0000000000..ddd83eb3ad --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/rpRegistrationPolicy.d.ts.map @@ -0,0 +1 @@ 
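A sketch of implementing a custom policy against the `BaseRequestPolicy` / `RequestPolicyFactory` shapes declared above (illustrative only, not part of the diff; the policy and header names are hypothetical).

```ts
import {
  BaseRequestPolicy,
  HttpOperationResponse,
  RequestPolicy,
  RequestPolicyFactory,
  RequestPolicyOptionsLike,
  WebResourceLike,
} from "@azure/core-http";

// Stamps an example header on every outgoing request, then delegates to the
// next policy in the chain via this._nextPolicy.
class ExampleHeaderPolicy extends BaseRequestPolicy {
  constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {
    super(nextPolicy, options);
  }

  sendRequest(request: WebResourceLike): Promise<HttpOperationResponse> {
    request.headers.set("x-example-header", "example-value");
    return this._nextPolicy.sendRequest(request);
  }
}

// The factory shape matches the RequestPolicyFactory type declared above.
export function exampleHeaderPolicy(): RequestPolicyFactory {
  return {
    create: (nextPolicy, options) => new ExampleHeaderPolicy(nextPolicy, options),
  };
}
```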
+{"version":3,"file":"rpRegistrationPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD,wBAAgB,oBAAoB,CAAC,YAAY,SAAK,GAAG,oBAAoB,CAM5E;AAED,qBAAa,oBAAqB,SAAQ,iBAAiB;IAIvD,QAAQ,CAAC,aAAa;gBAFtB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACpB,aAAa,SAAK;IAKtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts new file mode 100644 index 0000000000..69b4531969 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts @@ -0,0 +1,20 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. + */ +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + */ +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map new file mode 100644 index 0000000000..f6ba5f451a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/signingPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/signingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,wBAAwB,EAAE,MAAM,yCAAyC,CAAC;AACnF,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;;;GAIG;AACH,wBAAgB,aAAa,CAC3B,sBAAsB,EAAE,wBAAwB,GAC/C,oBAAoB,CAMtB;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,iBAAiB;IAIzC,sBAAsB,EAAE,wBAAwB;gBAFvD,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EACtB,sBAAsB,EAAE,wBAAwB;IAKzD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIxD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 0000000000..11b68ec048 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { 
BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - The client retry count. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map new file mode 100644 index 0000000000..1644efb783 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/systemErrorRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAYzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;;;;;;GAOG;AACH,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAED;;;;;;GAMG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;IACzB,gBAAgB,EAAE,MAAM,CAAC;gBAGvB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAapB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 0000000000..2b43ae1e78 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,35 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { HttpOperationResponse } 
from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +declare type ResponseHandler = (httpRequest: WebResourceLike, response: HttpOperationResponse) => Promise; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ +export declare function throttlingRetryPolicy(): RequestPolicyFactory; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private _handleResponse; + private numberOfRetries; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, _handleResponse?: ResponseHandler); + sendRequest(httpRequest: WebResourceLike): Promise; + private _defaultResponseHandler; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +export {}; +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map new file mode 100644 index 0000000000..847b1ce954 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/throttlingRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD,aAAK,eAAe,GAAG,CACrB,WAAW,EAAE,eAAe,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,OAAO,CAAC,qBAAqB,CAAC,CAAC;AAGpC;;;;;;;;;GASG;AACH,wBAAgB,qBAAqB,IAAI,oBAAoB,CAM5D;AAID;;;;;;;;GAQG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,OAAO,CAAC,eAAe,CAAkB;IACzC,OAAO,CAAC,eAAe,CAAK;gBAG1B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,eAAe,CAAC,EAAE,eAAe;IAMtB,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;YAYxE,uBAAuB;WAkCvB,qBAAqB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;WAS9D,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;CAWjF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts new file mode 100644 index 0000000000..717316ec27 --- /dev/null +++ 
b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts @@ -0,0 +1,31 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { Span } from "@azure/core-tracing"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Options to customize the tracing policy. + */ +export interface TracingPolicyOptions { + /** + * User agent used to better identify the outgoing requests traced by the tracing policy. + */ + userAgent?: string; +} +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ +export declare function tracingPolicy(tracingOptions?: TracingPolicyOptions): RequestPolicyFactory; +/** + * A policy that wraps outgoing requests with a tracing span. + */ +export declare class TracingPolicy extends BaseRequestPolicy { + private userAgent?; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, tracingOptions: TracingPolicyOptions); + sendRequest(request: WebResourceLike): Promise; + tryCreateSpan(request: WebResourceLike): Span | undefined; + private tryProcessError; + private tryProcessResponse; +} +//# sourceMappingURL=tracingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map new file mode 100644 index 0000000000..ee3c7967cb --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/tracingPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tracingPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/tracingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EACL,IAAI,EAML,MAAM,qBAAqB,CAAC;AAC7B,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAQjD;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;;;GAIG;AACH,wBAAgB,aAAa,CAAC,cAAc,GAAE,oBAAyB,GAAG,oBAAoB,CAM7F;AAED;;GAEG;AACH,qBAAa,aAAc,SAAQ,iBAAiB;IAClD,OAAO,CAAC,SAAS,CAAC,CAAS;gBAGzB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,oBAAoB,EAC7B,cAAc,EAAE,oBAAoB;IAMzB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAqBlF,aAAa,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI,GAAG,SAAS;IAsDzD,OAAO,CAAC,eAAe;IAgBvB,OAAO,CAAC,kBAAkB;CAe3B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts new file mode 100644 index 0000000000..bb2c3708b4 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts @@ -0,0 +1,49 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from "./requestPolicy"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +/** + * Telemetry information. Key/value pairs to include inside the User-Agent string. + */ +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +/** + * Options for adding user agent details to outgoing requests. + */ +export interface UserAgentOptions { + /** + * String prefix to add to the user agent for outgoing requests. 
+ * Defaults to an empty string. + */ + userAgentPrefix?: string; +} +export declare const getDefaultUserAgentHeaderName: typeof getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +export declare function getDefaultUserAgentValue(): string; +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptions; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map new file mode 100644 index 0000000000..f5466df389 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/policies/userAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.d.ts","sourceRoot":"","sources":["../../../../src/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,sBAAsB,EAA2B,MAAM,yBAAyB,CAAC;AAG1F,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,aAAa,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAE7D;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;;OAGG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAwBD,eAAO,MAAM,6BAA6B,+BAAyB,CAAC;AAEpE;;;GAGG;AACH,wBAAgB,wBAAwB,IAAI,MAAM,CAKjD;AAED;;;;GAIG;AACH,wBAAgB,eAAe,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAenF;AAED;;GAEG;AACH,qBAAa,eAAgB,SAAQ,iBAAiB;IAElD,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,oBAAoB;IACvC,SAAS,CAAC,SAAS,EAAE,MAAM;IAC3B,SAAS,CAAC,WAAW,EAAE,MAAM;gBAHpB,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,oBAAoB,EAC7B,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAKrE;;OAEG;IACH,kBAAkB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI;CASnD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts new file mode 100644 index 0000000000..6a1ee0759c --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts @@ -0,0 +1,14 @@ +/// +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; +import { HttpHeadersLike } from "./httpHeaders"; +import { ProxySettings } from "./serviceClient"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export 
declare function isUrlHttps(url: string): boolean; +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: tunnel.HttpsOverHttpsOptions): http.Agent | https.Agent; +//# sourceMappingURL=proxyAgent.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map new file mode 100644 index 0000000000..bef30cb2e3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/proxyAgent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.d.ts","sourceRoot":"","sources":["../../../src/proxyAgent.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD,oBAAY,UAAU,GAAG;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAA;CAAE,CAAC;AAC/E,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,MAAM,EAClB,aAAa,EAAE,aAAa,EAC5B,OAAO,CAAC,EAAE,eAAe,GACxB,UAAU,CA+BZ;AAED,wBAAgB,UAAU,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAG/C;AAED,wBAAgB,YAAY,CAC1B,cAAc,EAAE,OAAO,EACvB,YAAY,EAAE,OAAO,EACrB,aAAa,EAAE,MAAM,CAAC,qBAAqB,GAC1C,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAU1B"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts new file mode 100644 index 0000000000..515f1f52d9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts @@ -0,0 +1,26 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + /** + * CSV: Each pair of segments joined by a single comma. + */ + Csv = ",", + /** + * SSV: Each pair of segments joined by a single space character. + */ + Ssv = " ", + /** + * TSV: Each pair of segments joined by a single tab character. + */ + Tsv = "\t", + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + Pipes = "|", + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. 
`?queryParam=value1&queryParam=value2` + */ + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map new file mode 100644 index 0000000000..2ecbfd11e3 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/queryCollectionFormat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.d.ts","sourceRoot":"","sources":["../../../src/queryCollectionFormat.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,qBAAqB;IAC/B;;OAEG;IACH,GAAG,MAAM;IACT;;OAEG;IACH,GAAG,MAAM;IACT;;OAEG;IACH,GAAG,OAAO;IACV;;OAEG;IACH,KAAK,MAAM;IACX;;OAEG;IACH,KAAK,UAAU;CAChB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/restError.d.ts b/node_modules/@azure/core-http/types/latest/src/restError.d.ts new file mode 100644 index 0000000000..fe37553889 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/restError.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +/** + * An error resulting from an HTTP request to a service endpoint. + */ +export declare class RestError extends Error { + /** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ + static readonly REQUEST_SEND_ERROR: string; + /** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ + static readonly PARSE_ERROR: string; + /** + * The error code, if any. Can be one of the static error code properties (REQUEST_SEND_ERROR / PARSE_ERROR) or can be a string code from an underlying system call (E_NOENT). + */ + code?: string; + /** + * The HTTP status code of the response, if one was returned. + */ + statusCode?: number; + /** + * Outgoing request. + */ + request?: WebResourceLike; + /** + * Incoming response. + */ + response?: HttpOperationResponse; + /** + * Any additional details. In the case of deserialization errors, can be the processed response. 
+ */ + details?: unknown; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse); +} +//# sourceMappingURL=restError.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map b/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map new file mode 100644 index 0000000000..75a4cb73db --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/restError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.d.ts","sourceRoot":"","sources":["../../../src/restError.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAEhE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAKhD;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;IAClC;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAwB;IAClE;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAiB;IAEpD;;OAEG;IACH,IAAI,CAAC,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB;;OAEG;IACH,OAAO,CAAC,EAAE,eAAe,CAAC;IAC1B;;OAEG;IACH,QAAQ,CAAC,EAAE,qBAAqB,CAAC;IACjC;;OAEG;IACH,OAAO,CAAC,EAAE,OAAO,CAAC;gBAEhB,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB;CAkBnC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serializer.d.ts b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts new file mode 100644 index 0000000000..5915bf8508 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts @@ -0,0 +1,356 @@ +import { SerializerOptions } from "./util/serializer.common"; +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ +export declare class Serializer { + /** + * The provided model mapper. + */ + readonly modelMappers: { + [key: string]: any; + }; + /** + * Whether the contents are XML or not. + */ + readonly isXML?: boolean | undefined; + constructor( + /** + * The provided model mapper. + */ + modelMappers?: { + [key: string]: any; + }, + /** + * Whether the contents are XML or not. + */ + isXML?: boolean | undefined); + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. + */ + validateConstraints(mapper: Mapper, value: unknown, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. + */ + serialize(mapper: Mapper, object: unknown, objectName?: string, options?: SerializerOptions): any; + /** + * Deserialize the given object based on its metadata defined in the mapper. + * + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. + * @param options - Controls behavior of XML parser and builder. 
+ * @returns A valid deserialized Javascript object. + */ + deserialize(mapper: Mapper, responseBody: unknown, objectName: string, options?: SerializerOptions): any; +} +/** + * Description of various value constraints such as integer ranges and string regex. + */ +export interface MapperConstraints { + /** + * The value should be less than or equal to the `InclusiveMaximum` value. + */ + InclusiveMaximum?: number; + /** + * The value should be less than the `ExclusiveMaximum` value. + */ + ExclusiveMaximum?: number; + /** + * The value should be greater than or equal to the `InclusiveMinimum` value. + */ + InclusiveMinimum?: number; + /** + * The value should be greater than the `InclusiveMinimum` value. + */ + ExclusiveMinimum?: number; + /** + * The length should be smaller than the `MaxLength`. + */ + MaxLength?: number; + /** + * The length should be bigger than the `MinLength`. + */ + MinLength?: number; + /** + * The value must match the pattern. + */ + Pattern?: RegExp; + /** + * The value must contain fewer items than the MaxItems value. + */ + MaxItems?: number; + /** + * The value must contain more items than the `MinItems` value. + */ + MinItems?: number; + /** + * The value must contain only unique items. + */ + UniqueItems?: true; + /** + * The value should be exactly divisible by the `MultipleOf` value. + */ + MultipleOf?: number; +} +/** + * Type of the mapper. Includes known mappers. + */ +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +/** + * The type of a simple mapper. + */ +export interface SimpleMapperType { + /** + * Name of the type of the property. + */ + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +/** + * Helps build a mapper that describes how to map a set of properties of an object based on other mappers. + * + * Only one of the following properties should be present: `className`, `modelProperties` and `additionalProperties`. + */ +export interface CompositeMapperType { + /** + * Name of the composite mapper type. + */ + name: "Composite"; + /** + * Use `className` to reference another type definition. + */ + className?: string; + /** + * Use `modelProperties` when the reference to the other type has been resolved. + */ + modelProperties?: { + [propertyName: string]: Mapper; + }; + /** + * Used when a model has `additionalProperties: true`. Allows the generic processing of unnamed model properties on the response object. + */ + additionalProperties?: Mapper; + /** + * The name of the top-most parent scheme, the one that has no parents. + */ + uberParent?: string; + /** + * A polymorphic discriminator. + */ + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +/** + * Helps build a mapper that describes how to parse a sequence of mapped values. + */ +export interface SequenceMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Sequence"; + /** + * The mapper to use to map each one of the properties of the sequence. + */ + element: Mapper; +} +/** + * Helps build a mapper that describes how to parse a dictionary of mapped values. + */ +export interface DictionaryMapperType { + /** + * Name of the sequence type mapper. + */ + name: "Dictionary"; + /** + * The mapper to use to map the value of each property in the dictionary. + */ + value: Mapper; +} +/** + * Helps build a mapper that describes how to parse an enum value. 
+ */ +export interface EnumMapperType { + /** + * Name of the enum type mapper. + */ + name: "Enum"; + /** + * Values allowed by this mapper. + */ + allowedValues: any[]; +} +/** + * The base definition of a mapper. Can be used for XML and plain JavaScript objects. + */ +export interface BaseMapper { + /** + * Name for the xml element + */ + xmlName?: string; + /** + * Xml element namespace + */ + xmlNamespace?: string; + /** + * Xml element namespace prefix + */ + xmlNamespacePrefix?: string; + /** + * Determines if the current property should be serialized as an attribute of the parent xml element + */ + xmlIsAttribute?: boolean; + /** + * Name for the xml elements when serializing an array + */ + xmlElementName?: string; + /** + * Whether or not the current property should have a wrapping XML element + */ + xmlIsWrapped?: boolean; + /** + * Whether or not the current property is readonly + */ + readOnly?: boolean; + /** + * Whether or not the current property is a constant + */ + isConstant?: boolean; + /** + * Whether or not the current property is required + */ + required?: boolean; + /** + * Whether or not the current property allows mull as a value + */ + nullable?: boolean; + /** + * The name to use when serializing + */ + serializedName?: string; + /** + * Type of the mapper + */ + type: MapperType; + /** + * Default value when one is not explicitly provided + */ + defaultValue?: any; + /** + * Constraints to test the current value against + */ + constraints?: MapperConstraints; +} +/** + * Mappers are definitions of the data models used in the library. + * These data models are part of the Operation or Client definitions in the responses or parameters. + */ +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +/** + * Used to disambiguate discriminated type unions. + * For example, if response can have many shapes but also includes a 'kind' field (or similar), + * that field can be used to determine how to deserialize the response to the correct type. + */ +export interface PolymorphicDiscriminator { + /** + * Name of the discriminant property in the original JSON payload, e.g. `@odata.kind`. + */ + serializedName: string; + /** + * Name to use on the resulting object instead of the original property name. + * Useful since the JSON property could be difficult to work with. + * For example: For a field received as `@odata.kind`, the final object could instead include a property simply named `kind`. + */ + clientName: string; + /** + * It may contain any other property. + */ + [key: string]: string; +} +/** + * A mapper composed of other mappers. + */ +export interface CompositeMapper extends BaseMapper { + /** + * The type descriptor of the `CompositeMapper`. + */ + type: CompositeMapperType; +} +/** + * A mapper describing arrays. + */ +export interface SequenceMapper extends BaseMapper { + /** + * The type descriptor of the `SequenceMapper`. + */ + type: SequenceMapperType; +} +/** + * A mapper describing plain JavaScript objects used as key/value pairs. + */ +export interface DictionaryMapper extends BaseMapper { + /** + * The type descriptor of the `DictionaryMapper`. + */ + type: DictionaryMapperType; + /** + * Optionally, a prefix to add to the header collection. + */ + headerCollectionPrefix?: string; +} +/** + * A mapper describing an enum value. + */ +export interface EnumMapper extends BaseMapper { + /** + * The type descriptor of the `EnumMapper`. 
+ */ + type: EnumMapperType; +} +/** + * An interface representing an URL parameter value. + */ +export interface UrlParameterValue { + /** + * The URL value. + */ + value: string; + /** + * Whether to keep or skip URL encoding. + */ + skipUrlEncoding: boolean; +} +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ +export declare function serializeObject(toSerialize: unknown): any; +/** + * String enum containing the string types of property mappers. + */ +export declare const MapperType: { + Date: "Date"; + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + DateTime: "DateTime"; + DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map new file mode 100644 index 0000000000..7076d7ff5d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serializer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.d.ts","sourceRoot":"","sources":["../../../src/serializer.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,0BAA0B,CAAC;AAIvF;;;;GAIG;AACH,qBAAa,UAAU;IAEnB;;OAEG;aACa,YAAY,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE;IACpD;;OAEG;aACa,KAAK,CAAC;;IAPtB;;OAEG;IACa,YAAY,GAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAO;IACzD;;OAEG;IACa,KAAK,CAAC,qBAAS;IAGjC;;;;;OAKG;IACH,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI;IAmE7E;;;;;;;;OAQG;IACH,SAAS,CACP,MAAM,EAAE,MAAM,EACd,MAAM,EAAE,OAAO,EACf,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,GAAE,iBAAsB,GAC9B,GAAG;IA6FN;;;;;;;;OAQG;IACH,WAAW,CACT,MAAM,EAAE,MAAM,EACd,YAAY,EAAE,OAAO,EACrB,UAAU,EAAE,MAAM,EAClB,OAAO,GAAE,iBAAsB,GAC9B,GAAG;CAmGP;AAkyBD;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,WAAW,CAAC,EAAE,IAAI,CAAC;IACnB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED;;GAEG;AACH,oBAAY,UAAU,GAClB,gBAAgB,GAChB,mBAAmB,GACnB,kBAAkB,GAClB,oBAAoB,GACpB,cAAc,CAAC;AAEnB;;GAEG;AACH,MAAM,WAAW,gBAAgB;IAC/B;;OAEG;IACH,IAAI,EACA,WAAW,GACX,SAAS,GACT,WAAW,GACX,MAAM,GACN,UAAU,GACV,iBAAiB,GACjB,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,UAAU,GACV,MAAM,GACN,QAAQ,GACR,KAAK,CAAC;CACX;AAED;;;;GAIG;AACH,MAAM,WAAW,mBAAmB;IAClC;;OAEG;IACH,IAAI,EAAE,WAAW,CAAC;IAElB;;OAEG;IACH,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB;;OAEG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAErD;;OAEG;IACH,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAE9B;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB;;OAEG;IACH,wBAAwB,CAAC,EAAE,wBAAwB,CAAC;CACrD;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;IACjB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;OAEG;IACH,IAAI,EAAE,YAAY,CAAC;IACnB;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IACb;;OAEG;IACH,aAAa,EAAE,GAAG,E
AAE,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;OAEG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB;;OAEG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;OAEG;IACH,IAAI,EAAE,UAAU,CAAC;IACjB;;OAEG;IACH,YAAY,CAAC,EAAE,GAAG,CAAC;IACnB;;OAEG;IACH,WAAW,CAAC,EAAE,iBAAiB,CAAC;CACjC;AAED;;;GAGG;AACH,oBAAY,MAAM,GAAG,UAAU,GAAG,eAAe,GAAG,cAAc,GAAG,gBAAgB,GAAG,UAAU,CAAC;AAEnG;;;;GAIG;AACH,MAAM,WAAW,wBAAwB;IACvC;;OAEG;IACH,cAAc,EAAE,MAAM,CAAC;IACvB;;;;OAIG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAC;CACvB;AAED;;GAEG;AACH,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD;;OAEG;IACH,IAAI,EAAE,mBAAmB,CAAC;CAC3B;AAED;;GAEG;AACH,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD;;OAEG;IACH,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,gBAAiB,SAAQ,UAAU;IAClD;;OAEG;IACH,IAAI,EAAE,oBAAoB,CAAC;IAC3B;;OAEG;IACH,sBAAsB,CAAC,EAAE,MAAM,CAAC;CACjC;AAED;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,UAAU;IAC5C;;OAEG;IACH,IAAI,EAAE,cAAc,CAAC;CACtB;AAED;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;IACd;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;CAC1B;AAED;;GAEG;AACH,wBAAgB,eAAe,CAAC,WAAW,EAAE,OAAO,GAAG,GAAG,CAsBzD;AAaD;;GAEG;AAEH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;CAiBrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts new file mode 100644 index 0000000000..7fb2de30db --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts @@ -0,0 +1,168 @@ +import { Mapper, Serializer } from "./serializer"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { ServiceCallback } from "./util/utils"; +import { TokenCredential } from "@azure/core-auth"; +import { HttpClient } from "./httpClient"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { InternalPipelineOptions } from "./pipelineOptions"; +import { OperationArguments } from "./operationArguments"; +import { OperationResponse } from "./operationResponse"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +/** + * Options to configure a proxy for outgoing requests (Node.js only). + */ +export interface ProxySettings { + /** + * The proxy's host address. + */ + host: string; + /** + * The proxy host's port. + */ + port: number; + /** + * The user name to authenticate with the proxy, if required. + */ + username?: string; + /** + * The password to authenticate with the proxy, if required. + */ + password?: string; +} +/** + * An alias of {@link ProxySettings} for future use. + */ +export declare type ProxyOptions = ProxySettings; +/** + * Options to be provided while creating the client. 
+ */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-useragent" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * If specified, will be used to build the BearerTokenAuthenticationPolicy. + */ + credentialScopes?: string | string[]; +} +/** + * ServiceClient sends service requests and receives responses. + */ +export declare class ServiceClient { + /** + * If specified, this is the base URI that requests will be made against for this ServiceClient. + * If it is not specified, then all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @param credentials - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. 
+ */ + constructor(credentials?: TokenCredential | ServiceClientCredentials, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + * @param callback - The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ +export declare function createPipelineFromOptions(pipelineOptions: InternalPipelineOptions, authPolicyFactory?: RequestPolicyFactory): ServiceClientOptions; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. 
+ */ +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map new file mode 100644 index 0000000000..43bc7ac5e7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/serviceClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.d.ts","sourceRoot":"","sources":["../../../src/serviceClient.ts"],"names":[],"mappings":"AAIA,OAAO,EAAqC,MAAM,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACjG,OAAO,EAEL,2BAA2B,EAE5B,MAAM,kCAAkC,CAAC;AAI1C,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAE9E,OAAO,EAEL,aAAa,EAGd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,aAAa,EAAgC,MAAM,iBAAiB,CAAC;AAC9E,OAAO,EAEL,qBAAqB,EAErB,eAAe,EAEhB,MAAM,eAAe,CAAC;AACvB,OAAO,EAEL,oBAAoB,EAErB,MAAM,0BAA0B,CAAC;AAElC,OAAO,EAAE,eAAe,EAAU,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,eAAe,EAAqB,MAAM,kBAAkB,CAAC;AAMtE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,mBAAmB,CAAC;AAC5D,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAExD,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAgBlF;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,oBAAY,YAAY,GAAG,aAAa,CAAC;AAEzC;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,sBAAsB,CAAC,EACnB,oBAAoB,EAAE,GACtB,CAAC,CAAC,6BAA6B,EAAE,oBAAoB,EAAE,KAAK,IAAI,GAAG,oBAAoB,EAAE,CAAC,CAAC;IAC/F;;OAEG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB;;OAEG;IACH,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;IACxC;;OAEG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB;;OAEG;IACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;IACpC;;OAEG;IACH,6BAA6B,CAAC,EAAE,OAAO,CAAC;IACxC;;;OAGG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;OAGG;IACH,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC;;OAEG;IACH,2BAA2B,CAAC,EAAE,2BAA2B,CAAC;IAC1D;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,0BAA0B,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAChF;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,gBAAgB,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAC5D;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;CACtC;AAED;;GAEG;AACH,qBAAa,aAAa;IACxB;;;OAGG;IACH,SAAS,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE3B;;;OAGG;IACH,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAEtC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,qBAAqB,CAAuB;IAE7D,OAAO,CAAC,QAAQ,CAAC,uBAAuB,CAAyB;IACjE,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAU;IAE3C;;;;OAIG;gBAED,WAAW,CAAC,EAAE,eAAe,GAAG,wBAAwB,EAExD,OAAO,CAAC,EAAE,oBAAoB;IA+EhC;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA8B7F;;;;;OAKG;IACG,oBAAoB,CACxB,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,QAAQ,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,GAC9B,OAAO,CAAC,YAAY,CAAC;CA0OzB;AAED,wBAAgB,oBAAoB,CAClC,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,eAAe,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,GAC3B,IAAI,CAyGN;AAqFD;;;;;GAKG;AACH,wBAAgB,yBAAyB,CACvC,eAAe,EAAE,uBAAuB,EACxC,iBAAiB,CAAC,EAAE,oBAAoB,GACvC,oBAAoB,CAmFtB;AAED,oBAAY,cAAc,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAAC;AAE7D;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,cAAc,CAYhG;AAiBD,wBAAgB,0CAA0C,CACxD,aAAa,EAAE,aAAa,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,eAAe,EAAE,MAAM,EACvB,UAAU,EAAE,UAAU,GACrB,GAAG,CAmEL;AA6BD;;;;;GAKG;AACH,wBAAgB,eA
Ae,CAC7B,SAAS,EAAE,qBAAqB,EAChC,YAAY,EAAE,iBAAiB,GAAG,SAAS,GAC1C,YAAY,CAyEd"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/url.d.ts b/node_modules/@azure/core-http/types/latest/src/url.d.ts new file mode 100644 index 0000000000..9e3359adf1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/url.d.ts @@ -0,0 +1,160 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Get the keys of the query string. + */ + keys(): string[]; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. + */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: unknown): void; + /** + * Get the value of the query parameter with the provided query parameter name. 
If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + /** + * Parses a given string URL into a new {@link URLBuilder}. + */ + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. 
+ */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/url.d.ts.map b/node_modules/@azure/core-http/types/latest/src/url.d.ts.map new file mode 100644 index 0000000000..4604c58008 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/url.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"url.d.ts","sourceRoot":"","sources":["../../../src/url.ts"],"names":[],"mappings":"AAOA;;GAEG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAA2D;IAErF;;OAEG;IACI,GAAG,IAAI,OAAO;IAIrB;;OAEG;IACI,IAAI,IAAI,MAAM,EAAE;IAIvB;;;;OAIG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,EAAE,cAAc,EAAE,OAAO,GAAG,IAAI;IAgBhE;;;OAGG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIhE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAoBzB;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CA0D5C;AAED;;GAEG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,MAAM,CAAuB;IAErC;;;OAGG;IACI,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQlD;;OAEG;IACI,SAAS,IAAI,MAAM,GAAG,SAAS;IAItC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQ9C;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAQvD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAgB9C;;;OAGG;IACI,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAkBjD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;OAEG;IACI,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQhD;;;;OAIG;IACI,iBAAiB,CAAC,kBAAkB,EAAE,MAAM,EAAE,mBAAmB,EAAE,OAAO,GAAG,IAAI;IASxF;;;OAGG;IACI,sBAAsB,CAAC,kBAAkB,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIxF;;OAEG;IACI,QAAQ,IAAI,MAAM,GAAG,SAAS;IAIrC;;OAEG;IACH,OAAO,CAAC,GAAG;IAsCX;;;OAGG;IACI,QAAQ,IAAI,MAAM;IA6BzB;;;OAGG;IACI,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,IAAI;IAUlE;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,UAAU;CAK9C;AAED,aAAK,iBAAiB,GAAG,QAAQ,GAAG,gBAAgB,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,CAAC;AAEnG,aAAK,YAAY,GAAG,QAAQ,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;AAElE,qBAAa,QAAQ;aACgB,IAAI,EAAE,MAAM;aAAkB,IAAI,EAAE,YAAY;gBAAhD,IAAI,EAAE,MAAM,EAAkB,IAAI,EAAE,YAAY;WAErE,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI9B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CAG5C;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAOlE;AAED;;GAEG;AACH,qBAAa,YAAY;IAMJ,QAAQ,CAAC,KAAK,EAAE,MAAM;IALzC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,aAAa,EAAE,iBAAiB,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,QAAQ,GAAG,SAAS,CAAC;gBAER,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,iBAAiB;IAMpE;;;OAGG;IACI,OAAO,IAAI,QAAQ,GAAG,SAAS;IAItC;;OAEG;IACI,IAAI,IAAI,OAAO;CAmCvB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts new file mode 100644 index 0000000000..e54d75975b --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. 
+ * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map new file mode 100644 index 0000000000..1d1a90ce4d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/base64.browser.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAMzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAOtD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts new file mode 100644 index 0000000000..63e96309d7 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value - The string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value - The Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value - The base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map new file mode 100644 index 0000000000..2b42c1c723 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/base64.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.d.ts","sourceRoot":"","sources":["../../../../src/util/base64.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAKzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAEtD"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts new file mode 100644 index 0000000000..b474b275a9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts @@ -0,0 +1,72 @@ +/** + * A set of constants used internally when processing requests. + */ +export declare const Constants: { + /** + * The core-http version + */ + coreHttpVersion: string; + /** + * Specifies HTTP. + */ + HTTP: string; + /** + * Specifies HTTPS. + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. 
+ */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + ServiceUnavailable: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + */ + RETRY_AFTER: string; + /** + * The UserAgent header. + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map new file mode 100644 index 0000000000..04b9b888cc --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../../src/util/constants.ts"],"names":[],"mappings":"AAEA;;GAEG;AACH,eAAO,MAAM,SAAS;IACpB;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;IAGH;;OAEG;;;QAID;;WAEG;;;;;;;;;;;;;;;IAiBL;;OAEG;;QAED;;WAEG;;;QAKH;;;;WAIG;;QAGH;;WAEG;;;CAGN,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts new file mode 100644 index 0000000000..12c86ad7a1 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts @@ -0,0 +1,15 @@ +import { AbortSignalLike } from "@azure/abort-controller"; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * @param abortSignal - The abortSignal associated with containing operation. + * @param abortErrorMsg - The abort error message associated with containing operation. 
+ * @returns - Resolved promise + */ +export declare function delay<T>(delayInMs: number, value?: T, options?: { + abortSignal?: AbortSignalLike; + abortErrorMsg?: string; +}): Promise<T | void>; +//# sourceMappingURL=delay.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts.map new file mode 100644 index 0000000000..53e5b20e9d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/delay.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"delay.d.ts","sourceRoot":"","sources":["../../../../src/util/delay.ts"],"names":[],"mappings":"AAGA,OAAO,EAAc,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAKtE;;;;;;;;GAQG;AACH,wBAAgB,KAAK,CAAC,CAAC,EACrB,SAAS,EAAE,MAAM,EACjB,KAAK,CAAC,EAAE,CAAC,EACT,OAAO,CAAC,EAAE;IACR,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB,GACA,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC,CAsCnB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts new file mode 100644 index 0000000000..04a0945fd9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts @@ -0,0 +1,40 @@ +import { HttpOperationResponse } from "../coreHttp"; +export declare const DEFAULT_CLIENT_RETRY_COUNT = 3; +export declare const DEFAULT_CLIENT_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; +export declare const DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; +export declare function isNumber(n: unknown): n is number; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +/** + * @internal + * Determines if the operation should be retried. + * + * @param retryLimit - Specifies the max number of retries. + * @param predicate - Initial check on whether to retry based on given responses or errors + * @param retryData - The retry data. + * @returns True if the operation qualifies for a retry; false otherwise. + */ +export declare function shouldRetry(retryLimit: number, predicate: (response?: HttpOperationResponse, error?: RetryError) => boolean, retryData: RetryData, response?: HttpOperationResponse, error?: RetryError): boolean; +/** + * @internal + * Updates the retry data for the next attempt. + * + * @param retryOptions - specifies retry interval, and its lower bound and upper bound. + * @param retryData - The retry data. + * @param err - The operation's error, if any.
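Taken together, `delay` and the retry bookkeeping declared in `exponentialBackoffStrategy.d.ts` describe core-http's exponential backoff plumbing. These helpers are internal to the package, so the sketch below only mirrors their shapes with local stand-ins; `withRetries`, the doubling rule, and the default of three retries are illustrative assumptions, not the library's actual implementation.

```ts
// Minimal sketch of the retry loop these declarations describe. Everything
// here is a local stand-in; the real helpers live inside @azure/core-http
// and are not exported for application code.
interface RetryData {
  retryCount: number;
  retryInterval: number;
  error?: Error;
}

// Same shape as the declared delay(): resolve after delayInMs milliseconds.
function delay(delayInMs: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, delayInMs));
}

async function withRetries<T>(
  operation: () => Promise<T>,
  retryLimit = 3, // plays the role of DEFAULT_CLIENT_RETRY_COUNT
  baseIntervalMs = 1000
): Promise<T> {
  let retryData: RetryData = { retryCount: 0, retryInterval: 0 };
  for (;;) {
    try {
      return await operation();
    } catch (err) {
      // Mirrors updateRetryData(): bump the count and back off exponentially.
      retryData = {
        retryCount: retryData.retryCount + 1,
        retryInterval: baseIntervalMs * 2 ** retryData.retryCount,
        error: err as Error,
      };
      // Mirrors shouldRetry(): give up once the retry budget is exhausted.
      if (retryData.retryCount > retryLimit) {
        throw err;
      }
      await delay(retryData.retryInterval);
    }
  }
}
```

For example, `withRetries(() => fetch(url).then((r) => r.json()))` would retry a flaky request up to three times with growing pauses between attempts.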
+ */ +export declare function updateRetryData(retryOptions: { + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; +}, retryData?: RetryData, err?: RetryError): RetryData; +//# sourceMappingURL=exponentialBackoffStrategy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map new file mode 100644 index 0000000000..9a5f40b993 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/exponentialBackoffStrategy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialBackoffStrategy.d.ts","sourceRoot":"","sources":["../../../../src/util/exponentialBackoffStrategy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,aAAa,CAAC;AAEpD,eAAO,MAAM,0BAA0B,IAAI,CAAC;AAE5C,eAAO,MAAM,6BAA6B,QAAY,CAAC;AACvD,eAAO,MAAM,iCAAiC,QAAY,CAAC;AAC3D,eAAO,MAAM,iCAAiC,QAAW,CAAC;AAE1D,wBAAgB,QAAQ,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,IAAI,MAAM,CAEhD;AACD,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;;;;;;;GAQG;AACH,wBAAgB,WAAW,CACzB,UAAU,EAAE,MAAM,EAClB,SAAS,EAAE,CAAC,QAAQ,CAAC,EAAE,qBAAqB,EAAE,KAAK,CAAC,EAAE,UAAU,KAAK,OAAO,EAC5E,SAAS,EAAE,SAAS,EACpB,QAAQ,CAAC,EAAE,qBAAqB,EAChC,KAAK,CAAC,EAAE,UAAU,GACjB,OAAO,CAMT;AAED;;;;;;;GAOG;AACH,wBAAgB,eAAe,CAC7B,YAAY,EAAE;IAAE,aAAa,EAAE,MAAM,CAAC;IAAC,gBAAgB,EAAE,MAAM,CAAC;IAAC,gBAAgB,EAAE,MAAM,CAAA;CAAE,EAC3F,SAAS,GAAE,SAA+C,EAC1D,GAAG,CAAC,EAAE,UAAU,GACf,SAAS,CAyBX"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts new file mode 100644 index 0000000000..e136c5382e --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts @@ -0,0 +1,2 @@ +export declare const custom: {}; +//# sourceMappingURL=inspect.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map new file mode 100644 index 0000000000..d35d3353bf --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/inspect.browser.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,MAAM,IAAK,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts new file mode 100644 index 0000000000..cd664b8ca9 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts @@ -0,0 +1,2 @@ +export declare const custom: symbol; +//# sourceMappingURL=inspect.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map new file mode 100644 index 0000000000..37ed38baa6 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/inspect.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"inspect.d.ts","sourceRoot":"","sources":["../../../../src/util/inspect.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,MAAM,QAAiB,CAAC"} \ No 
newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts new file mode 100644 index 0000000000..c00416aafe --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts @@ -0,0 +1,25 @@ +export interface SanitizerOptions { + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + */ + allowedHeaderNames?: string[]; + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + */ + allowedQueryParameters?: string[]; +} +export declare class Sanitizer { + allowedHeaderNames: Set; + allowedQueryParameters: Set; + constructor({ allowedHeaderNames, allowedQueryParameters }?: SanitizerOptions); + sanitize(obj: unknown): string; + private sanitizeHeaders; + private sanitizeQuery; + private sanitizeObject; + private sanitizeUrl; +} +//# sourceMappingURL=sanitizer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map new file mode 100644 index 0000000000..e5cf8f5478 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/sanitizer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"sanitizer.d.ts","sourceRoot":"","sources":["../../../../src/util/sanitizer.ts"],"names":[],"mappings":"AAMA,MAAM,WAAW,gBAAgB;IAC/B;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,EAAE,CAAC;IAE9B;;;OAGG;IACH,sBAAsB,CAAC,EAAE,MAAM,EAAE,CAAC;CACnC;AAkDD,qBAAa,SAAS;IACb,kBAAkB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IAChC,sBAAsB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;gBAE/B,EAAE,kBAAuB,EAAE,sBAA2B,EAAE,GAAE,gBAAqB;IAapF,QAAQ,CAAC,GAAG,EAAE,OAAO,GAAG,MAAM;IA2CrC,OAAO,CAAC,eAAe;IAIvB,OAAO,CAAC,aAAa;IAIrB,OAAO,CAAC,cAAc;IAsBtB,OAAO,CAAC,WAAW;CAsBpB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts new file mode 100644 index 0000000000..cd9881b5d0 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts @@ -0,0 +1,26 @@ +/** + * Default key used to access the XML attributes. + */ +export declare const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export declare const XML_CHARKEY = "_"; +/** + * Options to govern behavior of xml parser and builder. + */ +export interface SerializerOptions { + /** + * indicates the name of the root element in the resulting XML when building XML. + */ + rootName?: string; + /** + * indicates whether the root element is to be included or not in the output when parsing XML. + */ + includeRoot?: boolean; + /** + * key used to access the XML value content when parsing XML. 
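The `Sanitizer` declared above is the helper core-http uses to scrub requests before logging them. It is internal, so the snippet below only sketches the redaction rule its options control; the default header list and the case-insensitive matching are assumptions read off the doc comments, not verified behaviour.

```ts
// Sketch of the redaction behaviour SanitizerOptions describes (assumed
// semantics; not the real Sanitizer class).
const DEFAULT_ALLOWED_HEADERS = new Set([
  "date",
  "traceparent",
  "x-ms-client-request-id",
  "x-ms-request-id",
]);

function sanitizeHeaders(
  headers: Record<string, string>,
  allowedHeaderNames: string[] = []
): Record<string, string> {
  const allowed = new Set([
    ...DEFAULT_ALLOWED_HEADERS,
    ...allowedHeaderNames.map((name) => name.toLowerCase()),
  ]);
  const result: Record<string, string> = {};
  for (const [name, value] of Object.entries(headers)) {
    // Anything not explicitly allowed is written to logs as "REDACTED".
    result[name] = allowed.has(name.toLowerCase()) ? value : "REDACTED";
  }
  return result;
}

// Authorization is redacted, x-ms-client-request-id passes through.
console.log(
  sanitizeHeaders({ Authorization: "Bearer secret", "x-ms-client-request-id": "1234" })
);
```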
+ */ + xmlCharKey?: string; +} +//# sourceMappingURL=serializer.common.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map new file mode 100644 index 0000000000..c6abf329ae --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/serializer.common.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.common.d.ts","sourceRoot":"","sources":["../../../../src/util/serializer.common.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,WAAW,MAAM,CAAC;AAC/B;;GAEG;AACH,eAAO,MAAM,WAAW,MAAM,CAAC;AAE/B;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB;;OAEG;IACH,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts new file mode 100644 index 0000000000..5ebc348406 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts @@ -0,0 +1,5 @@ +/** + * Maximum number of retries for the throttling retry policy + */ +export declare const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; +//# sourceMappingURL=throttlingRetryStrategy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map new file mode 100644 index 0000000000..5403e4d70f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/throttlingRetryStrategy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryStrategy.d.ts","sourceRoot":"","sources":["../../../../src/util/throttlingRetryStrategy.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,eAAO,MAAM,8BAA8B,IAAI,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts new file mode 100644 index 0000000000..e10f65bdbf --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts @@ -0,0 +1,7 @@ +/** + * Helper TypeGuard that checks if the value is not null or undefined. 
+ * @param thing - Anything + * @internal + */ +export declare function isDefined(thing: T | undefined | null): thing is T; +//# sourceMappingURL=typeguards.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts.map new file mode 100644 index 0000000000..6579652483 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/typeguards.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"typeguards.d.ts","sourceRoot":"","sources":["../../../../src/util/typeguards.ts"],"names":[],"mappings":"AAGA;;;;GAIG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,GAAG,SAAS,GAAG,IAAI,GAAG,KAAK,IAAI,CAAC,CAEpE"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts new file mode 100644 index 0000000000..3a804c5807 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts @@ -0,0 +1,131 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export declare const isNode: boolean; +/** + * Checks if a parsed URL is HTTPS + * + * @param urlToCheck - The url to check + * @returns True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param uri - The URI to be encoded. + * @returns The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param response - The Http Response + * @returns The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param request - The Http Request object + * @returns The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param uuid - The uuid as a string that needs to be validated + * @returns True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Generated UUID + * + * @returns RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param promiseFactories - An array of promise factories(A function that return a promise) + * @param kickstart - Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * @returns A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: unknown): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. 
+ * @param err - The error occurred if any, while executing the request; otherwise null. + * @param result - The deserialized response body if an error did not occur. + * @param request - The raw/actual request sent to the server if an error did not occur. + * @param response - The raw/actual response from the server if an error did not occur. + */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param promise - The Promise to be converted to a callback + * @returns A function that takes the callback `(cb: Function) => void` + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise<any>): (cb: Function) => void; +/** + * Converts a Promise to a service callback. + * @param promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback<T>(promise: Promise<HttpOperationResponse>): (cb: ServiceCallback<T>) => void; +export declare function prepareXMLRootList(obj: unknown, elementName: string, xmlNamespaceKey?: string, xmlNamespace?: string): { + [s: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param targetCtor - The target object on which the properties need to be applied. + * @param sourceCtors - An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtorParam: unknown, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @returns `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param value - The value to search and replace in. + * @param searchValue - The value to search for in the value argument. + * @param replaceValue - The value to replace searchValue with in the value argument. + * @returns The value where each instance of searchValue was replaced with replaceValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given entity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value - Any entity + * @returns true if it is a primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: unknown): boolean; +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare type UnknownObject = { + [s: string]: unknown; +}; +/** + * @internal + * @returns true when input is an object type that is not null, Array, RegExp, or Date.
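`executePromisesSequentially`, declared a little above, chains promise factories so that each one starts only after the previous promise settles and receives its resolved value. A small runnable sketch with a local stand-in that assumes exactly the chaining contract the doc comment describes:

```ts
// Local stand-in with the same contract as the declared helper: each factory
// receives the previous promise's resolved value, so steps run one at a time.
function executePromisesSequentially(
  promiseFactories: Array<(input: any) => Promise<any>>,
  kickstart: unknown
): Promise<any> {
  let result = Promise.resolve(kickstart);
  promiseFactories.forEach((factory) => {
    result = result.then(factory);
  });
  return result;
}

executePromisesSequentially(
  [
    async (start) => `${start} -> step1`,
    async (prev) => `${prev} -> step2`,
  ],
  "kickstart"
).then(console.log); // "kickstart -> step1 -> step2"
```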
+ */ +export declare function isObject(input: unknown): input is UnknownObject; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map new file mode 100644 index 0000000000..4c0165c1bc --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/utils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../../src/util/utils.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAOjD;;GAEG;AACH,eAAO,MAAM,MAAM,SAIM,CAAC;AAE1B;;;;;GAKG;AACH,wBAAgB,UAAU,CAAC,UAAU,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,GAAG,OAAO,CAEpE;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAO7C;AAED;;;;;;GAMG;AACH,wBAAgB,aAAa,CAAC,QAAQ,EAAE,qBAAqB,GAAG,GAAG,CAMlE;AAED;;;;;;GAMG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,eAAe,GAAG,eAAe,CAMtE;AAED;;;;;GAKG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAErC;AAED;;;;;;;;GAQG;AACH,wBAAgB,2BAA2B,CACzC,gBAAgB,EAAE,KAAK,CAAC,GAAG,CAAC,EAC5B,SAAS,EAAE,OAAO,GACjB,OAAO,CAAC,GAAG,CAAC,CAMd;AAED;;GAEG;AACH,MAAM,WAAW,eAAe,CAAC,OAAO;IACtC;;;;;;OAMG;IACH,CACE,GAAG,EAAE,KAAK,GAAG,SAAS,GAAG,IAAI,EAC7B,MAAM,CAAC,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,GAC/B,IAAI,CAAC;CACT;AAED;;;;;GAKG;AAEH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,KAAK,IAAI,CAgB/E;AAED;;;;GAIG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,EACxC,OAAO,EAAE,OAAO,CAAC,qBAAqB,CAAC,GACtC,CAAC,EAAE,EAAE,eAAe,CAAC,CAAC,CAAC,KAAK,IAAI,CAalC;AAED,wBAAgB,kBAAkB,CAChC,GAAG,EAAE,OAAO,EACZ,WAAW,EAAE,MAAM,EACnB,eAAe,CAAC,EAAE,MAAM,EACxB,YAAY,CAAC,EAAE,MAAM,GACpB;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAYtB;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CAAC,eAAe,EAAE,OAAO,EAAE,WAAW,EAAE,GAAG,EAAE,GAAG,IAAI,CAS9E;AAKD;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;;;GAMG;AACH,wBAAgB,UAAU,CACxB,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,GACnB,MAAM,GAAG,SAAS,CAEpB;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,OAAO,GAAG,OAAO,CAEvD;AAED,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAOpE;AAED;;GAEG;AACH,oBAAY,aAAa,GAAG;IAAE,CAAC,CAAC,EAAE,MAAM,GAAG,OAAO,CAAA;CAAE,CAAC;AAErD;;;GAGG;AACH,wBAAgB,QAAQ,CAAC,KAAK,EAAE,OAAO,GAAG,KAAK,IAAI,aAAa,CAQ/D"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts new file mode 100644 index 0000000000..c92f70d222 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts @@ -0,0 +1,4 @@ +import { SerializerOptions } from "./serializer.common"; +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +export declare function stringifyXML(content: unknown, opts?: SerializerOptions): string; +//# sourceMappingURL=xml.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map new file mode 100644 index 0000000000..269d709522 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.browser.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../../src/util/xml.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,qBAAqB,CAAC;AAgClF,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,GAAE,iBAAsB,GAAG,OAAO,CAAC,GAAG,CAAC,CAqBhF;AA4FD,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,EAAE,IAAI,GAAE,iBAAsB,GAAG,MAAM,CAWnF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts new file mode 100644 index 0000000000..088841b9de --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts @@ -0,0 +1,14 @@ +import { SerializerOptions } from "./serializer.common"; +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the parsing of given JSON object + */ +export declare function stringifyXML(obj: unknown, opts?: SerializerOptions): string; +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + */ +export declare function parseXML(str: string, opts?: SerializerOptions): Promise; +//# sourceMappingURL=xml.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map new file mode 100644 index 0000000000..e06390ce7d --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/util/xml.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.d.ts","sourceRoot":"","sources":["../../../../src/util/xml.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,iBAAiB,EAA4B,MAAM,qBAAqB,CAAC;AA0DlF;;;;GAIG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE,OAAO,EAAE,IAAI,GAAE,iBAAsB,GAAG,MAAM,CAK/E;AAED;;;;GAIG;AACH,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,GAAE,iBAAsB,GAAG,OAAO,CAAC,GAAG,CAAC,CAiBhF"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/webResource.d.ts b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts new file mode 100644 index 0000000000..a036e53484 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts @@ -0,0 +1,445 @@ +/// +import { Context, SpanOptions } from "@azure/core-tracing"; +import { HttpHeadersLike } from "./httpHeaders"; +import { Mapper } from "./serializer"; +import { AbortSignalLike } from "@azure/abort-controller"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { OperationSpec } from "./operationSpec"; +import { ProxySettings } from "./serviceClient"; +import { SerializerOptions } from "./util/serializer.common"; +/** + * List of supported HTTP methods. + */ +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +/** + * Possible HTTP request body types + */ +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * A description of a HTTP request to be made to a remote server. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. 
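The XML helpers in `xml.d.ts` above pair with `serializer.common.d.ts`: parsed XML uses "$" (XML_ATTRKEY) for attributes and "_" (the default XML_CHARKEY) for element text. Below is a rough round trip under those assumptions; whether the two functions are re-exported from the package root is not shown in these typings, so the import path is illustrative only.

```ts
// Illustrative import: parseXML/stringifyXML are declared in core-http's util
// folder and may not be part of the public entry point.
import { parseXML, stringifyXML } from "@azure/core-http";

async function roundTrip(): Promise<void> {
  const parsed = await parseXML(`<entry id="1">hello</entry>`, { xmlCharKey: "_" });
  // Expected rough shape: { $: { id: "1" }, _: "hello" }
  // "$" (XML_ATTRKEY) carries the attributes, "_" (XML_CHARKEY) the text.

  // Going back to XML, rootName names the wrapping element.
  console.log(stringifyXML(parsed, { rootName: "entry" }));
}

roundTrip().catch(console.error);
```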
+ */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of response status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * A unique identifier for the request. Used for logging and tracing. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: unknown): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + */ +export declare class WebResource implements WebResourceLike { + /** + * URL of the outgoing request. + */ + url: string; + /** + * HTTP method to use. + */ + method: HttpMethods; + /** + * Request body. + */ + body?: any; + /** + * HTTP headers. 
+ */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + * @deprecated Use streamResponseStatusCodes property instead. + */ + streamResponseBody?: boolean; + /** + * A list of status codes whose corresponding HttpOperationResponse body should be treated as a stream. + */ + streamResponseStatusCodes?: Set; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + /** + * Form data, used to build the request body. + */ + formData?: any; + /** + * Query added to the URL. + */ + query?: { + [key: string]: any; + }; + /** + * Specification of the HTTP request. + */ + operationSpec?: OperationSpec; + /** + * Whether to send credentials (via cookies, authorization headers, or TLS client certificates) when making a request in the browser to a cross-site destination. + */ + withCredentials: boolean; + /** + * How long to wait in milliseconds before aborting the request. + */ + timeout: number; + /** + * What proxy to use, if necessary. + */ + proxySettings?: ProxySettings; + /** + * Whether to keep the HTTP connections alive throughout requests. + */ + keepAlive?: boolean; + /** + * Whether or not to decompress response according to Accept-Encoding header (node-fetch only) + */ + decompressResponse?: boolean; + /** + * Unique identifier of the outgoing request. + */ + requestId: string; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating Spans. + */ + tracingContext?: Context; + constructor(url?: string, method?: HttpMethods, body?: unknown, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, decompressResponse?: boolean, streamResponseStatusCodes?: Set); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param options - Options to provide for preparing the request. + * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. 
+ */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +/** + * Options to prepare an outgoing HTTP request. + */ +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: `{ "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } }` + * - query-parameter-value in "string" format: `{ "query-parameter-name": "query-parameter-value"}`. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: `/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}` + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: `{ "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } }` + * - path-parameter-value in "string" format: `{ "path-parameter-name": "path-parameter-value" }`. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * Form data, used to build the request body. + */ + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. 
+ * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: Record; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * Allows keeping track of the progress of uploading the outgoing request. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Allows keeping track of the progress of downloading the incoming response. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Tracing: Options used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + /** + * Value of the parameter. + */ + value: any; + /** + * Disables URL encoding if set to true. + */ + skipUrlEncoding: boolean; + /** + * Parameter values may contain any other property. + */ + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * Signal of an abort controller. Can be used to abort both sending a network request and waiting for a response. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * Tracing: Context used when creating spans. + */ + tracingContext?: Context; + /** + * May contain other properties. 
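`RequestPrepareOptions` above spells out how `pathTemplate`, `pathParameters` and `queryParameters` combine, including the object form that skips URL encoding. A minimal sketch of `prepare()` built from those fields; the subscription, resource group, account name and API version are placeholders.

```ts
import { WebResource } from "@azure/core-http";

// prepare() substitutes the path parameters into the template, appends the
// query string, and applies the headers, yielding a ready-to-send request.
const request = new WebResource().prepare({
  method: "GET",
  baseUrl: "https://management.azure.com",
  pathTemplate:
    "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}",
  pathParameters: {
    subscriptionId: "00000000-0000-0000-0000-000000000000",
    resourceGroupName: "my-rg",
    // Object form: this value is inserted without URL encoding.
    accountName: { value: "myaccount", skipUrlEncoding: true },
  },
  queryParameters: { "api-version": "2021-09-01" },
  headers: { accept: "application/json" },
});

console.log(request.url);
```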
+ */ + [key: string]: any; + /** + * Options to override XML parsing/building behavior. + */ + serializerOptions?: SerializerOptions; +} +//# sourceMappingURL=webResource.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map new file mode 100644 index 0000000000..369a753177 --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/webResource.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.d.ts","sourceRoot":"","sources":["../../../src/webResource.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAC3D,OAAO,EAAe,eAAe,EAAqB,MAAM,eAAe,CAAC;AAChF,OAAO,EAAE,MAAM,EAAc,MAAM,cAAc,CAAC;AAClD,OAAO,EAAE,eAAe,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,iBAAiB,EAAE,MAAM,0BAA0B,CAAC;AAG7D;;GAEG;AACH,oBAAY,WAAW,GACnB,KAAK,GACL,KAAK,GACL,MAAM,GACN,QAAQ,GACR,OAAO,GACP,MAAM,GACN,SAAS,GACT,OAAO,CAAC;AAEZ;;GAEG;AACH,oBAAY,eAAe,GACvB,IAAI,GACJ,MAAM,GACN,WAAW,GACX,eAAe,GACf,CAAC,MAAM,MAAM,CAAC,cAAc,CAAC,CAAC;AAElC;;GAEG;AACH,oBAAY,qBAAqB,GAAG;IAClC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACxC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,SAAS,EAAE,MAAM,CAAC;IAElB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;;;OAIG;IACH,yBAAyB,IAAI,IAAI,CAAC;IAElC;;OAEG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,CAAC;IACzD;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,OAAO,GAAG,MAAM,IAAI,eAAe,CAuB5E;AAED;;;;;GAKG;AACH,qBAAa,WAAY,YAAW,eAAe;IACjD;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;;OAGG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;IACxC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC;;OAEG;IACH,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;OAEG;IACH,SAAS,EAAE,M
AAM,CAAC;IAElB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;OAEG;IACH,WAAW,CAAC,EAAE,WAAW,CAAC;IAE1B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;gBAGvB,GAAG,CAAC,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,WAAW,EACpB,IAAI,CAAC,EAAE,OAAO,EACd,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,EAC9B,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,GAAG,eAAe,EAClD,kBAAkB,CAAC,EAAE,OAAO,EAC5B,eAAe,CAAC,EAAE,OAAO,EACzB,WAAW,CAAC,EAAE,eAAe,EAC7B,OAAO,CAAC,EAAE,MAAM,EAChB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC5D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC9D,aAAa,CAAC,EAAE,aAAa,EAC7B,SAAS,CAAC,EAAE,OAAO,EACnB,kBAAkB,CAAC,EAAE,OAAO,EAC5B,yBAAyB,CAAC,EAAE,GAAG,CAAC,MAAM,CAAC;IAqBzC;;;;OAIG;IACH,yBAAyB,IAAI,IAAI;IASjC;;;;OAIG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,WAAW;IAwNpD;;;OAGG;IACH,KAAK,IAAI,WAAW;CAqCrB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;IACb;;;;;;;;;;OAUG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IAC1D;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;;;;;;;;OASG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IACzD;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAClC;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC;;OAEG;IACH,sBAAsB,CAAC,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,qBAAqB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAChD;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAC;IACrC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D;;OAEG;IACH,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;CAC1B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B;;OAEG;IACH,KAAK,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;;OAGG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAE7E;;OAEG;IACH,cAAc,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;IAEnB;;OAEG;IACH,iBAAiB,CAAC,EAAE,iBAAiB,CAAC;CACvC"} \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts new file mode 100644 index 0000000000..eea3572d4a --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpHeadersLike } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeadersLike; +//# sourceMappingURL=xhrHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map new file mode 100644 index 0000000000..3c18856f9f --- /dev/null +++ b/node_modules/@azure/core-http/types/latest/src/xhrHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xhrHttpClient.d.ts","sourceRoot":"","sources":["../../../src/xhrHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAyB,eAAe,EAAE,MAAM,eAAe,CAAC;AAEvE,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAGhE;;GAEG;AACH,qBAAa,aAAc,YAAW,UAAU;IACvC,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CA6F7E;AAwED,wBAAgB,YAAY,CAAC,GAAG,EAAE,cAAc,GAAG,eAAe,CAajE"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/CHANGELOG.md b/node_modules/@azure/core-lro/CHANGELOG.md new file mode 100644 index 0000000000..7b16b27f84 --- /dev/null +++ b/node_modules/@azure/core-lro/CHANGELOG.md @@ -0,0 +1,82 @@ +# Release History + +## 2.2.4 (2022-03-07) + +### Bugs Fixed + +- Fix polling so that resources created in a different URL will be retrieved once polling is done. [PR #20656](https://github.com/Azure/azure-sdk-for-js/pull/20656) + +## 2.2.3 (2022-01-06) + +### Bugs Fixed + +- Fix an issue where we treat Retry-After value as milliseconds. It is actually in seconds. [PR #19479](https://github.com/Azure/azure-sdk-for-js/pull/19479) + +## 2.2.2 (2021-12-02) + +### Bugs Fixed + +- Fix LRO PATCH operations when their results are located in a different URL. [PR #18820](https://github.com/Azure/azure-sdk-for-js/pull/18820) + +## 2.2.1 (2021-09-30) + +### Bugs Fixed + +- Check for string type before calling toLowerCase(). [PR #17573](https://github.com/Azure/azure-sdk-for-js/pull/17573) + +### Other Changes + +- Updates package to work with the react native bundler. [PR #17783](https://github.com/Azure/azure-sdk-for-js/pull/17783) + +## 2.2.0 (2021-08-05) + +### Features Added + +- `LroEngine` supports a new `isDone()` function in its options bag which can be used to provide a custom logic for determining when an LRO finished processing. + +## 2.1.0 (2021-07-19) + +### Features Added + +- Provides a long-running operation engine. + +## 2.0.0 (2021-06-30) + +### New Features + +- Changed TS compilation target to ES2017 in order to produce smaller bundles and use more native platform features + +## 1.0.5 (2021-04-12) + +- No functionality changes from 1.0.4. This release is to correct an issue where 1.0.4 shipped with modules in the wrong format (cjs instead of es6.) + +## 1.0.4 (2021-03-30) + +- Bug fix: Fix an issue where we might return stale state if the `update` implementation reassigns `operation.state`. + +### Breaking Changes + +- Updated @azure/core-tracing to version `1.0.0-preview.11`. See [@azure/core-tracing CHANGELOG](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/CHANGELOG.md) for details about breaking changes with tracing. + +## 1.0.3 (2021-01-07) + +- Updates the `tslib` dependency to version 2.x. + +## 1.0.2 (2020-04-28) + +- Moved `@opentelemetry/types` to the `devDependencies`. + +## 1.0.1 (2020-02-28) + +- `getOperationState()` now returns `TState`. 
+- `TState` of `PollerLike` can be a subset of `TState` of `Poller`, + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/core-lro` package. + +- Updated PollOperation to be more strictly typed. + +## 1.0.0-preview.1 (2019-10-22) + +- Initial implementation of an abstraction for Long Running Operations (LROs). diff --git a/node_modules/@azure/core-lro/LICENSE b/node_modules/@azure/core-lro/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/core-lro/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-lro/README.md b/node_modules/@azure/core-lro/README.md new file mode 100644 index 0000000000..e00b74092a --- /dev/null +++ b/node_modules/@azure/core-lro/README.md @@ -0,0 +1,92 @@ +# Azure Core LRO client library for JavaScript + +`@azure/core-lro` is a JavaScript library that manages long running operations (LROs) against Azure services. Until completion, such operations require consecutive calls to Azure services to update a local representation of the remote operation status. + +**Please note:** This library is included with other Azure SDK libraries that need it. It should not be used as a direct dependency in your projects. + +`@azure/core-lro` is made following our [Long Running Operations guidelines](https://azure.github.io/azure-sdk/typescript_design.html#ts-lro) + +Key links: +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-lro) +- [Package (npm)](https://www.npmjs.com/package/@azure/core-lro) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/@azure/core-lro) +- [Samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/samples) + +## Getting started + +### Install the package + +To install this library for a project under the `azure-sdk-for-js`, make sure you are at the root of that project, then use [Rush](https://rushjs.io/) as follows: + +``` +rush add -p @azure/core-lro +``` + +To install this package outside of the `azure-sdk-for-js`, use `npm install --save @azure/core-lro`. + +### Configure TypeScript + +TypeScript users need to have Node type definitions installed: + +```bash +npm install @types/node +``` + +They will also need to enable `compilerOptions.allowSyntheticDefaultImports` in their +`tsconfig.json`. Note that if you have enabled `compilerOptions.esModuleInterop`, +`allowSyntheticDefaultImports` is enabled by default. 
+See [TypeScript's compiler options handbook](https://www.typescriptlang.org/docs/handbook/compiler-options.html) +for more information. + +## Key concepts + +@azure/core-lro makes a distinction between the Long Running Operation and its Poller. + +- Whenever we talk about an **operation**, we mean the static representation of a Long Running Operation. + Any operation will have a definition of a **state**, which has an opinionated default set of properties. + The definition of the operation will also have functions that will define how to request new information + about the pending operation, how to request its cancellation, and how to serialize it. +- A **Poller** is an object who's main function is to interact with an operation until the poller is manually stopped, + the operation finishes (either by succeeding or failing) or if a manual request to cancel the operation has succeeded. + Some characteristics of the pollers are: + - Pollers show the status of the polling behavior. + - Pollers support manual as well as automatic polling. + - Pollers are serializable and can resume from a serialized operation. + - Pollers also specify how much of the operation's state is going to be available to the public. +- A **PollerLike** is the public interface of a Poller, which allows for different implementations to be used. + +## Examples + +You will be able to find some working examples of an implementation of an operation and a poller in: + +- [The `@azure/core-lro` samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-lro/samples). +- [The `@azure/core-lro` tests](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-lro/test). + +## Troubleshooting + +### Logging + +Logs can be added at the discretion of the library implementing the Long Running Operation poller. +Packages inside of [azure-sdk-for-js](https://github.com/Azure/azure-sdk-for-js) use +[@azure/logger](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger). + +## Next steps + +Please take a look at the [samples](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/samples) directory for detailed examples on how to use this library. + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +### Testing + +To run our tests, first install the dependencies (with `npm install` or `rush install`), +then run the unit tests with: `npm run unit-test`. + +### Code of Conduct + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-lro%2FREADME.png) diff --git a/node_modules/@azure/core-lro/dist-esm/src/index.js b/node_modules/@azure/core-lro/dist-esm/src/index.js new file mode 100644 index 0000000000..b3cb487415 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/index.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
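The Key concepts list in the README above stays abstract, so here is a hedged sketch of how a `PollerLike` returned by an Azure client method is normally consumed; `client.beginCopy` is a placeholder for whichever client operation hands back a poller, not an API of this package.

```ts
import type { PollerLike, PollOperationState } from "@azure/core-lro";

// Placeholder client: any generated Azure client method that starts a long
// running operation returns a PollerLike similar to this.
declare const client: {
  beginCopy(source: string): Promise<PollerLike<PollOperationState<string>, string>>;
};

async function copyAndWait(source: string): Promise<string> {
  const poller = await client.beginCopy(source);
  // Automatic polling: poll() is called on a cadence until the operation
  // reaches a terminal state, then the final result is returned.
  return poller.pollUntilDone();
}
```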
+export * from "./pollOperation"; +export * from "./poller"; +export * from "./lroEngine"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/index.js.map b/node_modules/@azure/core-lro/dist-esm/src/index.js.map new file mode 100644 index 0000000000..45a9ab1b12 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,iBAAiB,CAAC;AAChC,cAAc,UAAU,CAAC;AACzB,cAAc,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./pollOperation\";\nexport * from \"./poller\";\nexport * from \"./lroEngine\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js new file mode 100644 index 0000000000..301e7ec7a8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { failureStates, successStates, } from "./models"; +import { isUnexpectedPollingResponse } from "./requestUtils"; +function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return typeof state === "string" ? state.toLowerCase() : "succeeded"; +} +export function isBodyPollingDone(rawResponse) { + const state = getProvisioningState(rawResponse); + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. 
The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Creates a polling strategy based on BodyPolling which uses the provisioning state + * from the result to determine the current operation state + */ +export function processBodyPollingOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: isBodyPollingDone(response.rawResponse) }); +} +//# sourceMappingURL=bodyPolling.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js.map new file mode 100644 index 0000000000..8a06d124f1 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/bodyPolling.js.map @@ -0,0 +1 @@ +{"version":3,"file":"bodyPolling.js","sourceRoot":"","sources":["../../../src/lroEngine/bodyPolling.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAKL,aAAa,EACb,aAAa,GACd,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,2BAA2B,EAAE,MAAM,gBAAgB,CAAC;AAE7D,SAAS,oBAAoB,CAAC,WAAwB;;IACpD,MAAM,EAAE,UAAU,EAAE,iBAAiB,EAAE,GAAG,MAAC,WAAW,CAAC,IAAgB,mCAAI,EAAE,CAAC;IAC9E,MAAM,KAAK,GAAuB,MAAA,UAAU,aAAV,UAAU,uBAAV,UAAU,CAAE,iBAAiB,mCAAI,iBAAiB,CAAC;IACrF,OAAO,OAAO,KAAK,KAAK,QAAQ,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC;AACvE,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,WAAwB;IACxD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;QAC7E,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,GAAG,CAAC,CAAC;KAC7F;IACD,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,iCAAiC,CAC/C,QAA8B;IAE9B,uCACK,QAAQ,KACX,IAAI,EAAE,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,IAC7C;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroBody,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction getProvisioningState(rawResponse: RawResponse): string {\n const { properties, provisioningState } = (rawResponse.body as LroBody) ?? {};\n const state: string | undefined = properties?.provisioningState ?? provisioningState;\n return typeof state === \"string\" ? state.toLowerCase() : \"succeeded\";\n}\n\nexport function isBodyPollingDone(rawResponse: RawResponse): boolean {\n const state = getProvisioningState(rawResponse);\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Creates a polling strategy based on BodyPolling which uses the provisioning state\n * from the result to determine the current operation state\n */\nexport function processBodyPollingOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: isBodyPollingDone(response.rawResponse),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js new file mode 100644 index 0000000000..9a5cd7d1d6 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
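The body-polling strategy in `bodyPolling.js` above boils down to reading a provisioning state from the response body and comparing it against the success and failure state lists from `models.js`. A simplified sketch of that check with hypothetical sample responses (it omits the `isUnexpectedPollingResponse` status-code guard):

```ts
// Simplified mirror of the logic in dist-esm/src/lroEngine/bodyPolling.js above.
interface RawResponse {
  statusCode: number;
  headers: { [name: string]: string };
  body?: unknown;
}

const successStates = ["succeeded"];
const failureStates = ["failed", "canceled", "cancelled"];

function provisioningStateOf(rawResponse: RawResponse): string {
  const body = (rawResponse.body ?? {}) as {
    provisioningState?: string;
    properties?: { provisioningState?: string };
  };
  const state = body.properties?.provisioningState ?? body.provisioningState;
  // A response without a provisioning state is treated as already succeeded.
  return typeof state === "string" ? state.toLowerCase() : "succeeded";
}

function isBodyPollingDone(rawResponse: RawResponse): boolean {
  const state = provisioningStateOf(rawResponse);
  if (failureStates.includes(state)) {
    throw new Error(`The long running operation has failed. The provisioning state: ${state}.`);
  }
  return successStates.includes(state);
}

// Hypothetical responses:
isBodyPollingDone({ statusCode: 200, headers: {}, body: { properties: { provisioningState: "InProgress" } } }); // false
isBodyPollingDone({ statusCode: 200, headers: {}, body: { provisioningState: "Succeeded" } }); // true
```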
+export { LroEngine } from "./lroEngine"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js.map new file mode 100644 index 0000000000..1d9a2e5b6b --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/lroEngine/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { LroEngine } from \"./lroEngine\";\nexport {\n LroResourceLocationConfig,\n LongRunningOperation,\n LroResponse,\n LroEngineOptions,\n RawResponse,\n} from \"./models\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js new file mode 100644 index 0000000000..aac45a24fb --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { failureStates, successStates, } from "./models"; +import { isUnexpectedPollingResponse } from "./requestUtils"; +function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +export function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? 
finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; +} +//# sourceMappingURL=locationPolling.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js.map new file mode 100644 index 0000000000..8537ba52a7 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/locationPolling.js.map @@ -0,0 +1 @@ +{"version":3,"file":"locationPolling.js","sourceRoot":"","sources":["../../../src/lroEngine/locationPolling.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAOL,aAAa,EACb,aAAa,GACd,MAAM,UAAU,CAAC;AAClB,OAAO,EAAE,2BAA2B,EAAE,MAAM,gBAAgB,CAAC;AAE7D,SAAS,aAAa,CAAC,WAAwB;;IAC7C,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,WAAW,CAAC,UAAU,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IACD,MAAM,EAAE,MAAM,EAAE,GAAG,MAAC,WAAW,CAAC,IAAgB,mCAAI,EAAE,CAAC;IACvD,MAAM,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,WAAW,CAAC;IAC9E,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;QAC7E,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,GAAG,CAAC,CAAC;KAC7F;IACD,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;GAEG;AACH,KAAK,UAAU,gBAAgB,CAC7B,GAAkC,EAClC,gBAAwB,EACxB,yBAAqD;IAErD,QAAQ,yBAAyB,EAAE;QACjC,KAAK,cAAc;YACjB,OAAO,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;QAC9C,KAAK,uBAAuB;YAC1B,OAAO,SAAS,CAAC;QACnB,KAAK,UAAU,CAAC;QAChB;YACE,OAAO,GAAG,CAAC,eAAe,CAAC,gBAAgB,aAAhB,gBAAgB,cAAhB,gBAAgB,GAAI,GAAG,CAAC,WAAW,CAAC,CAAC;KACnE;AACH,CAAC;AAED,MAAM,UAAU,qCAAqC,CACnD,GAAkC,EAClC,gBAAyB,EACzB,yBAAqD;IAErD,OAAO,CAAC,QAA8B,EAAsB,EAAE;QAC5D,IAAI,aAAa,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE;YACvC,IAAI,gBAAgB,KAAK,SAAS,EAAE;gBAClC,uCAAY,QAAQ,KAAE,IAAI,EAAE,IAAI,IAAG;aACpC;iBAAM;gBACL,uCACK,QAAQ,KACX,IAAI,EAAE,KAAK,EACX,IAAI,EAAE,KAAK,IAAI,EAAE;wBACf,MAAM,aAAa,GAAG,MAAM,gBAAgB,CAC1C,GAAG,EACH,gBAAgB,EAChB,yBAAyB,CAC1B,CAAC;wBACF,uCACK,CAAC,aAAa,aAAb,aAAa,cAAb,aAAa,GAAI,QAAQ,CAAC,KAC9B,IAAI,EAAE,IAAI,IACV;oBACJ,CAAC,IACD;aACH;SACF;QACD,uCACK,QAAQ,KACX,IAAI,EAAE,KAAK,IACX;IACJ,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroBody,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction isPollingDone(rawResponse: RawResponse): boolean {\n if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) {\n return false;\n }\n const { status } = (rawResponse.body as LroBody) ?? {};\n const state = typeof status === \"string\" ? status.toLowerCase() : \"succeeded\";\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Sends a request to the URI of the provisioned resource if needed.\n */\nasync function sendFinalRequest(\n lro: LongRunningOperation,\n resourceLocation: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): Promise | undefined> {\n switch (lroResourceLocationConfig) {\n case \"original-uri\":\n return lro.sendPollRequest(lro.requestPath);\n case \"azure-async-operation\":\n return undefined;\n case \"location\":\n default:\n return lro.sendPollRequest(resourceLocation ?? 
lro.requestPath);\n }\n}\n\nexport function processLocationPollingOperationResult(\n lro: LongRunningOperation,\n resourceLocation?: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): (response: LroResponse) => LroStatus {\n return (response: LroResponse): LroStatus => {\n if (isPollingDone(response.rawResponse)) {\n if (resourceLocation === undefined) {\n return { ...response, done: true };\n } else {\n return {\n ...response,\n done: false,\n next: async () => {\n const finalResponse = await sendFinalRequest(\n lro,\n resourceLocation,\n lroResourceLocationConfig\n );\n return {\n ...(finalResponse ?? response),\n done: true,\n };\n },\n };\n }\n }\n return {\n ...response,\n done: false,\n };\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js new file mode 100644 index 0000000000..094bfe4214 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +export const logger = createClientLogger("core-lro"); +//# sourceMappingURL=logger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js.map new file mode 100644 index 0000000000..161d8f714d --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/logger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logger.js","sourceRoot":"","sources":["../../../src/lroEngine/logger.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;;GAGG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js new file mode 100644 index 0000000000..a25411fd00 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { GenericPollOperation } from "./operation"; +import { Poller } from "../poller"; +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`); + } +} +/** + * The LRO Engine, a class that performs polling. + */ +export class LroEngine extends Poller { + constructor(lro, options) { + const { intervalInMs = 2000, resumeFrom } = options || {}; + const state = resumeFrom + ? deserializeState(resumeFrom) + : {}; + const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? 
void 0 : options.isDone); + super(operation); + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} +//# sourceMappingURL=lroEngine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js.map new file mode 100644 index 0000000000..3218c6958e --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/lroEngine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"lroEngine.js","sourceRoot":"","sources":["../../../src/lroEngine/lroEngine.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAQlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,aAAa,CAAC;AAEnD,OAAO,EAAE,MAAM,EAAE,MAAM,WAAW,CAAC;AAEnC,SAAS,gBAAgB,CACvB,eAAuB;IAEvB,IAAI;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;KAC1C;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,IAAI,KAAK,CAAC,2CAA2C,eAAe,EAAE,CAAC,CAAC;KAC/E;AACH,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,SAA+D,SAAQ,MAGnF;IAGC,YAAY,GAAkC,EAAE,OAA2C;QACzF,MAAM,EAAE,YAAY,GAAG,IAAI,EAAE,UAAU,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAC1D,MAAM,KAAK,GAAkD,UAAU;YACrE,CAAC,CAAC,gBAAgB,CAAC,UAAU,CAAC;YAC9B,CAAC,CAAE,EAAoD,CAAC;QAE1D,MAAM,SAAS,GAAG,IAAI,oBAAoB,CACxC,KAAK,EACL,GAAG,EACH,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,yBAAyB,EAClC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,EACtB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,CAChB,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjB,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;QAC7C,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IACzC,CAAC;IAED;;OAEG;IACH,KAAK;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,UAAU,CAAC,GAAG,EAAE,CAAC,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;IACzF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroEngineOptions,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { GenericPollOperation } from \"./operation\";\nimport { PollOperationState } from \"../pollOperation\";\nimport { Poller } from \"../poller\";\n\nfunction deserializeState(\n serializedState: string\n): TState & ResumablePollOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`);\n }\n}\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const { intervalInMs = 2000, resumeFrom } = options || {};\n const state: TState & ResumablePollOperationState = resumeFrom\n ? 
deserializeState(resumeFrom)\n : ({} as TState & ResumablePollOperationState);\n\n const operation = new GenericPollOperation(\n state,\n lro,\n options?.lroResourceLocationConfig,\n options?.processResult,\n options?.updateState,\n options?.isDone\n );\n super(operation);\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js new file mode 100644 index 0000000000..fb9667db45 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const successStates = ["succeeded"]; +export const failureStates = ["failed", "canceled", "cancelled"]; +/** + * The LRO states that signal that the LRO has completed. + */ +export const terminalStates = successStates.concat(failureStates); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js.map new file mode 100644 index 0000000000..b40cc57c92 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/lroEngine/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkClC,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,WAAW,CAAC,CAAC;AAC3C,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,QAAQ,EAAE,UAAU,EAAE,WAAW,CAAC,CAAC;AACjE;;GAEG;AACH,MAAM,CAAC,MAAM,cAAc,GAAG,aAAa,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperationState } from \"../pollOperation\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n}\n\nexport const successStates = [\"succeeded\"];\nexport const failureStates = [\"failed\", \"canceled\", \"cancelled\"];\n/**\n * The LRO states that signal that the LRO has completed.\n */\nexport const terminalStates = successStates.concat(failureStates);\n\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. 
This is just a convenience type for checking the status of the operation.\n */\n\nexport interface LroBody extends Record {\n /** The status of the operation. */\n status?: string;\n /** The state of the provisioning process */\n provisioningState?: string;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: string } & Record;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/** The type of which LRO implementation being followed by a specific API. */\nexport type LroMode = \"Location\" | \"Body\";\n\n/**\n * The configuration of a LRO to determine how to perform polling and checking whether the operation has completed.\n */\nexport interface LroConfig {\n /** The LRO mode */\n mode?: LroMode;\n /** The path of a provisioned resource */\n resourceLocation?: string;\n}\n\n/**\n * Type of a polling operation state that can actually be resumed.\n */\nexport type ResumablePollOperationState = PollOperationState & {\n initialRawResponse?: RawResponse;\n config?: LroConfig;\n pollingURL?: string;\n};\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n\n/**\n * The type of a terminal state of an LRO.\n */\nexport interface LroTerminalState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: true;\n}\n\n/**\n * The type of an in-progress state of an LRO.\n */\nexport interface LroInProgressState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: false;\n /**\n * The request to be sent next if it is different from the standard polling one.\n * Notice that it will disregard any polling URLs provided to it.\n */\n next?: () => Promise>;\n}\n\n/**\n * The type of an LRO state which is a tagged union of terminal and in-progress states.\n */\nexport type LroStatus = LroTerminalState | LroInProgressState;\n\n/**\n * The type of the getLROStatusFromResponse method. It takes the response as input and returns along the response whether the operation has finished.\n */\nexport type GetLroStatusFromResponse = (response: LroResponse) => LroStatus;\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path.\n */\n requestPath: string;\n /**\n * The HTTP request method.\n */\n requestMethod: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (path: string) => Promise>;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js new file mode 100644 index 0000000000..c4d29f9a97 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
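The `LongRunningOperation` and `LroResponse` shapes defined in `models.ts` above are all the `LroEngine` needs from a caller. Below is a rough sketch of wiring them together; the endpoint URL is hypothetical, the adapter assumes a global `fetch` (Node 18+), and the exact generic signatures may differ slightly from this simplification:

```ts
import { LroEngine, LroResponse } from "@azure/core-lro";

const resourceUrl = "https://contoso.example/widgets/1"; // hypothetical resource

// Adapt any HTTP client to the LroResponse shape (flatResponse + rawResponse).
async function send(method: string, url: string): Promise<LroResponse<unknown>> {
  const response = await fetch(url, { method }); // assumes a global fetch (Node 18+)
  const body = await response.json().catch(() => undefined);
  return {
    flatResponse: body,
    rawResponse: {
      statusCode: response.status,
      headers: Object.fromEntries(response.headers.entries()),
      body,
    },
  };
}

// Satisfies the LongRunningOperation contract from models.ts.
const lro = {
  requestPath: resourceUrl,
  requestMethod: "PUT",
  sendInitialRequest: () => send("PUT", resourceUrl),
  sendPollRequest: (path: string) => send("GET", path),
};

async function main(): Promise<void> {
  // Poll every second instead of the 2000 ms default shown in lroEngine.js.
  const poller = new LroEngine(lro, { intervalInMs: 1000 });
  console.log("final result:", await poller.pollUntilDone());
}

main().catch(console.error);
```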
+import { createGetLroStatusFromResponse, createInitializeState, createPoll } from "./stateMachine"; +import { getPollingUrl } from "./requestUtils"; +import { logger } from "./logger"; +export class GenericPollOperation { + constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + /** + * General update function for LROPoller, the general process is as follows + * 1. Check initial operation result to determine the strategy to use + * - Strategies: Location, Azure-AsyncOperation, Original Uri + * 2. Check if the operation result has a terminal state + * - Terminal state will be determined by each strategy + * 2.1 If it is terminal state Check if a final GET request is required, if so + * send final GET request and return result from operation. If no final GET + * is required, just return the result from operation. + * - Determining what to call for final request is responsibility of each strategy + * 2.2 If it is not terminal state, call the polling operation and go to step 1 + * - Determining what to call for polling is responsibility of each strategy + * - Strategies will always use the latest URI for polling if provided otherwise + * the last known one + */ + async update(options) { + var _a, _b, _c; + const state = this.state; + let lastResponse = undefined; + if (!state.isStarted) { + const initializeState = createInitializeState(state, this.lro.requestPath, this.lro.requestMethod); + lastResponse = await this.lro.sendInitialRequest(); + initializeState(lastResponse); + } + if (!state.isCompleted) { + if (!this.poll || !this.getLroStatusFromResponse) { + if (!state.config) { + throw new Error("Bad state: LRO mode is undefined. Please check if the serialized state is well-formed."); + } + const isDone = this.isDone; + this.getLroStatusFromResponse = isDone + ? (response) => (Object.assign(Object.assign({}, response), { done: isDone(response.flatResponse, this.state) })) + : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig); + this.poll = createPoll(this.lro); + } + if (!state.pollingURL) { + throw new Error("Bad state: polling URL is undefined. Please check if the serialized state is well-formed."); + } + const currentState = await this.poll(state.pollingURL, this.pollerConfig, this.getLroStatusFromResponse); + logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`); + if (currentState.done) { + state.result = this.processResult + ? this.processResult(currentState.flatResponse, state) + : currentState.flatResponse; + state.isCompleted = true; + } + else { + this.poll = (_a = currentState.next) !== null && _a !== void 0 ? _a : this.poll; + state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL); + } + lastResponse = currentState; + } + logger.verbose(`LRO: current state: ${JSON.stringify(state)}`); + if (lastResponse) { + (_b = this.updateState) === null || _b === void 0 ? void 0 : _b.call(this, state, lastResponse === null || lastResponse === void 0 ? void 0 : lastResponse.rawResponse); + } + else { + logger.error(`LRO: no response was received`); + } + (_c = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _c === void 0 ? 
void 0 : _c.call(options, state); + return this; + } + async cancel() { + this.state.isCancelled = true; + return this; + } + /** + * Serializes the Poller operation. + */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} +//# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js.map new file mode 100644 index 0000000000..4843c6c69f --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/operation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operation.js","sourceRoot":"","sources":["../../../src/lroEngine/operation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAalC,OAAO,EAAE,8BAA8B,EAAE,qBAAqB,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAEnG,OAAO,EAAE,aAAa,EAAE,MAAM,gBAAgB,CAAC;AAC/C,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAElC,MAAM,OAAO,oBAAoB;IAW/B,YACS,KAAoD,EACnD,GAAkC,EAClC,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D;QAL3D,UAAK,GAAL,KAAK,CAA+C;QACnD,QAAG,GAAH,GAAG,CAA+B;QAClC,8BAAyB,GAAzB,yBAAyB,CAA4B;QACrD,kBAAa,GAAb,aAAa,CAA8C;QAC3D,gBAAW,GAAX,WAAW,CAAqD;QAChE,WAAM,GAAN,MAAM,CAAoD;IACjE,CAAC;IAEG,eAAe,CAAC,YAA0B;QAC/C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,KAAK,CAAC,MAAM,CAAC,OAGZ;;QACC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;QACzB,IAAI,YAAY,GAAqC,SAAS,CAAC;QAC/D,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;YACpB,MAAM,eAAe,GAAG,qBAAqB,CAC3C,KAAK,EACL,IAAI,CAAC,GAAG,CAAC,WAAW,EACpB,IAAI,CAAC,GAAG,CAAC,aAAa,CACvB,CAAC;YACF,YAAY,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,kBAAkB,EAAE,CAAC;YACnD,eAAe,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,wBAAwB,EAAE;gBAChD,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE;oBACjB,MAAM,IAAI,KAAK,CACb,wFAAwF,CACzF,CAAC;iBACH;gBACD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;gBAC3B,IAAI,CAAC,wBAAwB,GAAG,MAAM;oBACpC,CAAC,CAAC,CAAC,QAA8B,EAAE,EAAE,CAAC,iCAC/B,QAAQ,KACX,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,YAAY,EAAE,IAAI,CAAC,KAAK,CAAC,IAC/C;oBACJ,CAAC,CAAC,8BAA8B,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,yBAAyB,CAAC,CAAC;gBAC3F,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAClC;YACD,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;gBACrB,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;aACH;YACD,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,IAAI,CAClC,KAAK,CAAC,UAAU,EAChB,IAAI,CAAC,YAAa,EAClB,IAAI,CAAC,wBAAwB,CAC9B,CAAC;YACF,MAAM,CAAC,OAAO,CAAC,0BAA0B,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,WAAW,CAAC,EAAE,CAAC,CAAC;YACrF,IAAI,YAAY,CAAC,IAAI,EAAE;gBACrB,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,aAAa;oBAC/B,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,YAAY,EAAE,KAAK,CAAC;oBACtD,CAAC,CAAC,YAAY,CAAC,YAAY,CAAC;gBAC9B,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;iBAAM;gBACL,IAAI,CAAC,IAAI,GAAG,MAAA,YAAY,CAAC,IAAI,mCAAI,IAAI,CAAC,IAAI,CAAC;gBAC3C,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,YAAY,CAAC,WAAW,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;aAC9E;YACD,YAAY,GAAG,YAAY,CAAC;SAC7B;QACD,MAAM,CAAC,OAAO,CAAC,uBAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QAC/D,IAAI,YAAY,EAAE;YAChB,MAAA,IAAI,CAAC,WAAW,+CAAhB,IAAI,EAAe,KAAK,EAAE,YAAY,aAAZ,YAAY,uBAAZ,YAAY,CAAE,WAAW,CAAC,CAAC;SACtD;aAAM;YACL,MAAM,CAAC,KAAK,CAAC,+BAA+B,CAAC,CAAC;SAC/C;QACD,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,YAAY,+CAArB,OAAO,EAAiB,KAAK,CAAC,CAAC;QAC/B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,KAAK,CAAC,MAAM;QACV,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;QAC9B,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;OAEG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC;YACpB,KAAK,EAAE,IAAI,CAAC,KAAK;SAClB,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT 
license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n RawResponse,\n ResumablePollOperationState,\n} from \"./models\";\nimport { PollOperation, PollOperationState } from \"../pollOperation\";\nimport { createGetLroStatusFromResponse, createInitializeState, createPoll } from \"./stateMachine\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { getPollingUrl } from \"./requestUtils\";\nimport { logger } from \"./logger\";\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private poll?: (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ) => Promise>;\n private pollerConfig?: PollerConfig;\n private getLroStatusFromResponse?: GetLroStatusFromResponse;\n\n constructor(\n public state: TState & ResumablePollOperationState,\n private lro: LongRunningOperation,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n /**\n * General update function for LROPoller, the general process is as follows\n * 1. Check initial operation result to determine the strategy to use\n * - Strategies: Location, Azure-AsyncOperation, Original Uri\n * 2. Check if the operation result has a terminal state\n * - Terminal state will be determined by each strategy\n * 2.1 If it is terminal state Check if a final GET request is required, if so\n * send final GET request and return result from operation. If no final GET\n * is required, just return the result from operation.\n * - Determining what to call for final request is responsibility of each strategy\n * 2.2 If it is not terminal state, call the polling operation and go to step 1\n * - Determining what to call for polling is responsibility of each strategy\n * - Strategies will always use the latest URI for polling if provided otherwise\n * the last known one\n */\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const state = this.state;\n let lastResponse: LroResponse | undefined = undefined;\n if (!state.isStarted) {\n const initializeState = createInitializeState(\n state,\n this.lro.requestPath,\n this.lro.requestMethod\n );\n lastResponse = await this.lro.sendInitialRequest();\n initializeState(lastResponse);\n }\n\n if (!state.isCompleted) {\n if (!this.poll || !this.getLroStatusFromResponse) {\n if (!state.config) {\n throw new Error(\n \"Bad state: LRO mode is undefined. Please check if the serialized state is well-formed.\"\n );\n }\n const isDone = this.isDone;\n this.getLroStatusFromResponse = isDone\n ? (response: LroResponse) => ({\n ...response,\n done: isDone(response.flatResponse, this.state),\n })\n : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig);\n this.poll = createPoll(this.lro);\n }\n if (!state.pollingURL) {\n throw new Error(\n \"Bad state: polling URL is undefined. 
Please check if the serialized state is well-formed.\"\n );\n }\n const currentState = await this.poll(\n state.pollingURL,\n this.pollerConfig!,\n this.getLroStatusFromResponse\n );\n logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`);\n if (currentState.done) {\n state.result = this.processResult\n ? this.processResult(currentState.flatResponse, state)\n : currentState.flatResponse;\n state.isCompleted = true;\n } else {\n this.poll = currentState.next ?? this.poll;\n state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL);\n }\n lastResponse = currentState;\n }\n logger.verbose(`LRO: current state: ${JSON.stringify(state)}`);\n if (lastResponse) {\n this.updateState?.(state, lastResponse?.rawResponse);\n } else {\n logger.error(`LRO: no response was received`);\n }\n options?.fireProgress?.(state);\n return this;\n }\n\n async cancel(): Promise> {\n this.state.isCancelled = true;\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js new file mode 100644 index 0000000000..a5f21ae120 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export function processPassthroughOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: true }); +} +//# sourceMappingURL=passthrough.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js.map new file mode 100644 index 0000000000..eb9068597b --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/passthrough.js.map @@ -0,0 +1 @@ +{"version":3,"file":"passthrough.js","sourceRoot":"","sources":["../../../src/lroEngine/passthrough.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,UAAU,iCAAiC,CAC/C,QAA8B;IAE9B,uCACK,QAAQ,KACX,IAAI,EAAE,IAAI,IACV;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResponse, LroStatus } from \"./models\";\n\nexport function processPassthroughOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: true,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js new file mode 100644 index 0000000000..4b5c072679 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js @@ -0,0 +1,78 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Detects where the continuation token is and returns it. Notice that azure-asyncoperation + * must be checked first before the other location headers because there are scenarios + * where both azure-asyncoperation and location could be present in the same response but + * azure-asyncoperation should be the one to use for polling. + */ +export function getPollingUrl(rawResponse, defaultPath) { + var _a, _b, _c; + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? 
_a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? _b : getLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); +} +function getLocation(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocation(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperation(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } + } +} +export function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || + getOperationLocation(rawResponse) !== undefined) { + return { + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", + }; + } + else if (["PUT", "PATCH"].includes(requestMethod)) { + return { + mode: "Body", + }; + } + return {}; +} +class SimpleRestError extends Error { + constructor(message, statusCode) { + super(message); + this.name = "RestError"; + this.statusCode = statusCode; + Object.setPrototypeOf(this, SimpleRestError.prototype); + } +} +export function isUnexpectedInitialResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![203, 204, 202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`, code); + } + return false; +} +export function isUnexpectedPollingResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} while polling. 
This may indicate a server issue.`, code); + } + return false; +} +//# sourceMappingURL=requestUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js.map new file mode 100644 index 0000000000..80a36ccba5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/requestUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestUtils.js","sourceRoot":"","sources":["../../../src/lroEngine/requestUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC;;;;;GAKG;AACH,MAAM,UAAU,aAAa,CAAC,WAAwB,EAAE,WAAmB;;IACzE,OAAO,CACL,MAAA,MAAA,MAAA,sBAAsB,CAAC,WAAW,CAAC,mCACnC,oBAAoB,CAAC,WAAW,CAAC,mCACjC,WAAW,CAAC,WAAW,CAAC,mCACxB,WAAW,CACZ,CAAC;AACJ,CAAC;AAED,SAAS,WAAW,CAAC,WAAwB;IAC3C,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB;IACpD,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,sBAAsB,CAAC,WAAwB;IACtD,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAC3B,aAAqB,EACrB,WAAwB,EACxB,WAAmB;IAEnB,QAAQ,aAAa,EAAE;QACrB,KAAK,KAAK,CAAC,CAAC;YACV,OAAO,WAAW,CAAC;SACpB;QACD,KAAK,MAAM,CAAC;QACZ,KAAK,OAAO,CAAC,CAAC;YACZ,OAAO,WAAW,CAAC,WAAW,CAAC,CAAC;SACjC;QACD,OAAO,CAAC,CAAC;YACP,OAAO,SAAS,CAAC;SAClB;KACF;AACH,CAAC;AAED,MAAM,UAAU,YAAY,CAC1B,WAAmB,EACnB,aAAqB,EACrB,WAAwB;IAExB,IACE,sBAAsB,CAAC,WAAW,CAAC,KAAK,SAAS;QACjD,oBAAoB,CAAC,WAAW,CAAC,KAAK,SAAS,EAC/C;QACA,OAAO;YACL,IAAI,EAAE,UAAU;YAChB,gBAAgB,EAAE,oBAAoB,CAAC,aAAa,EAAE,WAAW,EAAE,WAAW,CAAC;SAChF,CAAC;KACH;SAAM,IAAI,WAAW,CAAC,WAAW,CAAC,KAAK,SAAS,EAAE;QACjD,OAAO;YACL,IAAI,EAAE,UAAU;SACjB,CAAC;KACH;SAAM,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,EAAE;QACnD,OAAO;YACL,IAAI,EAAE,MAAM;SACb,CAAC;KACH;IACD,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,MAAM,eAAgB,SAAQ,KAAK;IAEjC,YAAY,OAAe,EAAE,UAAkB;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;QACxB,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAE7B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAC;IACzD,CAAC;CACF;AAED,MAAM,UAAU,2BAA2B,CAAC,WAAwB;IAClE,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;IACpC,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QAClD,MAAM,IAAI,eAAe,CACvB,wCAAwC,IAAI,6DAA6D,EACzG,IAAI,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,MAAM,UAAU,2BAA2B,CAAC,WAAwB;IAClE,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;IACpC,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QACxC,MAAM,IAAI,eAAe,CACvB,wCAAwC,IAAI,mDAAmD,EAC/F,IAAI,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroConfig, RawResponse } from \"./models\";\n\n/**\n * Detects where the continuation token is and returns it. 
Notice that azure-asyncoperation\n * must be checked first before the other location headers because there are scenarios\n * where both azure-asyncoperation and location could be present in the same response but\n * azure-asyncoperation should be the one to use for polling.\n */\nexport function getPollingUrl(rawResponse: RawResponse, defaultPath: string): string {\n return (\n getAzureAsyncOperation(rawResponse) ??\n getOperationLocation(rawResponse) ??\n getLocation(rawResponse) ??\n defaultPath\n );\n}\n\nfunction getLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(\n requestMethod: string,\n rawResponse: RawResponse,\n requestPath: string\n): string | undefined {\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"POST\":\n case \"PATCH\": {\n return getLocation(rawResponse);\n }\n default: {\n return undefined;\n }\n }\n}\n\nexport function inferLroMode(\n requestPath: string,\n requestMethod: string,\n rawResponse: RawResponse\n): LroConfig {\n if (\n getAzureAsyncOperation(rawResponse) !== undefined ||\n getOperationLocation(rawResponse) !== undefined\n ) {\n return {\n mode: \"Location\",\n resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath),\n };\n } else if (getLocation(rawResponse) !== undefined) {\n return {\n mode: \"Location\",\n };\n } else if ([\"PUT\", \"PATCH\"].includes(requestMethod)) {\n return {\n mode: \"Body\",\n };\n }\n return {};\n}\n\nclass SimpleRestError extends Error {\n public statusCode?: number;\n constructor(message: string, statusCode: number) {\n super(message);\n this.name = \"RestError\";\n this.statusCode = statusCode;\n\n Object.setPrototypeOf(this, SimpleRestError.prototype);\n }\n}\n\nexport function isUnexpectedInitialResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![203, 204, 202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n\nexport function isUnexpectedPollingResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} while polling. This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js new file mode 100644 index 0000000000..90541c6920 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { getPollingUrl, inferLroMode, isUnexpectedInitialResponse } from "./requestUtils"; +import { isBodyPollingDone, processBodyPollingOperationResult } from "./bodyPolling"; +import { logger } from "./logger"; +import { processLocationPollingOperationResult } from "./locationPolling"; +import { processPassthroughOperationResult } from "./passthrough"; +/** + * creates a stepping function that maps an LRO state to another. + */ +export function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { + switch (config.mode) { + case "Location": { + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); + } + case "Body": { + return processBodyPollingOperationResult; + } + default: { + return processPassthroughOperationResult; + } + } +} +/** + * Creates a polling operation. + */ +export function createPoll(lroPrimitives) { + return async (path, pollerConfig, getLroStatusFromResponse) => { + const response = await lroPrimitives.sendPollRequest(path); + const retryAfter = response.rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) + : retryAfterInSeconds * 1000; + } + return getLroStatusFromResponse(response); + }; +} +function calculatePollingIntervalFromDate(retryAfterDate, defaultIntervalInMs) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return defaultIntervalInMs; +} +/** + * Creates a callback to be used to initialize the polling operation state. 
+ * @param state - of the polling operation + * @param operationSpec - of the LRO + * @param callback - callback to be called when the operation is done + * @returns callback that initializes the state of the polling operation + */ +export function createInitializeState(state, requestPath, requestMethod) { + return (response) => { + if (isUnexpectedInitialResponse(response.rawResponse)) + return true; + state.initialRawResponse = response.rawResponse; + state.isStarted = true; + state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath); + state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse); + /** short circuit polling if body polling is done in the initial request */ + if (state.config.mode === undefined || + (state.config.mode === "Body" && isBodyPollingDone(state.initialRawResponse))) { + state.result = response.flatResponse; + state.isCompleted = true; + } + logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`); + return Boolean(state.isCompleted); + }; +} +//# sourceMappingURL=stateMachine.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js.map b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js.map new file mode 100644 index 0000000000..9ee42afa3d --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/lroEngine/stateMachine.js.map @@ -0,0 +1 @@ +{"version":3,"file":"stateMachine.js","sourceRoot":"","sources":["../../../src/lroEngine/stateMachine.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAYlC,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,2BAA2B,EAAE,MAAM,gBAAgB,CAAC;AAC1F,OAAO,EAAE,iBAAiB,EAAE,iCAAiC,EAAE,MAAM,eAAe,CAAC;AACrF,OAAO,EAAE,MAAM,EAAE,MAAM,UAAU,CAAC;AAClC,OAAO,EAAE,qCAAqC,EAAE,MAAM,mBAAmB,CAAC;AAC1E,OAAO,EAAE,iCAAiC,EAAE,MAAM,eAAe,CAAC;AAElE;;GAEG;AACH,MAAM,UAAU,8BAA8B,CAC5C,aAA4C,EAC5C,MAAiB,EACjB,yBAAqD;IAErD,QAAQ,MAAM,CAAC,IAAI,EAAE;QACnB,KAAK,UAAU,CAAC,CAAC;YACf,OAAO,qCAAqC,CAC1C,aAAa,EACb,MAAM,CAAC,gBAAgB,EACvB,yBAAyB,CAC1B,CAAC;SACH;QACD,KAAK,MAAM,CAAC,CAAC;YACX,OAAO,iCAAiC,CAAC;SAC1C;QACD,OAAO,CAAC,CAAC;YACP,OAAO,iCAAiC,CAAC;SAC1C;KACF;AACH,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,UAAU,CACxB,aAA4C;IAM5C,OAAO,KAAK,EACV,IAAY,EACZ,YAA0B,EAC1B,wBAA2D,EAC9B,EAAE;QAC/B,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,UAAU,GAAuB,QAAQ,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;QACnF,IAAI,UAAU,KAAK,SAAS,EAAE;YAC5B,wEAAwE;YACxE,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;YACjD,YAAY,CAAC,YAAY,GAAG,KAAK,CAAC,mBAAmB,CAAC;gBACpD,CAAC,CAAC,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,EAAE,YAAY,CAAC,YAAY,CAAC;gBACnF,CAAC,CAAC,mBAAmB,GAAG,IAAI,CAAC;SAChC;QACD,OAAO,wBAAwB,CAAC,QAAQ,CAAC,CAAC;IAC5C,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,gCAAgC,CACvC,cAAoB,EACpB,mBAA2B;IAE3B,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;IACjD,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE;QAC5B,OAAO,cAAc,GAAG,OAAO,CAAC;KACjC;IACD,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,qBAAqB,CACnC,KAA2C,EAC3C,WAAmB,EACnB,aAAqB;IAErB,OAAO,CAAC,QAA8B,EAAW,EAAE;QACjD,IAAI,2BAA2B,CAAC,QAAQ,CAAC,WAAW,CAAC;YAAE,OAAO,IAAI,CAAC;QACnE,KAAK,CAAC,kBAAkB,GAAG,QAAQ,CAAC,WAAW,CAAC;QAChD,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,KAAK,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;QACxE,KAAK,CAAC,MAAM,GAAG,YAAY,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC,kBAAkB,CAAC,CAAC;QAClF,2EAA2E;QAC3E,IACE,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,SAAS;YAC/B,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,MAAM,IAAI,iBAAiB,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,EAC7E;YACA,KAAK,CAAC,MAAM,GAA
G,QAAQ,CAAC,YAAuB,CAAC;YAChD,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;QACD,MAAM,CAAC,OAAO,CAAC,uBAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QAC/D,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;IACpC,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroConfig,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { getPollingUrl, inferLroMode, isUnexpectedInitialResponse } from \"./requestUtils\";\nimport { isBodyPollingDone, processBodyPollingOperationResult } from \"./bodyPolling\";\nimport { logger } from \"./logger\";\nimport { processLocationPollingOperationResult } from \"./locationPolling\";\nimport { processPassthroughOperationResult } from \"./passthrough\";\n\n/**\n * creates a stepping function that maps an LRO state to another.\n */\nexport function createGetLroStatusFromResponse(\n lroPrimitives: LongRunningOperation,\n config: LroConfig,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): GetLroStatusFromResponse {\n switch (config.mode) {\n case \"Location\": {\n return processLocationPollingOperationResult(\n lroPrimitives,\n config.resourceLocation,\n lroResourceLocationConfig\n );\n }\n case \"Body\": {\n return processBodyPollingOperationResult;\n }\n default: {\n return processPassthroughOperationResult;\n }\n }\n}\n\n/**\n * Creates a polling operation.\n */\nexport function createPoll(\n lroPrimitives: LongRunningOperation\n): (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n) => Promise> {\n return async (\n path: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ): Promise> => {\n const response = await lroPrimitives.sendPollRequest(path);\n const retryAfter: string | undefined = response.rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n pollerConfig.intervalInMs = isNaN(retryAfterInSeconds)\n ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs)\n : retryAfterInSeconds * 1000;\n }\n return getLroStatusFromResponse(response);\n };\n}\n\nfunction calculatePollingIntervalFromDate(\n retryAfterDate: Date,\n defaultIntervalInMs: number\n): number {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return defaultIntervalInMs;\n}\n\n/**\n * Creates a callback to be used to initialize the polling operation state.\n * @param state - of the polling operation\n * @param operationSpec - of the LRO\n * @param callback - callback to be called when the operation is done\n * @returns callback that initializes the state of the polling operation\n */\nexport function createInitializeState(\n state: ResumablePollOperationState,\n requestPath: string,\n requestMethod: string\n): (response: LroResponse) => boolean {\n return (response: LroResponse): boolean => {\n if (isUnexpectedInitialResponse(response.rawResponse)) return true;\n state.initialRawResponse = response.rawResponse;\n state.isStarted = true;\n state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath);\n state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse);\n /** short circuit polling if body polling is done in the initial request */\n if (\n state.config.mode === undefined ||\n (state.config.mode === \"Body\" && isBodyPollingDone(state.initialRawResponse))\n ) {\n state.result = response.flatResponse as TResult;\n state.isCompleted = true;\n }\n logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`);\n return Boolean(state.isCompleted);\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js new file mode 100644 index 0000000000..f8c1767df8 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
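One detail worth calling out from `createPoll` in `stateMachine.js` above: a `Retry-After` header overrides the configured polling interval, and its value may be either a number of seconds or an HTTP date. A small sketch of that adjustment, with made-up values:

```ts
// Mirrors the Retry-After handling in createPoll/calculatePollingIntervalFromDate above.
function nextIntervalInMs(retryAfter: string | undefined, currentIntervalInMs: number): number {
  if (retryAfter === undefined) {
    return currentIntervalInMs; // no header: keep the configured interval
  }
  const seconds = parseInt(retryAfter);
  if (!isNaN(seconds)) {
    return seconds * 1000; // numeric form is in seconds
  }
  // HTTP-date form: wait until that instant, unless it is already in the past.
  const retryAt = new Date(retryAfter).getTime();
  const now = Date.now();
  return now < retryAt ? retryAt - now : currentIntervalInMs;
}

console.log(nextIntervalInMs("5", 2000)); // 5000
console.log(nextIntervalInMs(undefined, 2000)); // 2000
```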
+export {}; +//# sourceMappingURL=pollOperation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js.map b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js.map new file mode 100644 index 0000000000..96f166b890 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/pollOperation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pollOperation.js","sourceRoot":"","sources":["../../src/pollOperation.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * PollOperationState contains an opinionated list of the smallest set of properties needed\n * to define any long running operation poller.\n *\n * While the Poller class works as the local control mechanism to start triggering, wait for,\n * and potentially cancel a long running operation, the PollOperationState documents the status\n * of the remote long running operation.\n *\n * It should be updated at least when the operation starts, when it's finished, and when it's cancelled.\n * Though, implementations can have any other number of properties that can be updated by other reasons.\n */\nexport interface PollOperationState {\n /**\n * True if the operation has started.\n */\n isStarted?: boolean;\n /**\n * True if the operation has been completed.\n */\n isCompleted?: boolean;\n /**\n * True if the operation has been cancelled.\n */\n isCancelled?: boolean;\n /**\n * Will exist if the operation encountered any error.\n */\n error?: Error;\n /**\n * Will exist if the operation concluded in a result of an expected type.\n */\n result?: TResult;\n}\n\n/**\n * PollOperation is an interface that defines how to update the local reference of the state of the remote\n * long running operation, just as well as how to request the cancellation of the same operation.\n *\n * It also has a method to serialize the operation so that it can be stored and resumed at any time.\n */\nexport interface PollOperation {\n /**\n * The state of the operation.\n * It will be used to store the basic properties of PollOperationState,\n * plus any custom property that the implementation may require.\n */\n state: TState;\n\n /**\n * Defines how to request the remote service for updates on the status of the long running operation.\n *\n * It optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n * Also optionally receives a \"fireProgress\" function, which, if called, is responsible for triggering the\n * poller's onProgress callbacks.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise>;\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * It returns a promise that should be resolved with an updated version of the poller's operation.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n cancel(options?: { abortSignal?: AbortSignalLike }): Promise>;\n\n /**\n * Serializes the operation.\n * Useful when wanting to create a poller that monitors an existing operation.\n */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@azure/core-lro/dist-esm/src/poller.js b/node_modules/@azure/core-lro/dist-esm/src/poller.js new file mode 100644 index 0000000000..bdb550a4c2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/poller.js @@ -0,0 +1,390 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +/** + * When a poller is cancelled through the `cancelOperation` method, + * the poller will be rejected with an instance of the PollerCancelledError. + */ +export class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. 
+ * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +export class Poller { + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling() { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. 
+ */ + async pollOnce(options = {}) { + try { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. + this.resolve(this.operation.state.result); + } + } + } + catch (e) { + this.operation.state.error = e; + if (this.reject) { + this.reject(e); + } + throw e; + } + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method, and rejects the + * pollUntilDone promise. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + if (this.reject) { + this.reject(new PollerCancelledError("Poller cancelled")); + } + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone() { + if (this.stopped) { + this.startPolling().catch(this.reject); + } + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. 
+ * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.stopped) { + this.stopped = true; + } + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. 
+ */ + toString() { + return this.operation.toString(); + } +} +//# sourceMappingURL=poller.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist-esm/src/poller.js.map b/node_modules/@azure/core-lro/dist-esm/src/poller.js.map new file mode 100644 index 0000000000..ea5abb95f5 --- /dev/null +++ b/node_modules/@azure/core-lro/dist-esm/src/poller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"poller.js","sourceRoot":"","sources":["../../src/poller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAoBlC;;;GAGG;AACH,MAAM,OAAO,kBAAmB,SAAQ,KAAK;IAC3C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;IAC5D,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,OAAO,oBAAqB,SAAQ,KAAK;IAC7C,YAAY,OAAe;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;QACf,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;IAC9D,CAAC;CACF;AA2DD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4DG;AACH,gDAAgD;AAChD,MAAM,OAAgB,MAAM;IAiB1B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAgEG;IACH,YAAY,SAAyC;QA/E7C,YAAO,GAAY,IAAI,CAAC;QAMxB,0BAAqB,GAAmC,EAAE,CAAC;QA0EjE,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,EAC1E,EAAE;YACF,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACvB,CAAC,CACF,CAAC;QACF,mFAAmF;QACnF,sFAAsF;QACtF,mFAAmF;QACnF,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,GAAG,EAAE;YACtB,yBAAyB;QAC3B,CAAC,CAAC,CAAC;IACL,CAAC;IAyBD;;;OAGG;IACK,KAAK,CAAC,YAAY;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;SACtB;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;YAC1C,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;YAClB,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;SACpB;IACH,CAAC;IAED;;;;;;;OAOG;IACK,KAAK,CAAC,QAAQ,CAAC,UAA6C,EAAE;QACpE,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;oBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;iBAC3C,CAAC,CAAC;gBACH,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE;oBACjC,uEAAuE;oBACvE,uEAAuE;oBACvE,oEAAoE;oBACpE,uEAAuE;oBACvE,cAAc;oBACd,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,MAAiB,CAAC,CAAC;iBACtD;aACF;SACF;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC;YAC/B,IAAI,IAAI,CAAC,MAAM,EAAE;gBACf,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;aAChB;YACD,MAAM,CAAC,CAAC;SACT;IACH,CAAC;IAED;;;;;;;OAOG;IACK,YAAY,CAAC,KAAa;QAChC,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE;YACjD,QAAQ,CAAC,KAAK,CAAC,CAAC;SACjB;IACH,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,UAAU,CAAC,UAA6C,EAAE;QACtE,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACtD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,IAAI,oBAAoB,CAAC,kBAAkB,CAAC,CAAC,CAAC;SAC3D;IACH,CAAC;IAED;;;;;;;OAOG;IACI,IAAI,CAAC,UAA6C,EAAE;QACzD,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;YACzB,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,GAAS,EAAE;gBACtC,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;YACnC,CAAC,CAAC;YACF,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC1F;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;IAC9B,CAAC;IAED;;OAEG;IACI,KAAK,CAAC,aAAa;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACxC;QACD,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;OAKG;IACI,UAAU,CAAC,QAAiC;QACjD,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAC1C,OAAO,GAAS,EAAE;YAChB,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,E
AAE,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC;QACxF,CAAC,CAAC;IACJ,CAAC;IAED;;OAEG;IACI,MAAM;QACX,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;IACxE,CAAC;IAED;;OAEG;IACI,WAAW;QAChB,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,CAAC,MAAM,EAAE;gBACf,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;aACvE;SACF;IACH,CAAC;IAED;;OAEG;IACI,SAAS;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;;;;;;OAQG;IACI,eAAe,CAAC,UAA6C,EAAE;QACpE,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;YACjB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;SACrB;QACD,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YACvB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;SAC/C;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;SAC1D;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+CG;IACI,iBAAiB;QACtB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;IAC9B,CAAC;IAED;;;;;OAKG;IACI,SAAS;QACd,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;IACnC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When a poller is cancelled through the `cancelOperation` method,\n * the poller will be rejected with an instance of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that 
can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * 
// The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements PollerLike\n{\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. 
For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n }\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-http has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-http\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll();\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n try {\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. 
To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.operation.state.result as TResult);\n }\n }\n } catch (e) {\n this.operation.state.error = e;\n if (this.reject) {\n this.reject(e);\n }\n throw e;\n }\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method, and rejects the\n * pollUntilDone promise.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n if (this.reject) {\n this.reject(new PollerCancelledError(\"Poller cancelled\"));\n }\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(): Promise {\n if (this.stopped) {\n this.startPolling().catch(this.reject);\n }\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if 
(!this.stopped) {\n this.stopped = true;\n }\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/index.js b/node_modules/@azure/core-lro/dist/index.js new file mode 100644 index 0000000000..3f30fc9cb2 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/index.js @@ -0,0 +1,751 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var logger$1 = require('@azure/logger'); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. 
+ */ +class PollerStoppedError extends Error { + constructor(message) { + super(message); + this.name = "PollerStoppedError"; + Object.setPrototypeOf(this, PollerStoppedError.prototype); + } +} +/** + * When a poller is cancelled through the `cancelOperation` method, + * the poller will be rejected with an instance of the PollerCancelledError. + */ +class PollerCancelledError extends Error { + constructor(message) { + super(message); + this.name = "PollerCancelledError"; + Object.setPrototypeOf(this, PollerCancelledError.prototype); + } +} +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. + * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +// eslint-disable-next-line no-use-before-define +class Poller { + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. 
The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation) { + this.stopped = true; + this.pollProgressCallbacks = []; + this.operation = operation; + this.promise = new Promise((resolve, reject) => { + this.resolve = resolve; + this.reject = reject; + }); + // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. + // The above warning would get thrown if `poller.poll` is called, it returns an error, + // and pullUntilDone did not have a .catch or await try/catch on it's return value. + this.promise.catch(() => { + /* intentionally blank */ + }); + } + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + async startPolling() { + if (this.stopped) { + this.stopped = false; + } + while (!this.isStopped() && !this.isDone()) { + await this.poll(); + await this.delay(); + } + } + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + async pollOnce(options = {}) { + try { + if (!this.isDone()) { + this.operation = await this.operation.update({ + abortSignal: options.abortSignal, + fireProgress: this.fireProgress.bind(this), + }); + if (this.isDone() && this.resolve) { + // If the poller has finished polling, this means we now have a result. + // However, it can be the case that TResult is instantiated to void, so + // we are not expecting a result anyway. To assert that we might not + // have a result eventually after finishing polling, we cast the result + // to TResult. 
+ this.resolve(this.operation.state.result); + } + } + } + catch (e) { + this.operation.state.error = e; + if (this.reject) { + this.reject(e); + } + throw e; + } + } + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + fireProgress(state) { + for (const callback of this.pollProgressCallbacks) { + callback(state); + } + } + /** + * Invokes the underlying operation's cancel method, and rejects the + * pollUntilDone promise. + */ + async cancelOnce(options = {}) { + this.operation = await this.operation.cancel(options); + if (this.reject) { + this.reject(new PollerCancelledError("Poller cancelled")); + } + } + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options = {}) { + if (!this.pollOncePromise) { + this.pollOncePromise = this.pollOnce(options); + const clearPollOncePromise = () => { + this.pollOncePromise = undefined; + }; + this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); + } + return this.pollOncePromise; + } + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + async pollUntilDone() { + if (this.stopped) { + this.startPolling().catch(this.reject); + } + return this.promise; + } + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback) { + this.pollProgressCallbacks.push(callback); + return () => { + this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); + }; + } + /** + * Returns true if the poller has finished polling. + */ + isDone() { + const state = this.operation.state; + return Boolean(state.isCompleted || state.isCancelled || state.error); + } + /** + * Stops the poller from continuing to poll. + */ + stopPolling() { + if (!this.stopped) { + this.stopped = true; + if (this.reject) { + this.reject(new PollerStoppedError("This poller is already stopped")); + } + } + } + /** + * Returns true if the poller is stopped. + */ + isStopped() { + return this.stopped; + } + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancelOperation(options = {}) { + if (!this.stopped) { + this.stopped = true; + } + if (!this.cancelPromise) { + this.cancelPromise = this.cancelOnce(options); + } + else if (options.abortSignal) { + throw new Error("A cancel request is currently pending"); + } + return this.cancelPromise; + } + /** + * Returns the state of the operation. 
+ * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState() { + return this.operation.state; + } + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult() { + const state = this.operation.state; + return state.result; + } + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString() { + return this.operation.toString(); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Detects where the continuation token is and returns it. Notice that azure-asyncoperation + * must be checked first before the other location headers because there are scenarios + * where both azure-asyncoperation and location could be present in the same response but + * azure-asyncoperation should be the one to use for polling. + */ +function getPollingUrl(rawResponse, defaultPath) { + var _a, _b, _c; + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? _b : getLocation(rawResponse)) !== null && _c !== void 0 ? 
_c : defaultPath); +} +function getLocation(rawResponse) { + return rawResponse.headers["location"]; +} +function getOperationLocation(rawResponse) { + return rawResponse.headers["operation-location"]; +} +function getAzureAsyncOperation(rawResponse) { + return rawResponse.headers["azure-asyncoperation"]; +} +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } + } +} +function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || + getOperationLocation(rawResponse) !== undefined) { + return { + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", + }; + } + else if (["PUT", "PATCH"].includes(requestMethod)) { + return { + mode: "Body", + }; + } + return {}; +} +class SimpleRestError extends Error { + constructor(message, statusCode) { + super(message); + this.name = "RestError"; + this.statusCode = statusCode; + Object.setPrototypeOf(this, SimpleRestError.prototype); + } +} +function isUnexpectedInitialResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![203, 204, 202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`, code); + } + return false; +} +function isUnexpectedPollingResponse(rawResponse) { + const code = rawResponse.statusCode; + if (![202, 201, 200, 500].includes(code)) { + throw new SimpleRestError(`Received unexpected HTTP status code ${code} while polling. This may indicate a server issue.`, code); + } + return false; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const successStates = ["succeeded"]; +const failureStates = ["failed", "canceled", "cancelled"]; + +// Copyright (c) Microsoft Corporation. +function getProvisioningState(rawResponse) { + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; + return typeof state === "string" ? state.toLowerCase() : "succeeded"; +} +function isBodyPollingDone(rawResponse) { + const state = getProvisioningState(rawResponse); + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Creates a polling strategy based on BodyPolling which uses the provisioning state + * from the result to determine the current operation state + */ +function processBodyPollingOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: isBodyPollingDone(response.rawResponse) }); +} + +// Copyright (c) Microsoft Corporation. +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +const logger = logger$1.createClientLogger("core-lro"); + +// Copyright (c) Microsoft Corporation. 
+function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); +} +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function processPassthroughOperationResult(response) { + return Object.assign(Object.assign({}, response), { done: true }); +} + +// Copyright (c) Microsoft Corporation. +/** + * creates a stepping function that maps an LRO state to another. + */ +function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { + switch (config.mode) { + case "Location": { + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); + } + case "Body": { + return processBodyPollingOperationResult; + } + default: { + return processPassthroughOperationResult; + } + } +} +/** + * Creates a polling operation. + */ +function createPoll(lroPrimitives) { + return async (path, pollerConfig, getLroStatusFromResponse) => { + const response = await lroPrimitives.sendPollRequest(path); + const retryAfter = response.rawResponse.headers["retry-after"]; + if (retryAfter !== undefined) { + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) + ? calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) + : retryAfterInSeconds * 1000; + } + return getLroStatusFromResponse(response); + }; +} +function calculatePollingIntervalFromDate(retryAfterDate, defaultIntervalInMs) { + const timeNow = Math.floor(new Date().getTime()); + const retryAfterTime = retryAfterDate.getTime(); + if (timeNow < retryAfterTime) { + return retryAfterTime - timeNow; + } + return defaultIntervalInMs; +} +/** + * Creates a callback to be used to initialize the polling operation state. 
+ * @param state - of the polling operation + * @param operationSpec - of the LRO + * @param callback - callback to be called when the operation is done + * @returns callback that initializes the state of the polling operation + */ +function createInitializeState(state, requestPath, requestMethod) { + return (response) => { + if (isUnexpectedInitialResponse(response.rawResponse)) + ; + state.initialRawResponse = response.rawResponse; + state.isStarted = true; + state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath); + state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse); + /** short circuit polling if body polling is done in the initial request */ + if (state.config.mode === undefined || + (state.config.mode === "Body" && isBodyPollingDone(state.initialRawResponse))) { + state.result = response.flatResponse; + state.isCompleted = true; + } + logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`); + return Boolean(state.isCompleted); + }; +} + +// Copyright (c) Microsoft Corporation. +class GenericPollOperation { + constructor(state, lro, lroResourceLocationConfig, processResult, updateState, isDone) { + this.state = state; + this.lro = lro; + this.lroResourceLocationConfig = lroResourceLocationConfig; + this.processResult = processResult; + this.updateState = updateState; + this.isDone = isDone; + } + setPollerConfig(pollerConfig) { + this.pollerConfig = pollerConfig; + } + /** + * General update function for LROPoller, the general process is as follows + * 1. Check initial operation result to determine the strategy to use + * - Strategies: Location, Azure-AsyncOperation, Original Uri + * 2. Check if the operation result has a terminal state + * - Terminal state will be determined by each strategy + * 2.1 If it is terminal state Check if a final GET request is required, if so + * send final GET request and return result from operation. If no final GET + * is required, just return the result from operation. + * - Determining what to call for final request is responsibility of each strategy + * 2.2 If it is not terminal state, call the polling operation and go to step 1 + * - Determining what to call for polling is responsibility of each strategy + * - Strategies will always use the latest URI for polling if provided otherwise + * the last known one + */ + async update(options) { + var _a, _b, _c; + const state = this.state; + let lastResponse = undefined; + if (!state.isStarted) { + const initializeState = createInitializeState(state, this.lro.requestPath, this.lro.requestMethod); + lastResponse = await this.lro.sendInitialRequest(); + initializeState(lastResponse); + } + if (!state.isCompleted) { + if (!this.poll || !this.getLroStatusFromResponse) { + if (!state.config) { + throw new Error("Bad state: LRO mode is undefined. Please check if the serialized state is well-formed."); + } + const isDone = this.isDone; + this.getLroStatusFromResponse = isDone + ? (response) => (Object.assign(Object.assign({}, response), { done: isDone(response.flatResponse, this.state) })) + : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig); + this.poll = createPoll(this.lro); + } + if (!state.pollingURL) { + throw new Error("Bad state: polling URL is undefined. 
Please check if the serialized state is well-formed."); + } + const currentState = await this.poll(state.pollingURL, this.pollerConfig, this.getLroStatusFromResponse); + logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`); + if (currentState.done) { + state.result = this.processResult + ? this.processResult(currentState.flatResponse, state) + : currentState.flatResponse; + state.isCompleted = true; + } + else { + this.poll = (_a = currentState.next) !== null && _a !== void 0 ? _a : this.poll; + state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL); + } + lastResponse = currentState; + } + logger.verbose(`LRO: current state: ${JSON.stringify(state)}`); + if (lastResponse) { + (_b = this.updateState) === null || _b === void 0 ? void 0 : _b.call(this, state, lastResponse === null || lastResponse === void 0 ? void 0 : lastResponse.rawResponse); + } + else { + logger.error(`LRO: no response was received`); + } + (_c = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _c === void 0 ? void 0 : _c.call(options, state); + return this; + } + async cancel() { + this.state.isCancelled = true; + return this; + } + /** + * Serializes the Poller operation. + */ + toString() { + return JSON.stringify({ + state: this.state, + }); + } +} + +// Copyright (c) Microsoft Corporation. +function deserializeState(serializedState) { + try { + return JSON.parse(serializedState).state; + } + catch (e) { + throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`); + } +} +/** + * The LRO Engine, a class that performs polling. + */ +class LroEngine extends Poller { + constructor(lro, options) { + const { intervalInMs = 2000, resumeFrom } = options || {}; + const state = resumeFrom + ? deserializeState(resumeFrom) + : {}; + const operation = new GenericPollOperation(state, lro, options === null || options === void 0 ? void 0 : options.lroResourceLocationConfig, options === null || options === void 0 ? void 0 : options.processResult, options === null || options === void 0 ? void 0 : options.updateState, options === null || options === void 0 ? void 0 : options.isDone); + super(operation); + this.config = { intervalInMs: intervalInMs }; + operation.setPollerConfig(this.config); + } + /** + * The method used by the poller to wait before attempting to update its operation. 
+ */ + delay() { + return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); + } +} + +exports.LroEngine = LroEngine; +exports.Poller = Poller; +exports.PollerCancelledError = PollerCancelledError; +exports.PollerStoppedError = PollerStoppedError; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-lro/dist/index.js.map b/node_modules/@azure/core-lro/dist/index.js.map new file mode 100644 index 0000000000..9aa28f7914 --- /dev/null +++ b/node_modules/@azure/core-lro/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/poller.ts","../src/lroEngine/requestUtils.ts","../src/lroEngine/models.ts","../src/lroEngine/bodyPolling.ts","../src/lroEngine/logger.ts","../src/lroEngine/locationPolling.ts","../src/lroEngine/passthrough.ts","../src/lroEngine/stateMachine.ts","../src/lroEngine/operation.ts","../src/lroEngine/lroEngine.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperation, PollOperationState } from \"./pollOperation\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * CancelOnProgress is used as the return value of a Poller's onProgress method.\n * When a user invokes onProgress, they're required to pass in a function that will be\n * called as a callback with the new data received each time the poll operation is updated.\n * onProgress returns a function that will prevent any further update to reach the original callback.\n */\nexport type CancelOnProgress = () => void;\n\n/**\n * PollProgressCallback is the type of the callback functions sent to onProgress.\n * These functions will receive a TState that is defined by your implementation of\n * the Poller class.\n */\nexport type PollProgressCallback = (state: TState) => void;\n\n/**\n * When a poller is manually stopped through the `stopPolling` method,\n * the poller will be rejected with an instance of the PollerStoppedError.\n */\nexport class PollerStoppedError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerStoppedError\";\n Object.setPrototypeOf(this, PollerStoppedError.prototype);\n }\n}\n\n/**\n * When a poller is cancelled through the `cancelOperation` method,\n * the poller will be rejected with an instance of the PollerCancelledError.\n */\nexport class PollerCancelledError extends Error {\n constructor(message: string) {\n super(message);\n this.name = \"PollerCancelledError\";\n Object.setPrototypeOf(this, PollerCancelledError.prototype);\n }\n}\n\n/**\n * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with.\n */\n// eslint-disable-next-line no-use-before-define\nexport interface PollerLike, TResult> {\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n */\n poll(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n pollUntilDone(): Promise;\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n onProgress(callback: (state: TState) => void): CancelOnProgress;\n /**\n * Returns true if the poller has finished polling.\n */\n isDone(): boolean;\n /**\n * Stops the poller. 
After this, no manual or automated requests can be sent.\n */\n stopPolling(): void;\n /**\n * Returns true if the poller is stopped.\n */\n isStopped(): boolean;\n /**\n * Attempts to cancel the underlying operation.\n */\n cancelOperation(options?: { abortSignal?: AbortSignalLike }): Promise;\n /**\n * Returns the state of the operation.\n * The TState defined in PollerLike can be a subset of the TState defined in\n * the Poller implementation.\n */\n getOperationState(): TState;\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n getResult(): TResult | undefined;\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n toString(): string;\n}\n\n/**\n * A class that represents the definition of a program that polls through consecutive requests\n * until it reaches a state of completion.\n *\n * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed.\n * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes.\n * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation.\n *\n * ```ts\n * const poller = new MyPoller();\n *\n * // Polling just once:\n * await poller.poll();\n *\n * // We can try to cancel the request here, by calling:\n * //\n * // await poller.cancelOperation();\n * //\n *\n * // Getting the final result:\n * const result = await poller.pollUntilDone();\n * ```\n *\n * The Poller is defined by two types, a type representing the state of the poller, which\n * must include a basic set of properties from `PollOperationState`,\n * and a return type defined by `TResult`, which can be anything.\n *\n * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having\n * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type.\n *\n * ```ts\n * class Client {\n * public async makePoller: PollerLike {\n * const poller = new MyPoller({});\n * // It might be preferred to return the poller after the first request is made,\n * // so that some information can be obtained right away.\n * await poller.poll();\n * return poller;\n * }\n * }\n *\n * const poller: PollerLike = myClient.makePoller();\n * ```\n *\n * A poller can be created through its constructor, then it can be polled until it's completed.\n * At any point in time, the state of the poller can be obtained without delay through the getOperationState method.\n * At any point in time, the intermediate forms of the result type can be requested without delay.\n * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned.\n *\n * ```ts\n * const poller = myClient.makePoller();\n * const state: MyOperationState = poller.getOperationState();\n *\n * // The intermediate result can be obtained at any time.\n * const result: MyResult | undefined = poller.getResult();\n *\n * // The final result can only be obtained after the poller finishes.\n * const result: MyResult = await poller.pollUntilDone();\n * ```\n *\n */\n// eslint-disable-next-line no-use-before-define\nexport abstract class Poller, TResult>\n implements 
PollerLike\n{\n private stopped: boolean = true;\n private resolve?: (value: TResult) => void;\n private reject?: (error: PollerStoppedError | PollerCancelledError | Error) => void;\n private pollOncePromise?: Promise;\n private cancelPromise?: Promise;\n private promise: Promise;\n private pollProgressCallbacks: PollProgressCallback[] = [];\n\n /**\n * The poller's operation is available in full to any of the methods of the Poller class\n * and any class extending the Poller class.\n */\n protected operation: PollOperation;\n\n /**\n * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`.\n *\n * When writing an implementation of a Poller, this implementation needs to deal with the initialization\n * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's\n * operation has already been defined, at least its basic properties. The code below shows how to approach\n * the definition of the constructor of a new custom poller.\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor({\n * // Anything you might need outside of the basics\n * }) {\n * let state: MyOperationState = {\n * privateProperty: private,\n * publicProperty: public,\n * };\n *\n * const operation = {\n * state,\n * update,\n * cancel,\n * toString\n * }\n *\n * // Sending the operation to the parent's constructor.\n * super(operation);\n *\n * // You can assign more local properties here.\n * }\n * }\n * ```\n *\n * Inside of this constructor, a new promise is created. This will be used to\n * tell the user when the poller finishes (see `pollUntilDone()`). The promise's\n * resolve and reject methods are also used internally to control when to resolve\n * or reject anyone waiting for the poller to finish.\n *\n * The constructor of a custom implementation of a poller is where any serialized version of\n * a previous poller's operation should be deserialized into the operation sent to the\n * base constructor. 
For example:\n *\n * ```ts\n * export class MyPoller extends Poller {\n * constructor(\n * baseOperation: string | undefined\n * ) {\n * let state: MyOperationState = {};\n * if (baseOperation) {\n * state = {\n * ...JSON.parse(baseOperation).state,\n * ...state\n * };\n * }\n * const operation = {\n * state,\n * // ...\n * }\n * super(operation);\n * }\n * }\n * ```\n *\n * @param operation - Must contain the basic properties of `PollOperation`.\n */\n constructor(operation: PollOperation) {\n this.operation = operation;\n this.promise = new Promise(\n (\n resolve: (result: TResult) => void,\n reject: (error: PollerStoppedError | PollerCancelledError | Error) => void\n ) => {\n this.resolve = resolve;\n this.reject = reject;\n }\n );\n // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown.\n // The above warning would get thrown if `poller.poll` is called, it returns an error,\n // and pullUntilDone did not have a .catch or await try/catch on it's return value.\n this.promise.catch(() => {\n /* intentionally blank */\n });\n }\n\n /**\n * Defines how much to wait between each poll request.\n * This has to be implemented by your custom poller.\n *\n * \\@azure/core-http has a simple implementation of a delay function that waits as many milliseconds as specified.\n * This can be used as follows:\n *\n * ```ts\n * import { delay } from \"@azure/core-http\";\n *\n * export class MyPoller extends Poller {\n * // The other necessary definitions.\n *\n * async delay(): Promise {\n * const milliseconds = 1000;\n * return delay(milliseconds);\n * }\n * }\n * ```\n *\n */\n protected abstract delay(): Promise;\n\n /**\n * Starts a loop that will break only if the poller is done\n * or if the poller is stopped.\n */\n private async startPolling(): Promise {\n if (this.stopped) {\n this.stopped = false;\n }\n while (!this.isStopped() && !this.isDone()) {\n await this.poll();\n await this.delay();\n }\n }\n\n /**\n * pollOnce does one polling, by calling to the update method of the underlying\n * poll operation to make any relevant change effective.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n private async pollOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n try {\n if (!this.isDone()) {\n this.operation = await this.operation.update({\n abortSignal: options.abortSignal,\n fireProgress: this.fireProgress.bind(this),\n });\n if (this.isDone() && this.resolve) {\n // If the poller has finished polling, this means we now have a result.\n // However, it can be the case that TResult is instantiated to void, so\n // we are not expecting a result anyway. 
To assert that we might not\n // have a result eventually after finishing polling, we cast the result\n // to TResult.\n this.resolve(this.operation.state.result as TResult);\n }\n }\n } catch (e) {\n this.operation.state.error = e;\n if (this.reject) {\n this.reject(e);\n }\n throw e;\n }\n }\n\n /**\n * fireProgress calls the functions passed in via onProgress the method of the poller.\n *\n * It loops over all of the callbacks received from onProgress, and executes them, sending them\n * the current operation state.\n *\n * @param state - The current operation state.\n */\n private fireProgress(state: TState): void {\n for (const callback of this.pollProgressCallbacks) {\n callback(state);\n }\n }\n\n /**\n * Invokes the underlying operation's cancel method, and rejects the\n * pollUntilDone promise.\n */\n private async cancelOnce(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n this.operation = await this.operation.cancel(options);\n if (this.reject) {\n this.reject(new PollerCancelledError(\"Poller cancelled\"));\n }\n }\n\n /**\n * Returns a promise that will resolve once a single polling request finishes.\n * It does this by calling the update method of the Poller's operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public poll(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if (!this.pollOncePromise) {\n this.pollOncePromise = this.pollOnce(options);\n const clearPollOncePromise = (): void => {\n this.pollOncePromise = undefined;\n };\n this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject);\n }\n return this.pollOncePromise;\n }\n\n /**\n * Returns a promise that will resolve once the underlying operation is completed.\n */\n public async pollUntilDone(): Promise {\n if (this.stopped) {\n this.startPolling().catch(this.reject);\n }\n return this.promise;\n }\n\n /**\n * Invokes the provided callback after each polling is completed,\n * sending the current state of the poller's operation.\n *\n * It returns a method that can be used to stop receiving updates on the given callback function.\n */\n public onProgress(callback: (state: TState) => void): CancelOnProgress {\n this.pollProgressCallbacks.push(callback);\n return (): void => {\n this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback);\n };\n }\n\n /**\n * Returns true if the poller has finished polling.\n */\n public isDone(): boolean {\n const state: PollOperationState = this.operation.state;\n return Boolean(state.isCompleted || state.isCancelled || state.error);\n }\n\n /**\n * Stops the poller from continuing to poll.\n */\n public stopPolling(): void {\n if (!this.stopped) {\n this.stopped = true;\n if (this.reject) {\n this.reject(new PollerStoppedError(\"This poller is already stopped\"));\n }\n }\n }\n\n /**\n * Returns true if the poller is stopped.\n */\n public isStopped(): boolean {\n return this.stopped;\n }\n\n /**\n * Attempts to cancel the underlying operation.\n *\n * It only optionally receives an object with an abortSignal property, from \\@azure/abort-controller's AbortSignalLike.\n *\n * If it's called again before it finishes, it will throw an error.\n *\n * @param options - Optional properties passed to the operation's update method.\n */\n public cancelOperation(options: { abortSignal?: AbortSignalLike } = {}): Promise {\n if 
(!this.stopped) {\n this.stopped = true;\n }\n if (!this.cancelPromise) {\n this.cancelPromise = this.cancelOnce(options);\n } else if (options.abortSignal) {\n throw new Error(\"A cancel request is currently pending\");\n }\n return this.cancelPromise;\n }\n\n /**\n * Returns the state of the operation.\n *\n * Even though TState will be the same type inside any of the methods of any extension of the Poller class,\n * implementations of the pollers can customize what's shared with the public by writing their own\n * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller\n * and a public type representing a safe to share subset of the properties of the internal state.\n * Their definition of getOperationState can then return their public type.\n *\n * Example:\n *\n * ```ts\n * // Let's say we have our poller's operation state defined as:\n * interface MyOperationState extends PollOperationState {\n * privateProperty?: string;\n * publicProperty?: string;\n * }\n *\n * // To allow us to have a true separation of public and private state, we have to define another interface:\n * interface PublicState extends PollOperationState {\n * publicProperty?: string;\n * }\n *\n * // Then, we define our Poller as follows:\n * export class MyPoller extends Poller {\n * // ... More content is needed here ...\n *\n * public getOperationState(): PublicState {\n * const state: PublicState = this.operation.state;\n * return {\n * // Properties from PollOperationState\n * isStarted: state.isStarted,\n * isCompleted: state.isCompleted,\n * isCancelled: state.isCancelled,\n * error: state.error,\n * result: state.result,\n *\n * // The only other property needed by PublicState.\n * publicProperty: state.publicProperty\n * }\n * }\n * }\n * ```\n *\n * You can see this in the tests of this repository, go to the file:\n * `../test/utils/testPoller.ts`\n * and look for the getOperationState implementation.\n */\n public getOperationState(): TState {\n return this.operation.state;\n }\n\n /**\n * Returns the result value of the operation,\n * regardless of the state of the poller.\n * It can return undefined or an incomplete form of the final TResult value\n * depending on the implementation.\n */\n public getResult(): TResult | undefined {\n const state: PollOperationState = this.operation.state;\n return state.result;\n }\n\n /**\n * Returns a serialized version of the poller's operation\n * by invoking the operation's toString method.\n */\n public toString(): string {\n return this.operation.toString();\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroConfig, RawResponse } from \"./models\";\n\n/**\n * Detects where the continuation token is and returns it. 
Notice that azure-asyncoperation\n * must be checked first before the other location headers because there are scenarios\n * where both azure-asyncoperation and location could be present in the same response but\n * azure-asyncoperation should be the one to use for polling.\n */\nexport function getPollingUrl(rawResponse: RawResponse, defaultPath: string): string {\n return (\n getAzureAsyncOperation(rawResponse) ??\n getOperationLocation(rawResponse) ??\n getLocation(rawResponse) ??\n defaultPath\n );\n}\n\nfunction getLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"location\"];\n}\n\nfunction getOperationLocation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"operation-location\"];\n}\n\nfunction getAzureAsyncOperation(rawResponse: RawResponse): string | undefined {\n return rawResponse.headers[\"azure-asyncoperation\"];\n}\n\nfunction findResourceLocation(\n requestMethod: string,\n rawResponse: RawResponse,\n requestPath: string\n): string | undefined {\n switch (requestMethod) {\n case \"PUT\": {\n return requestPath;\n }\n case \"POST\":\n case \"PATCH\": {\n return getLocation(rawResponse);\n }\n default: {\n return undefined;\n }\n }\n}\n\nexport function inferLroMode(\n requestPath: string,\n requestMethod: string,\n rawResponse: RawResponse\n): LroConfig {\n if (\n getAzureAsyncOperation(rawResponse) !== undefined ||\n getOperationLocation(rawResponse) !== undefined\n ) {\n return {\n mode: \"Location\",\n resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath),\n };\n } else if (getLocation(rawResponse) !== undefined) {\n return {\n mode: \"Location\",\n };\n } else if ([\"PUT\", \"PATCH\"].includes(requestMethod)) {\n return {\n mode: \"Body\",\n };\n }\n return {};\n}\n\nclass SimpleRestError extends Error {\n public statusCode?: number;\n constructor(message: string, statusCode: number) {\n super(message);\n this.name = \"RestError\";\n this.statusCode = statusCode;\n\n Object.setPrototypeOf(this, SimpleRestError.prototype);\n }\n}\n\nexport function isUnexpectedInitialResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![203, 204, 202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} in the initial response. This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n\nexport function isUnexpectedPollingResponse(rawResponse: RawResponse): boolean {\n const code = rawResponse.statusCode;\n if (![202, 201, 200, 500].includes(code)) {\n throw new SimpleRestError(\n `Received unexpected HTTP status code ${code} while polling. 
This may indicate a server issue.`,\n code\n );\n }\n return false;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PollOperationState } from \"../pollOperation\";\n\n/**\n * Options for the LRO poller.\n */\nexport interface LroEngineOptions {\n /**\n * Defines how much time the poller is going to wait before making a new request to the service.\n */\n intervalInMs?: number;\n /**\n * A serialized poller which can be used to resume an existing paused Long-Running-Operation.\n */\n resumeFrom?: string;\n /**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\n lroResourceLocationConfig?: LroResourceLocationConfig;\n /**\n * A function to process the result of the LRO.\n */\n processResult?: (result: unknown, state: TState) => TResult;\n /**\n * A function to process the state of the LRO.\n */\n updateState?: (state: TState, lastResponse: RawResponse) => void;\n /**\n * A predicate to determine whether the LRO finished processing.\n */\n isDone?: (lastResponse: unknown, state: TState) => boolean;\n}\n\nexport const successStates = [\"succeeded\"];\nexport const failureStates = [\"failed\", \"canceled\", \"cancelled\"];\n/**\n * The LRO states that signal that the LRO has completed.\n */\nexport const terminalStates = successStates.concat(failureStates);\n\n/**\n * The potential location of the result of the LRO if specified by the LRO extension in the swagger.\n */\nexport type LroResourceLocationConfig = \"azure-async-operation\" | \"location\" | \"original-uri\";\n\n/**\n * The type of a LRO response body. This is just a convenience type for checking the status of the operation.\n */\n\nexport interface LroBody extends Record {\n /** The status of the operation. */\n status?: string;\n /** The state of the provisioning process */\n provisioningState?: string;\n /** The properties of the provisioning process */\n properties?: { provisioningState?: string } & Record;\n}\n\n/**\n * Simple type of the raw response.\n */\nexport interface RawResponse {\n /** The HTTP status code */\n statusCode: number;\n /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */\n headers: {\n [headerName: string]: string;\n };\n /** The parsed response body */\n body?: unknown;\n}\n\n/**\n * The type of the response of a LRO.\n */\nexport interface LroResponse {\n /** The flattened response */\n flatResponse: T;\n /** The raw response */\n rawResponse: RawResponse;\n}\n\n/** The type of which LRO implementation being followed by a specific API. 
*/\nexport type LroMode = \"Location\" | \"Body\";\n\n/**\n * The configuration of a LRO to determine how to perform polling and checking whether the operation has completed.\n */\nexport interface LroConfig {\n /** The LRO mode */\n mode?: LroMode;\n /** The path of a provisioned resource */\n resourceLocation?: string;\n}\n\n/**\n * Type of a polling operation state that can actually be resumed.\n */\nexport type ResumablePollOperationState = PollOperationState & {\n initialRawResponse?: RawResponse;\n config?: LroConfig;\n pollingURL?: string;\n};\n\nexport interface PollerConfig {\n intervalInMs: number;\n}\n\n/**\n * The type of a terminal state of an LRO.\n */\nexport interface LroTerminalState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: true;\n}\n\n/**\n * The type of an in-progress state of an LRO.\n */\nexport interface LroInProgressState extends LroResponse {\n /**\n * Whether the operation has finished.\n */\n done: false;\n /**\n * The request to be sent next if it is different from the standard polling one.\n * Notice that it will disregard any polling URLs provided to it.\n */\n next?: () => Promise>;\n}\n\n/**\n * The type of an LRO state which is a tagged union of terminal and in-progress states.\n */\nexport type LroStatus = LroTerminalState | LroInProgressState;\n\n/**\n * The type of the getLROStatusFromResponse method. It takes the response as input and returns along the response whether the operation has finished.\n */\nexport type GetLroStatusFromResponse = (response: LroResponse) => LroStatus;\n\n/**\n * Description of a long running operation.\n */\nexport interface LongRunningOperation {\n /**\n * The request path.\n */\n requestPath: string;\n /**\n * The HTTP request method.\n */\n requestMethod: string;\n /**\n * A function that can be used to send initial request to the service.\n */\n sendInitialRequest: () => Promise>;\n /**\n * A function that can be used to poll for the current status of a long running operation.\n */\n sendPollRequest: (path: string) => Promise>;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LroBody,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction getProvisioningState(rawResponse: RawResponse): string {\n const { properties, provisioningState } = (rawResponse.body as LroBody) ?? {};\n const state: string | undefined = properties?.provisioningState ?? provisioningState;\n return typeof state === \"string\" ? state.toLowerCase() : \"succeeded\";\n}\n\nexport function isBodyPollingDone(rawResponse: RawResponse): boolean {\n const state = getProvisioningState(rawResponse);\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. 
The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Creates a polling strategy based on BodyPolling which uses the provisioning state\n * from the result to determine the current operation state\n */\nexport function processBodyPollingOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: isBodyPollingDone(response.rawResponse),\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n * @internal\n */\nexport const logger = createClientLogger(\"core-lro\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroBody,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n RawResponse,\n failureStates,\n successStates,\n} from \"./models\";\nimport { isUnexpectedPollingResponse } from \"./requestUtils\";\n\nfunction isPollingDone(rawResponse: RawResponse): boolean {\n if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) {\n return false;\n }\n const { status } = (rawResponse.body as LroBody) ?? {};\n const state = typeof status === \"string\" ? status.toLowerCase() : \"succeeded\";\n if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) {\n throw new Error(`The long running operation has failed. The provisioning state: ${state}.`);\n }\n return successStates.includes(state);\n}\n\n/**\n * Sends a request to the URI of the provisioned resource if needed.\n */\nasync function sendFinalRequest(\n lro: LongRunningOperation,\n resourceLocation: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): Promise | undefined> {\n switch (lroResourceLocationConfig) {\n case \"original-uri\":\n return lro.sendPollRequest(lro.requestPath);\n case \"azure-async-operation\":\n return undefined;\n case \"location\":\n default:\n return lro.sendPollRequest(resourceLocation ?? lro.requestPath);\n }\n}\n\nexport function processLocationPollingOperationResult(\n lro: LongRunningOperation,\n resourceLocation?: string,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): (response: LroResponse) => LroStatus {\n return (response: LroResponse): LroStatus => {\n if (isPollingDone(response.rawResponse)) {\n if (resourceLocation === undefined) {\n return { ...response, done: true };\n } else {\n return {\n ...response,\n done: false,\n next: async () => {\n const finalResponse = await sendFinalRequest(\n lro,\n resourceLocation,\n lroResourceLocationConfig\n );\n return {\n ...(finalResponse ?? 
response),\n done: true,\n };\n },\n };\n }\n }\n return {\n ...response,\n done: false,\n };\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { LroResponse, LroStatus } from \"./models\";\n\nexport function processPassthroughOperationResult(\n response: LroResponse\n): LroStatus {\n return {\n ...response,\n done: true,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroConfig,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { getPollingUrl, inferLroMode, isUnexpectedInitialResponse } from \"./requestUtils\";\nimport { isBodyPollingDone, processBodyPollingOperationResult } from \"./bodyPolling\";\nimport { logger } from \"./logger\";\nimport { processLocationPollingOperationResult } from \"./locationPolling\";\nimport { processPassthroughOperationResult } from \"./passthrough\";\n\n/**\n * creates a stepping function that maps an LRO state to another.\n */\nexport function createGetLroStatusFromResponse(\n lroPrimitives: LongRunningOperation,\n config: LroConfig,\n lroResourceLocationConfig?: LroResourceLocationConfig\n): GetLroStatusFromResponse {\n switch (config.mode) {\n case \"Location\": {\n return processLocationPollingOperationResult(\n lroPrimitives,\n config.resourceLocation,\n lroResourceLocationConfig\n );\n }\n case \"Body\": {\n return processBodyPollingOperationResult;\n }\n default: {\n return processPassthroughOperationResult;\n }\n }\n}\n\n/**\n * Creates a polling operation.\n */\nexport function createPoll(\n lroPrimitives: LongRunningOperation\n): (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n) => Promise> {\n return async (\n path: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ): Promise> => {\n const response = await lroPrimitives.sendPollRequest(path);\n const retryAfter: string | undefined = response.rawResponse.headers[\"retry-after\"];\n if (retryAfter !== undefined) {\n // Retry-After header value is either in HTTP date format, or in seconds\n const retryAfterInSeconds = parseInt(retryAfter);\n pollerConfig.intervalInMs = isNaN(retryAfterInSeconds)\n ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs)\n : retryAfterInSeconds * 1000;\n }\n return getLroStatusFromResponse(response);\n };\n}\n\nfunction calculatePollingIntervalFromDate(\n retryAfterDate: Date,\n defaultIntervalInMs: number\n): number {\n const timeNow = Math.floor(new Date().getTime());\n const retryAfterTime = retryAfterDate.getTime();\n if (timeNow < retryAfterTime) {\n return retryAfterTime - timeNow;\n }\n return defaultIntervalInMs;\n}\n\n/**\n * Creates a callback to be used to initialize the polling operation state.\n * @param state - of the polling operation\n * @param operationSpec - of the LRO\n * @param callback - callback to be called when the operation is done\n * @returns callback that initializes the state of the polling operation\n */\nexport function createInitializeState(\n state: ResumablePollOperationState,\n requestPath: string,\n requestMethod: string\n): (response: LroResponse) => boolean {\n return (response: LroResponse): boolean => {\n if (isUnexpectedInitialResponse(response.rawResponse)) return true;\n state.initialRawResponse = response.rawResponse;\n state.isStarted = true;\n state.pollingURL = getPollingUrl(state.initialRawResponse, requestPath);\n state.config = inferLroMode(requestPath, requestMethod, state.initialRawResponse);\n /** short circuit polling if body polling is done in the initial request */\n if (\n state.config.mode === undefined ||\n (state.config.mode === \"Body\" && isBodyPollingDone(state.initialRawResponse))\n ) {\n state.result = response.flatResponse as TResult;\n state.isCompleted = true;\n }\n logger.verbose(`LRO: initial state: ${JSON.stringify(state)}`);\n return Boolean(state.isCompleted);\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n GetLroStatusFromResponse,\n LongRunningOperation,\n LroResourceLocationConfig,\n LroResponse,\n LroStatus,\n PollerConfig,\n RawResponse,\n ResumablePollOperationState,\n} from \"./models\";\nimport { PollOperation, PollOperationState } from \"../pollOperation\";\nimport { createGetLroStatusFromResponse, createInitializeState, createPoll } from \"./stateMachine\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { getPollingUrl } from \"./requestUtils\";\nimport { logger } from \"./logger\";\n\nexport class GenericPollOperation>\n implements PollOperation\n{\n private poll?: (\n pollingURL: string,\n pollerConfig: PollerConfig,\n getLroStatusFromResponse: GetLroStatusFromResponse\n ) => Promise>;\n private pollerConfig?: PollerConfig;\n private getLroStatusFromResponse?: GetLroStatusFromResponse;\n\n constructor(\n public state: TState & ResumablePollOperationState,\n private lro: LongRunningOperation,\n private lroResourceLocationConfig?: LroResourceLocationConfig,\n private processResult?: (result: unknown, state: TState) => TResult,\n private updateState?: (state: TState, lastResponse: RawResponse) => void,\n private isDone?: (lastResponse: TResult, state: TState) => boolean\n ) {}\n\n public setPollerConfig(pollerConfig: PollerConfig): void {\n this.pollerConfig = pollerConfig;\n }\n\n /**\n * General update function for LROPoller, the general process is as follows\n * 1. Check initial operation result to determine the strategy to use\n * - Strategies: Location, Azure-AsyncOperation, Original Uri\n * 2. 
Check if the operation result has a terminal state\n * - Terminal state will be determined by each strategy\n * 2.1 If it is terminal state Check if a final GET request is required, if so\n * send final GET request and return result from operation. If no final GET\n * is required, just return the result from operation.\n * - Determining what to call for final request is responsibility of each strategy\n * 2.2 If it is not terminal state, call the polling operation and go to step 1\n * - Determining what to call for polling is responsibility of each strategy\n * - Strategies will always use the latest URI for polling if provided otherwise\n * the last known one\n */\n async update(options?: {\n abortSignal?: AbortSignalLike;\n fireProgress?: (state: TState) => void;\n }): Promise> {\n const state = this.state;\n let lastResponse: LroResponse | undefined = undefined;\n if (!state.isStarted) {\n const initializeState = createInitializeState(\n state,\n this.lro.requestPath,\n this.lro.requestMethod\n );\n lastResponse = await this.lro.sendInitialRequest();\n initializeState(lastResponse);\n }\n\n if (!state.isCompleted) {\n if (!this.poll || !this.getLroStatusFromResponse) {\n if (!state.config) {\n throw new Error(\n \"Bad state: LRO mode is undefined. Please check if the serialized state is well-formed.\"\n );\n }\n const isDone = this.isDone;\n this.getLroStatusFromResponse = isDone\n ? (response: LroResponse) => ({\n ...response,\n done: isDone(response.flatResponse, this.state),\n })\n : createGetLroStatusFromResponse(this.lro, state.config, this.lroResourceLocationConfig);\n this.poll = createPoll(this.lro);\n }\n if (!state.pollingURL) {\n throw new Error(\n \"Bad state: polling URL is undefined. Please check if the serialized state is well-formed.\"\n );\n }\n const currentState = await this.poll(\n state.pollingURL,\n this.pollerConfig!,\n this.getLroStatusFromResponse\n );\n logger.verbose(`LRO: polling response: ${JSON.stringify(currentState.rawResponse)}`);\n if (currentState.done) {\n state.result = this.processResult\n ? this.processResult(currentState.flatResponse, state)\n : currentState.flatResponse;\n state.isCompleted = true;\n } else {\n this.poll = currentState.next ?? 
this.poll;\n state.pollingURL = getPollingUrl(currentState.rawResponse, state.pollingURL);\n }\n lastResponse = currentState;\n }\n logger.verbose(`LRO: current state: ${JSON.stringify(state)}`);\n if (lastResponse) {\n this.updateState?.(state, lastResponse?.rawResponse);\n } else {\n logger.error(`LRO: no response was received`);\n }\n options?.fireProgress?.(state);\n return this;\n }\n\n async cancel(): Promise> {\n this.state.isCancelled = true;\n return this;\n }\n\n /**\n * Serializes the Poller operation.\n */\n public toString(): string {\n return JSON.stringify({\n state: this.state,\n });\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n LongRunningOperation,\n LroEngineOptions,\n PollerConfig,\n ResumablePollOperationState,\n} from \"./models\";\nimport { GenericPollOperation } from \"./operation\";\nimport { PollOperationState } from \"../pollOperation\";\nimport { Poller } from \"../poller\";\n\nfunction deserializeState(\n serializedState: string\n): TState & ResumablePollOperationState {\n try {\n return JSON.parse(serializedState).state;\n } catch (e) {\n throw new Error(`LroEngine: Unable to deserialize state: ${serializedState}`);\n }\n}\n\n/**\n * The LRO Engine, a class that performs polling.\n */\nexport class LroEngine> extends Poller<\n TState,\n TResult\n> {\n private config: PollerConfig;\n\n constructor(lro: LongRunningOperation, options?: LroEngineOptions) {\n const { intervalInMs = 2000, resumeFrom } = options || {};\n const state: TState & ResumablePollOperationState = resumeFrom\n ? deserializeState(resumeFrom)\n : ({} as TState & ResumablePollOperationState);\n\n const operation = new GenericPollOperation(\n state,\n lro,\n options?.lroResourceLocationConfig,\n options?.processResult,\n options?.updateState,\n options?.isDone\n );\n super(operation);\n\n this.config = { intervalInMs: intervalInMs };\n operation.setPollerConfig(this.config);\n }\n\n /**\n * The method used by the poller to wait before attempting to update its operation.\n */\n delay(): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs));\n 
}\n}\n"],"names":["createClientLogger"],"mappings":";;;;;;AAAA;AACA;AAoBA;;;AAGG;AACG,MAAO,kBAAmB,SAAQ,KAAK,CAAA;AAC3C,IAAA,WAAA,CAAY,OAAe,EAAA;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,oBAAoB,CAAC;QACjC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC;KAC3D;AACF,CAAA;AAED;;;AAGG;AACG,MAAO,oBAAqB,SAAQ,KAAK,CAAA;AAC7C,IAAA,WAAA,CAAY,OAAe,EAAA;QACzB,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,sBAAsB,CAAC;QACnC,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,SAAS,CAAC,CAAC;KAC7D;AACF,CAAA;AA2DD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA4DG;AACH;MACsB,MAAM,CAAA;AAiB1B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAgEG;AACH,IAAA,WAAA,CAAY,SAAyC,EAAA;QA/E7C,IAAO,CAAA,OAAA,GAAY,IAAI,CAAC;QAMxB,IAAqB,CAAA,qBAAA,GAAmC,EAAE,CAAC;AA0EjE,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CACxB,CACE,OAAkC,EAClC,MAA0E,KACxE;AACF,YAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,YAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACvB,SAAC,CACF,CAAC;;;;AAIF,QAAA,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,MAAK;;AAExB,SAAC,CAAC,CAAC;KACJ;AAyBD;;;AAGG;AACK,IAAA,MAAM,YAAY,GAAA;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC;AACtB,SAAA;QACD,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;AAC1C,YAAA,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;AAClB,YAAA,MAAM,IAAI,CAAC,KAAK,EAAE,CAAC;AACpB,SAAA;KACF;AAED;;;;;;;AAOG;AACK,IAAA,MAAM,QAAQ,CAAC,OAAA,GAA6C,EAAE,EAAA;QACpE,IAAI;AACF,YAAA,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;gBAClB,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC;oBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW;oBAChC,YAAY,EAAE,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC;AAC3C,iBAAA,CAAC,CAAC;gBACH,IAAI,IAAI,CAAC,MAAM,EAAE,IAAI,IAAI,CAAC,OAAO,EAAE;;;;;;oBAMjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,MAAiB,CAAC,CAAC;AACtD,iBAAA;AACF,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC;YAC/B,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,gBAAA,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;AAChB,aAAA;AACD,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;KACF;AAED;;;;;;;AAOG;AACK,IAAA,YAAY,CAAC,KAAa,EAAA;AAChC,QAAA,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,qBAAqB,EAAE;YACjD,QAAQ,CAAC,KAAK,CAAC,CAAC;AACjB,SAAA;KACF;AAED;;;AAGG;AACK,IAAA,MAAM,UAAU,CAAC,OAAA,GAA6C,EAAE,EAAA;AACtE,QAAA,IAAI,CAAC,SAAS,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;QACtD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,IAAI,CAAC,MAAM,CAAC,IAAI,oBAAoB,CAAC,kBAAkB,CAAC,CAAC,CAAC;AAC3D,SAAA;KACF;AAED;;;;;;;AAOG;IACI,IAAI,CAAC,UAA6C,EAAE,EAAA;AACzD,QAAA,IAAI,CAAC,IAAI,CAAC,eAAe,EAAE;YACzB,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;YAC9C,MAAM,oBAAoB,GAAG,MAAW;AACtC,gBAAA,IAAI,CAAC,eAAe,GAAG,SAAS,CAAC;AACnC,aAAC,CAAC;AACF,YAAA,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,oBAAoB,EAAE,oBAAoB,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC1F,SAAA;QACD,OAAO,IAAI,CAAC,eAAe,CAAC;KAC7B;AAED;;AAEG;AACI,IAAA,MAAM,aAAa,GAAA;QACxB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACxC,SAAA;QACD,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;;;AAKG;AACI,IAAA,UAAU,CAAC,QAAiC,EAAA;AACjD,QAAA,IAAI,CAAC,qBAAqB,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AAC1C,QAAA,OAAO,MAAW;AAChB,YAAA,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,QAAQ,CAAC,CAAC;AACxF,SAAC,CAAC;KACH;AAED;;AAEG;IACI,MAAM,GAAA;AACX,QAAA,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;AAChE,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,WAAW,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;KACvE;AAED;;AAEG;IACI,WAAW,GAAA;AAChB,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;AACjB,YAAA,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAA
I,CAAC,MAAM,EAAE;gBACf,IAAI,CAAC,MAAM,CAAC,IAAI,kBAAkB,CAAC,gCAAgC,CAAC,CAAC,CAAC;AACvE,aAAA;AACF,SAAA;KACF;AAED;;AAEG;IACI,SAAS,GAAA;QACd,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;AAED;;;;;;;;AAQG;IACI,eAAe,CAAC,UAA6C,EAAE,EAAA;AACpE,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE;AACjB,YAAA,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;AACrB,SAAA;AACD,QAAA,IAAI,CAAC,IAAI,CAAC,aAAa,EAAE;YACvB,IAAI,CAAC,aAAa,GAAG,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;AAC/C,SAAA;aAAM,IAAI,OAAO,CAAC,WAAW,EAAE;AAC9B,YAAA,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;AAC1D,SAAA;QACD,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+CG;IACI,iBAAiB,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;KAC7B;AAED;;;;;AAKG;IACI,SAAS,GAAA;AACd,QAAA,MAAM,KAAK,GAAgC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;QAChE,OAAO,KAAK,CAAC,MAAM,CAAC;KACrB;AAED;;;AAGG;IACI,QAAQ,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC;KAClC;AACF;;ACxgBD;AACA;AAIA;;;;;AAKG;AACa,SAAA,aAAa,CAAC,WAAwB,EAAE,WAAmB,EAAA;;IACzE,QACE,MAAA,CAAA,EAAA,GAAA,CAAA,EAAA,GAAA,sBAAsB,CAAC,WAAW,CAAC,MACnC,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,oBAAoB,CAAC,WAAW,CAAC,mCACjC,WAAW,CAAC,WAAW,CAAC,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GACxB,WAAW,EACX;AACJ,CAAC;AAED,SAAS,WAAW,CAAC,WAAwB,EAAA;AAC3C,IAAA,OAAO,WAAW,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;AACzC,CAAC;AAED,SAAS,oBAAoB,CAAC,WAAwB,EAAA;AACpD,IAAA,OAAO,WAAW,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACnD,CAAC;AAED,SAAS,sBAAsB,CAAC,WAAwB,EAAA;AACtD,IAAA,OAAO,WAAW,CAAC,OAAO,CAAC,sBAAsB,CAAC,CAAC;AACrD,CAAC;AAED,SAAS,oBAAoB,CAC3B,aAAqB,EACrB,WAAwB,EACxB,WAAmB,EAAA;AAEnB,IAAA,QAAQ,aAAa;QACnB,KAAK,KAAK,EAAE;AACV,YAAA,OAAO,WAAW,CAAC;AACpB,SAAA;AACD,QAAA,KAAK,MAAM,CAAC;QACZ,KAAK,OAAO,EAAE;AACZ,YAAA,OAAO,WAAW,CAAC,WAAW,CAAC,CAAC;AACjC,SAAA;AACD,QAAA,SAAS;AACP,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACF,KAAA;AACH,CAAC;SAEe,YAAY,CAC1B,WAAmB,EACnB,aAAqB,EACrB,WAAwB,EAAA;AAExB,IAAA,IACE,sBAAsB,CAAC,WAAW,CAAC,KAAK,SAAS;AACjD,QAAA,oBAAoB,CAAC,WAAW,CAAC,KAAK,SAAS,EAC/C;QACA,OAAO;AACL,YAAA,IAAI,EAAE,UAAU;YAChB,gBAAgB,EAAE,oBAAoB,CAAC,aAAa,EAAE,WAAW,EAAE,WAAW,CAAC;SAChF,CAAC;AACH,KAAA;AAAM,SAAA,IAAI,WAAW,CAAC,WAAW,CAAC,KAAK,SAAS,EAAE;QACjD,OAAO;AACL,YAAA,IAAI,EAAE,UAAU;SACjB,CAAC;AACH,KAAA;SAAM,IAAI,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,aAAa,CAAC,EAAE;QACnD,OAAO;AACL,YAAA,IAAI,EAAE,MAAM;SACb,CAAC;AACH,KAAA;AACD,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED,MAAM,eAAgB,SAAQ,KAAK,CAAA;IAEjC,WAAY,CAAA,OAAe,EAAE,UAAkB,EAAA;QAC7C,KAAK,CAAC,OAAO,CAAC,CAAC;AACf,QAAA,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;AACxB,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAE7B,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,eAAe,CAAC,SAAS,CAAC,CAAC;KACxD;AACF,CAAA;AAEK,SAAU,2BAA2B,CAAC,WAAwB,EAAA;AAClE,IAAA,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;AACpC,IAAA,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QAClD,MAAM,IAAI,eAAe,CACvB,CAAA,qCAAA,EAAwC,IAAI,CAA6D,2DAAA,CAAA,EACzG,IAAI,CACL,CAAC;AACH,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAEK,SAAU,2BAA2B,CAAC,WAAwB,EAAA;AAClE,IAAA,MAAM,IAAI,GAAG,WAAW,CAAC,UAAU,CAAC;AACpC,IAAA,IAAI,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;QACxC,MAAM,IAAI,eAAe,CACvB,CAAA,qCAAA,EAAwC,IAAI,CAAmD,iDAAA,CAAA,EAC/F,IAAI,CACL,CAAC;AACH,KAAA;AACD,IAAA,OAAO,KAAK,CAAC;AACf;;AC3GA;AACA;AAkCO,MAAM,aAAa,GAAG,CAAC,WAAW,CAAC,CAAC;AACpC,MAAM,aAAa,GAAG,CAAC,QAAQ,EAAE,UAAU,EAAE,WAAW,CAAC;;ACpChE;AAaA,SAAS,oBAAoB,CAAC,WAAwB,EAAA;;AACpD,IAAA,MAAM,EAAE,UAAU,EAAE,iBAAiB,EAAE,GAAG,CAAC,EAAA,GAAA,WAAW,CAAC,IAAgB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,EAAE,CAAC;AAC9E,IAAA,MAAM,KAAK,GAAuB,CAAA,EAAA,GAAA,UAAU,KAAV,IAAA,IAAA,UAAU,KA
AV,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,UAAU,CAAE,iBAAiB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,iBAAiB,CAAC;AACrF,IAAA,OAAO,OAAO,KAAK,KAAK,QAAQ,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,WAAW,CAAC;AACvE,CAAC;AAEK,SAAU,iBAAiB,CAAC,WAAwB,EAAA;AACxD,IAAA,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;IAChD,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;AAC7E,QAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,CAAA,CAAA,CAAG,CAAC,CAAC;AAC7F,KAAA;AACD,IAAA,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;;AAGG;AACG,SAAU,iCAAiC,CAC/C,QAA8B,EAAA;IAE9B,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CACX,EAAA,EAAA,IAAI,EAAE,iBAAiB,CAAC,QAAQ,CAAC,WAAW,CAAC,EAC7C,CAAA,CAAA;AACJ;;ACtCA;AAKA;;;AAGG;AACI,MAAM,MAAM,GAAGA,2BAAkB,CAAC,UAAU,CAAC;;ACTpD;AAeA,SAAS,aAAa,CAAC,WAAwB,EAAA;;IAC7C,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,WAAW,CAAC,UAAU,KAAK,GAAG,EAAE;AAC9E,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IACD,MAAM,EAAE,MAAM,EAAE,GAAG,CAAA,EAAA,GAAC,WAAW,CAAC,IAAgB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,EAAE,CAAC;AACvD,IAAA,MAAM,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,CAAC,WAAW,EAAE,GAAG,WAAW,CAAC;IAC9E,IAAI,2BAA2B,CAAC,WAAW,CAAC,IAAI,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;AAC7E,QAAA,MAAM,IAAI,KAAK,CAAC,kEAAkE,KAAK,CAAA,CAAA,CAAG,CAAC,CAAC;AAC7F,KAAA;AACD,IAAA,OAAO,aAAa,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AACvC,CAAC;AAED;;AAEG;AACH,eAAe,gBAAgB,CAC7B,GAAkC,EAClC,gBAAwB,EACxB,yBAAqD,EAAA;AAErD,IAAA,QAAQ,yBAAyB;AAC/B,QAAA,KAAK,cAAc;YACjB,OAAO,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AAC9C,QAAA,KAAK,uBAAuB;AAC1B,YAAA,OAAO,SAAS,CAAC;AACnB,QAAA,KAAK,UAAU,CAAC;AAChB,QAAA;AACE,YAAA,OAAO,GAAG,CAAC,eAAe,CAAC,gBAAgB,KAAhB,IAAA,IAAA,gBAAgB,KAAhB,KAAA,CAAA,GAAA,gBAAgB,GAAI,GAAG,CAAC,WAAW,CAAC,CAAC;AACnE,KAAA;AACH,CAAC;SAEe,qCAAqC,CACnD,GAAkC,EAClC,gBAAyB,EACzB,yBAAqD,EAAA;IAErD,OAAO,CAAC,QAA8B,KAAwB;AAC5D,QAAA,IAAI,aAAa,CAAC,QAAQ,CAAC,WAAW,CAAC,EAAE;YACvC,IAAI,gBAAgB,KAAK,SAAS,EAAE;AAClC,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,QAAQ,CAAA,EAAA,EAAE,IAAI,EAAE,IAAI,EAAG,CAAA,CAAA;AACpC,aAAA;AAAM,iBAAA;gBACL,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CACX,EAAA,EAAA,IAAI,EAAE,KAAK,EACX,IAAI,EAAE,YAAW;wBACf,MAAM,aAAa,GAAG,MAAM,gBAAgB,CAC1C,GAAG,EACH,gBAAgB,EAChB,yBAAyB,CAC1B,CAAC;AACF,wBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,GACM,aAAa,KAAb,IAAA,IAAA,aAAa,KAAb,KAAA,CAAA,GAAA,aAAa,GAAI,QAAQ,EAAC,EAAA,EAC9B,IAAI,EAAE,IAAI,EACV,CAAA,CAAA;AACJ,qBAAC,EACD,CAAA,CAAA;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACK,QAAQ,CAAA,EAAA,EACX,IAAI,EAAE,KAAK,EACX,CAAA,CAAA;AACJ,KAAC,CAAC;AACJ;;AC9EA;AACA;AAIM,SAAU,iCAAiC,CAC/C,QAA8B,EAAA;AAE9B,IAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACK,QAAQ,CAAA,EAAA,EACX,IAAI,EAAE,IAAI,EACV,CAAA,CAAA;AACJ;;ACZA;AAmBA;;AAEG;SACa,8BAA8B,CAC5C,aAA4C,EAC5C,MAAiB,EACjB,yBAAqD,EAAA;IAErD,QAAQ,MAAM,CAAC,IAAI;QACjB,KAAK,UAAU,EAAE;YACf,OAAO,qCAAqC,CAC1C,aAAa,EACb,MAAM,CAAC,gBAAgB,EACvB,yBAAyB,CAC1B,CAAC;AACH,SAAA;QACD,KAAK,MAAM,EAAE;AACX,YAAA,OAAO,iCAAiC,CAAC;AAC1C,SAAA;AACD,QAAA,SAAS;AACP,YAAA,OAAO,iCAAiC,CAAC;AAC1C,SAAA;AACF,KAAA;AACH,CAAC;AAED;;AAEG;AACG,SAAU,UAAU,CACxB,aAA4C,EAAA;IAM5C,OAAO,OACL,IAAY,EACZ,YAA0B,EAC1B,wBAA2D,KAC5B;QAC/B,MAAM,QAAQ,GAAG,MAAM,aAAa,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC;QAC3D,MAAM,UAAU,GAAuB,QAAQ,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC;QACnF,IAAI,UAAU,KAAK,SAAS,EAAE;;AAE5B,YAAA,MAAM,mBAAmB,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC;AACjD,YAAA,YAAY,CAAC,YAAY,GAAG,KAAK,CAAC,mBAAmB,CAAC;AACpD,kBAAE,gCAAgC,CAAC,IAAI,IAAI,CAAC,UAAU,CAAC,EAAE,YAAY,CAAC,YAAY,CAAC;AACnF,kBAAE,mBAAmB,GAAG,IAAI,CAAC;AAChC,SAAA;AACD,Q
AAA,OAAO,wBAAwB,CAAC,QAAQ,CAAC,CAAC;AAC5C,KAAC,CAAC;AACJ,CAAC;AAED,SAAS,gCAAgC,CACvC,cAAoB,EACpB,mBAA2B,EAAA;AAE3B,IAAA,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;AACjD,IAAA,MAAM,cAAc,GAAG,cAAc,CAAC,OAAO,EAAE,CAAC;IAChD,IAAI,OAAO,GAAG,cAAc,EAAE;QAC5B,OAAO,cAAc,GAAG,OAAO,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED;;;;;;AAMG;SACa,qBAAqB,CACnC,KAA2C,EAC3C,WAAmB,EACnB,aAAqB,EAAA;IAErB,OAAO,CAAC,QAA8B,KAAa;AACjD,QAAA,IAAI,2BAA2B,CAAC,QAAQ,CAAC,WAAW,CAAC;AAAE,YAAA,CAAY;AACnE,QAAA,KAAK,CAAC,kBAAkB,GAAG,QAAQ,CAAC,WAAW,CAAC;AAChD,QAAA,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,KAAK,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACxE,QAAA,KAAK,CAAC,MAAM,GAAG,YAAY,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC,kBAAkB,CAAC,CAAC;;AAElF,QAAA,IACE,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,SAAS;AAC/B,aAAC,KAAK,CAAC,MAAM,CAAC,IAAI,KAAK,MAAM,IAAI,iBAAiB,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,EAC7E;AACA,YAAA,KAAK,CAAC,MAAM,GAAG,QAAQ,CAAC,YAAuB,CAAC;AAChD,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACD,QAAA,MAAM,CAAC,OAAO,CAAC,CAAA,oBAAA,EAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAE,CAAA,CAAC,CAAC;AAC/D,QAAA,OAAO,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;AACpC,KAAC,CAAC;AACJ;;ACjHA;MAmBa,oBAAoB,CAAA;IAW/B,WACS,CAAA,KAAoD,EACnD,GAAkC,EAClC,yBAAqD,EACrD,aAA2D,EAC3D,WAAgE,EAChE,MAA0D,EAAA;QAL3D,IAAK,CAAA,KAAA,GAAL,KAAK,CAA+C;QACnD,IAAG,CAAA,GAAA,GAAH,GAAG,CAA+B;QAClC,IAAyB,CAAA,yBAAA,GAAzB,yBAAyB,CAA4B;QACrD,IAAa,CAAA,aAAA,GAAb,aAAa,CAA8C;QAC3D,IAAW,CAAA,WAAA,GAAX,WAAW,CAAqD;QAChE,IAAM,CAAA,MAAA,GAAN,MAAM,CAAoD;KAChE;AAEG,IAAA,eAAe,CAAC,YAA0B,EAAA;AAC/C,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;AAED;;;;;;;;;;;;;;AAcG;IACH,MAAM,MAAM,CAAC,OAGZ,EAAA;;AACC,QAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;QACzB,IAAI,YAAY,GAAqC,SAAS,CAAC;AAC/D,QAAA,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;AACpB,YAAA,MAAM,eAAe,GAAG,qBAAqB,CAC3C,KAAK,EACL,IAAI,CAAC,GAAG,CAAC,WAAW,EACpB,IAAI,CAAC,GAAG,CAAC,aAAa,CACvB,CAAC;YACF,YAAY,GAAG,MAAM,IAAI,CAAC,GAAG,CAAC,kBAAkB,EAAE,CAAC;YACnD,eAAe,CAAC,YAAY,CAAC,CAAC;AAC/B,SAAA;AAED,QAAA,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,wBAAwB,EAAE;AAChD,gBAAA,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE;AACjB,oBAAA,MAAM,IAAI,KAAK,CACb,wFAAwF,CACzF,CAAC;AACH,iBAAA;AACD,gBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;gBAC3B,IAAI,CAAC,wBAAwB,GAAG,MAAM;sBAClC,CAAC,QAA8B,MAAK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAC/B,QAAQ,CACX,EAAA,EAAA,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,YAAY,EAAE,IAAI,CAAC,KAAK,CAAC,EAC/C,CAAA,CAAA;AACJ,sBAAE,8BAA8B,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,yBAAyB,CAAC,CAAC;gBAC3F,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClC,aAAA;AACD,YAAA,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;AACrB,gBAAA,MAAM,IAAI,KAAK,CACb,2FAA2F,CAC5F,CAAC;AACH,aAAA;AACD,YAAA,MAAM,YAAY,GAAG,MAAM,IAAI,CAAC,IAAI,CAClC,KAAK,CAAC,UAAU,EAChB,IAAI,CAAC,YAAa,EAClB,IAAI,CAAC,wBAAwB,CAC9B,CAAC;AACF,YAAA,MAAM,CAAC,OAAO,CAAC,CAAA,uBAAA,EAA0B,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,WAAW,CAAC,CAAA,CAAE,CAAC,CAAC;YACrF,IAAI,YAAY,CAAC,IAAI,EAAE;AACrB,gBAAA,KAAK,CAAC,MAAM,GAAG,IAAI,CAAC,aAAa;sBAC7B,IAAI,CAAC,aAAa,CAAC,YAAY,CAAC,YAAY,EAAE,KAAK,CAAC;AACtD,sBAAE,YAAY,CAAC,YAAY,CAAC;AAC9B,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;AAAM,iBAAA;gBACL,IAAI,CAAC,IAAI,GAAG,CAAA,EAAA,GAAA,YAAY,CAAC,IAAI,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,IAAI,CAAC,IAAI,CAAC;AAC3C,gBAAA,KAAK,CAAC,UAAU,GAAG,aAAa,CAAC,YAAY,CAAC,WAAW,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;AAC9E,aAAA;YACD,YAAY,GAAG,YAAY,CAAC;AAC7B,SAAA;AACD,QAAA,MAAM,CAAC,OAAO,CAAC,CAAA,oBAAA,EAAuB,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAE,CAAA,CAAC,CAAC;AAC/D,QAAA,IAAI,YAA
Y,EAAE;AAChB,YAAA,CAAA,EAAA,GAAA,IAAI,CAAC,WAAW,MAAhB,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,IAAA,CAAA,IAAI,EAAe,KAAK,EAAE,YAAY,KAAA,IAAA,IAAZ,YAAY,KAAZ,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,YAAY,CAAE,WAAW,CAAC,CAAC;AACtD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,CAAC,KAAK,CAAC,CAAA,6BAAA,CAA+B,CAAC,CAAC;AAC/C,SAAA;AACD,QAAA,CAAA,EAAA,GAAA,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,YAAY,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,IAAA,CAArB,OAAO,EAAiB,KAAK,CAAC,CAAC;AAC/B,QAAA,OAAO,IAAI,CAAC;KACb;AAED,IAAA,MAAM,MAAM,GAAA;AACV,QAAA,IAAI,CAAC,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC9B,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;AAEG;IACI,QAAQ,GAAA;QACb,OAAO,IAAI,CAAC,SAAS,CAAC;YACpB,KAAK,EAAE,IAAI,CAAC,KAAK;AAClB,SAAA,CAAC,CAAC;KACJ;AACF;;ACvID;AAaA,SAAS,gBAAgB,CACvB,eAAuB,EAAA;IAEvB,IAAI;QACF,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC,KAAK,CAAC;AAC1C,KAAA;AAAC,IAAA,OAAO,CAAC,EAAE;AACV,QAAA,MAAM,IAAI,KAAK,CAAC,2CAA2C,eAAe,CAAA,CAAE,CAAC,CAAC;AAC/E,KAAA;AACH,CAAC;AAED;;AAEG;AACG,MAAO,SAA+D,SAAQ,MAGnF,CAAA;IAGC,WAAY,CAAA,GAAkC,EAAE,OAA2C,EAAA;QACzF,MAAM,EAAE,YAAY,GAAG,IAAI,EAAE,UAAU,EAAE,GAAG,OAAO,IAAI,EAAE,CAAC;QAC1D,MAAM,KAAK,GAAkD,UAAU;AACrE,cAAE,gBAAgB,CAAC,UAAU,CAAC;cAC3B,EAAoD,CAAC;AAE1D,QAAA,MAAM,SAAS,GAAG,IAAI,oBAAoB,CACxC,KAAK,EACL,GAAG,EACH,OAAO,KAAA,IAAA,IAAP,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,yBAAyB,EAClC,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,aAAa,EACtB,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,EACpB,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,MAAM,CAChB,CAAC;QACF,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjB,IAAI,CAAC,MAAM,GAAG,EAAE,YAAY,EAAE,YAAY,EAAE,CAAC;AAC7C,QAAA,SAAS,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;KACxC;AAED;;AAEG;IACH,KAAK,GAAA;QACH,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAK,UAAU,CAAC,MAAM,OAAO,EAAE,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC;KACxF;AACF;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-lro/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-lro/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-lro/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/README.md b/node_modules/@azure/core-lro/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. 
For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-lro/node_modules/tslib/modules/index.js b/node_modules/@azure/core-lro/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-lro/node_modules/tslib/modules/package.json b/node_modules/@azure/core-lro/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/package.json b/node_modules/@azure/core-lro/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": 
"https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-lro/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. 
+ * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. 
+ * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar<T>(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault<T>(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`.
+ */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ?
Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.html b/node_modules/@azure/core-lro/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-lro/node_modules/tslib/tslib.js b/node_modules/@azure/core-lro/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/core-lro/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-lro/package.json b/node_modules/@azure/core-lro/package.json new file mode 100644 index 0000000000..7d7057f57d --- /dev/null +++ b/node_modules/@azure/core-lro/package.json @@ -0,0 +1,133 @@ +{ + "name": "@azure/core-lro", + "author": "Microsoft Corporation", + "sdk-type": "client", + "version": "2.2.4", + "description": "Isomorphic client library for supporting long-running operations in node.js and browser.", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "lro", + "polling" + ], + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "lro", + "polling", + "azure", + "cloud" + ], + "main": "./dist/index.js", + "module": "dist-esm/src/index.js", + "types": "./types/core-lro.d.ts", + "files": [ + "types/core-lro.d.ts", + "dist-esm/src", + "dist/", + "README.md", + "LICENSE" + ], + "engines": { + "node": ">=12.0.0" + }, + "browser": { + "os": false, + "process": false + }, + "react-native": { + "./dist/index.js": "./dist-esm/src/index.js" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-lro/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && dev-tool run bundle", + "build": "npm run clean && tsc -p . 
&& dev-tool run bundle && api-extractor run --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* types *.log browser statistics.html coverage src/**/*.js test/**/*.js", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "test:browser": "npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run build:test && npm run unit-test:node && npm run integration-test:node", + "test": "npm run build:test && npm run unit-test", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "cross-env TS_NODE_FILES=true TS_NODE_COMPILER_OPTIONS=\"{\\\"module\\\":\\\"commonjs\\\"}\" nyc mocha -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"" + }, + "sideEffects": false, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/core-http": "^2.0.0", + "@azure/core-rest-pipeline": "^1.1.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/dev-tool": "^1.0.0", + "@azure/test-utils": "^1.0.0", + "@microsoft/api-extractor": "^7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "npm-run-all": "^4.1.5", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "rimraf": "^3.0.0", + "ts-node": "^10.0.0", + "typescript": "~4.2.0", + "uglify-js": "^3.4.9" + } +} diff --git a/node_modules/@azure/core-lro/types/core-lro.d.ts b/node_modules/@azure/core-lro/types/core-lro.d.ts new file mode 100644 index 0000000000..be4d99f0bd --- /dev/null +++ b/node_modules/@azure/core-lro/types/core-lro.d.ts @@ -0,0 +1,565 @@ +import { AbortSignalLike } from '@azure/abort-controller'; + +/** + * CancelOnProgress is used as the return value of a Poller's onProgress method. + * When a user invokes onProgress, they're required to pass in a function that will be + * called as a callback with the new data received each time the poll operation is updated. + * onProgress returns a function that will prevent any further update to reach the original callback. 
+ */ +export declare type CancelOnProgress = () => void; + +/** + * Description of a long running operation. + */ +export declare interface LongRunningOperation { + /** + * The request path. + */ + requestPath: string; + /** + * The HTTP request method. + */ + requestMethod: string; + /** + * A function that can be used to send initial request to the service. + */ + sendInitialRequest: () => Promise>; + /** + * A function that can be used to poll for the current status of a long running operation. + */ + sendPollRequest: (path: string) => Promise>; +} + +/** + * The LRO Engine, a class that performs polling. + */ +export declare class LroEngine> extends Poller { + private config; + constructor(lro: LongRunningOperation, options?: LroEngineOptions); + /** + * The method used by the poller to wait before attempting to update its operation. + */ + delay(): Promise; +} + +/** + * Options for the LRO poller. + */ +export declare interface LroEngineOptions { + /** + * Defines how much time the poller is going to wait before making a new request to the service. + */ + intervalInMs?: number; + /** + * A serialized poller which can be used to resume an existing paused Long-Running-Operation. + */ + resumeFrom?: string; + /** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ + lroResourceLocationConfig?: LroResourceLocationConfig; + /** + * A function to process the result of the LRO. + */ + processResult?: (result: unknown, state: TState) => TResult; + /** + * A function to process the state of the LRO. + */ + updateState?: (state: TState, lastResponse: RawResponse) => void; + /** + * A predicate to determine whether the LRO finished processing. + */ + isDone?: (lastResponse: unknown, state: TState) => boolean; +} + +/** + * The potential location of the result of the LRO if specified by the LRO extension in the swagger. + */ +export declare type LroResourceLocationConfig = "azure-async-operation" | "location" | "original-uri"; + +/** + * The type of the response of a LRO. + */ +export declare interface LroResponse { + /** The flattened response */ + flatResponse: T; + /** The raw response */ + rawResponse: RawResponse; +} + +/** + * A class that represents the definition of a program that polls through consecutive requests + * until it reaches a state of completion. + * + * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. + * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. + * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. + * + * ```ts + * const poller = new MyPoller(); + * + * // Polling just once: + * await poller.poll(); + * + * // We can try to cancel the request here, by calling: + * // + * // await poller.cancelOperation(); + * // + * + * // Getting the final result: + * const result = await poller.pollUntilDone(); + * ``` + * + * The Poller is defined by two types, a type representing the state of the poller, which + * must include a basic set of properties from `PollOperationState`, + * and a return type defined by `TResult`, which can be anything. 
+ * + * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having + * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. + * + * ```ts + * class Client { + * public async makePoller: PollerLike { + * const poller = new MyPoller({}); + * // It might be preferred to return the poller after the first request is made, + * // so that some information can be obtained right away. + * await poller.poll(); + * return poller; + * } + * } + * + * const poller: PollerLike = myClient.makePoller(); + * ``` + * + * A poller can be created through its constructor, then it can be polled until it's completed. + * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. + * At any point in time, the intermediate forms of the result type can be requested without delay. + * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. + * + * ```ts + * const poller = myClient.makePoller(); + * const state: MyOperationState = poller.getOperationState(); + * + * // The intermediate result can be obtained at any time. + * const result: MyResult | undefined = poller.getResult(); + * + * // The final result can only be obtained after the poller finishes. + * const result: MyResult = await poller.pollUntilDone(); + * ``` + * + */ +export declare abstract class Poller, TResult> implements PollerLike { + private stopped; + private resolve?; + private reject?; + private pollOncePromise?; + private cancelPromise?; + private promise; + private pollProgressCallbacks; + /** + * The poller's operation is available in full to any of the methods of the Poller class + * and any class extending the Poller class. + */ + protected operation: PollOperation; + /** + * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. + * + * When writing an implementation of a Poller, this implementation needs to deal with the initialization + * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's + * operation has already been defined, at least its basic properties. The code below shows how to approach + * the definition of the constructor of a new custom poller. + * + * ```ts + * export class MyPoller extends Poller { + * constructor({ + * // Anything you might need outside of the basics + * }) { + * let state: MyOperationState = { + * privateProperty: private, + * publicProperty: public, + * }; + * + * const operation = { + * state, + * update, + * cancel, + * toString + * } + * + * // Sending the operation to the parent's constructor. + * super(operation); + * + * // You can assign more local properties here. + * } + * } + * ``` + * + * Inside of this constructor, a new promise is created. This will be used to + * tell the user when the poller finishes (see `pollUntilDone()`). The promise's + * resolve and reject methods are also used internally to control when to resolve + * or reject anyone waiting for the poller to finish. + * + * The constructor of a custom implementation of a poller is where any serialized version of + * a previous poller's operation should be deserialized into the operation sent to the + * base constructor. 
For example: + * + * ```ts + * export class MyPoller extends Poller { + * constructor( + * baseOperation: string | undefined + * ) { + * let state: MyOperationState = {}; + * if (baseOperation) { + * state = { + * ...JSON.parse(baseOperation).state, + * ...state + * }; + * } + * const operation = { + * state, + * // ... + * } + * super(operation); + * } + * } + * ``` + * + * @param operation - Must contain the basic properties of `PollOperation`. + */ + constructor(operation: PollOperation); + /** + * Defines how much to wait between each poll request. + * This has to be implemented by your custom poller. + * + * \@azure/core-http has a simple implementation of a delay function that waits as many milliseconds as specified. + * This can be used as follows: + * + * ```ts + * import { delay } from "@azure/core-http"; + * + * export class MyPoller extends Poller { + * // The other necessary definitions. + * + * async delay(): Promise { + * const milliseconds = 1000; + * return delay(milliseconds); + * } + * } + * ``` + * + */ + protected abstract delay(): Promise; + /** + * Starts a loop that will break only if the poller is done + * or if the poller is stopped. + */ + private startPolling; + /** + * pollOnce does one polling, by calling to the update method of the underlying + * poll operation to make any relevant change effective. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + private pollOnce; + /** + * fireProgress calls the functions passed in via onProgress the method of the poller. + * + * It loops over all of the callbacks received from onProgress, and executes them, sending them + * the current operation state. + * + * @param state - The current operation state. + */ + private fireProgress; + /** + * Invokes the underlying operation's cancel method, and rejects the + * pollUntilDone promise. + */ + private cancelOnce; + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * @param options - Optional properties passed to the operation's update method. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller from continuing to poll. + */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * If it's called again before it finishes, it will throw an error. + * + * @param options - Optional properties passed to the operation's update method. 
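+ *
+ * A hedged usage sketch (assumes `poller` is an instance of a Poller subclass whose
+ * underlying operation supports cancellation):
+ *
+ * ```ts
+ * const pending = poller.pollUntilDone().catch((e) => {
+ *   // Once cancellation succeeds, pollUntilDone() is rejected with a PollerCancelledError.
+ *   console.log("polling ended:", e.message);
+ * });
+ * await poller.cancelOperation();
+ * await pending;
+ * ```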
+ */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * + * Even though TState will be the same type inside any of the methods of any extension of the Poller class, + * implementations of the pollers can customize what's shared with the public by writing their own + * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller + * and a public type representing a safe to share subset of the properties of the internal state. + * Their definition of getOperationState can then return their public type. + * + * Example: + * + * ```ts + * // Let's say we have our poller's operation state defined as: + * interface MyOperationState extends PollOperationState { + * privateProperty?: string; + * publicProperty?: string; + * } + * + * // To allow us to have a true separation of public and private state, we have to define another interface: + * interface PublicState extends PollOperationState { + * publicProperty?: string; + * } + * + * // Then, we define our Poller as follows: + * export class MyPoller extends Poller { + * // ... More content is needed here ... + * + * public getOperationState(): PublicState { + * const state: PublicState = this.operation.state; + * return { + * // Properties from PollOperationState + * isStarted: state.isStarted, + * isCompleted: state.isCompleted, + * isCancelled: state.isCancelled, + * error: state.error, + * result: state.result, + * + * // The only other property needed by PublicState. + * publicProperty: state.publicProperty + * } + * } + * } + * ``` + * + * You can see this in the tests of this repository, go to the file: + * `../test/utils/testPoller.ts` + * and look for the getOperationState implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} + +/** + * When a poller is cancelled through the `cancelOperation` method, + * the poller will be rejected with an instance of the PollerCancelledError. + */ +export declare class PollerCancelledError extends Error { + constructor(message: string); +} + +/** + * Abstract representation of a poller, intended to expose just the minimal API that the user needs to work with. + */ +export declare interface PollerLike, TResult> { + /** + * Returns a promise that will resolve once a single polling request finishes. + * It does this by calling the update method of the Poller's operation. + */ + poll(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns a promise that will resolve once the underlying operation is completed. + */ + pollUntilDone(): Promise; + /** + * Invokes the provided callback after each polling is completed, + * sending the current state of the poller's operation. + * + * It returns a method that can be used to stop receiving updates on the given callback function. + */ + onProgress(callback: (state: TState) => void): CancelOnProgress; + /** + * Returns true if the poller has finished polling. + */ + isDone(): boolean; + /** + * Stops the poller. After this, no manual or automated requests can be sent. 
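+ *
+ * Illustrative sketch only (assumes an existing `poller` of this PollerLike type):
+ *
+ * ```ts
+ * const pending = poller.pollUntilDone().catch((e) => {
+ *   // A manually stopped poller rejects pollUntilDone() with a PollerStoppedError.
+ *   console.log("stopped before completion:", e.message);
+ * });
+ * poller.stopPolling();
+ * await pending;
+ * ```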
+ */ + stopPolling(): void; + /** + * Returns true if the poller is stopped. + */ + isStopped(): boolean; + /** + * Attempts to cancel the underlying operation. + */ + cancelOperation(options?: { + abortSignal?: AbortSignalLike; + }): Promise; + /** + * Returns the state of the operation. + * The TState defined in PollerLike can be a subset of the TState defined in + * the Poller implementation. + */ + getOperationState(): TState; + /** + * Returns the result value of the operation, + * regardless of the state of the poller. + * It can return undefined or an incomplete form of the final TResult value + * depending on the implementation. + */ + getResult(): TResult | undefined; + /** + * Returns a serialized version of the poller's operation + * by invoking the operation's toString method. + */ + toString(): string; +} + +/** + * When a poller is manually stopped through the `stopPolling` method, + * the poller will be rejected with an instance of the PollerStoppedError. + */ +export declare class PollerStoppedError extends Error { + constructor(message: string); +} + +/** + * PollOperation is an interface that defines how to update the local reference of the state of the remote + * long running operation, just as well as how to request the cancellation of the same operation. + * + * It also has a method to serialize the operation so that it can be stored and resumed at any time. + */ +export declare interface PollOperation { + /** + * The state of the operation. + * It will be used to store the basic properties of PollOperationState, + * plus any custom property that the implementation may require. + */ + state: TState; + /** + * Defines how to request the remote service for updates on the status of the long running operation. + * + * It optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * Also optionally receives a "fireProgress" function, which, if called, is responsible for triggering the + * poller's onProgress callbacks. + * + * @param options - Optional properties passed to the operation's update method. + */ + update(options?: { + abortSignal?: AbortSignalLike; + fireProgress?: (state: TState) => void; + }): Promise>; + /** + * Attempts to cancel the underlying operation. + * + * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. + * + * It returns a promise that should be resolved with an updated version of the poller's operation. + * + * @param options - Optional properties passed to the operation's update method. + */ + cancel(options?: { + abortSignal?: AbortSignalLike; + }): Promise>; + /** + * Serializes the operation. + * Useful when wanting to create a poller that monitors an existing operation. + */ + toString(): string; +} + +/** + * PollOperationState contains an opinionated list of the smallest set of properties needed + * to define any long running operation poller. + * + * While the Poller class works as the local control mechanism to start triggering, wait for, + * and potentially cancel a long running operation, the PollOperationState documents the status + * of the remote long running operation. + * + * It should be updated at least when the operation starts, when it's finished, and when it's cancelled. + * Though, implementations can have any other number of properties that can be updated by other reasons. + */ +export declare interface PollOperationState { + /** + * True if the operation has started. 
+ */ + isStarted?: boolean; + /** + * True if the operation has been completed. + */ + isCompleted?: boolean; + /** + * True if the operation has been cancelled. + */ + isCancelled?: boolean; + /** + * Will exist if the operation encountered any error. + */ + error?: Error; + /** + * Will exist if the operation concluded in a result of an expected type. + */ + result?: TResult; +} + +/** + * PollProgressCallback is the type of the callback functions sent to onProgress. + * These functions will receive a TState that is defined by your implementation of + * the Poller class. + */ +export declare type PollProgressCallback = (state: TState) => void; + +/** + * Simple type of the raw response. + */ +export declare interface RawResponse { + /** The HTTP status code */ + statusCode: number; + /** A HttpHeaders collection in the response represented as a simple JSON object where all header names have been normalized to be lower-case. */ + headers: { + [headerName: string]: string; + }; + /** The parsed response body */ + body?: unknown; +} + +export { } diff --git a/node_modules/@azure/core-paging/LICENSE b/node_modules/@azure/core-paging/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/core-paging/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-paging/README.md b/node_modules/@azure/core-paging/README.md new file mode 100644 index 0000000000..5fb49772f5 --- /dev/null +++ b/node_modules/@azure/core-paging/README.md @@ -0,0 +1,61 @@ +# Azure Core Paging client library for JavaScript + +This library provides core types for paging async iterable iterators. + +## Getting started + +### Installation + +If using this as part of another project in the [azure-sdk-for-js](https://github.com/Azure/azure-sdk-for-js) repo, +then run `rush install` after cloning the repo. + +Otherwise, use npm to install this package in your application as follows + +```javascript +npm install @azure/core-paging +``` + +## Key concepts + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/ms-rest-js/blob/master/docs/architectureOverview.md). 
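+The core helper is `getPagedAsyncIterator`, which turns a `PagedResult` description of a paged operation into a `PagedAsyncIterableIterator`. The following is a minimal, self-contained sketch (illustrative only; the inline `PagedResult` object is invented for demonstration and is not a real Azure service client):
+
+```typescript
+import { getPagedAsyncIterator, PagedResult } from "@azure/core-paging";
+
+// A fake two-page data source; real clients build this from service responses.
+const pagedResult: PagedResult<number[]> = {
+  firstPageLink: "page-1",
+  getPage: async (pageLink) => ({
+    page: pageLink === "page-1" ? [1, 2, 3] : [4, 5],
+    nextPageLink: pageLink === "page-1" ? "page-2" : undefined,
+  }),
+};
+
+// Iterate element by element...
+for await (const item of getPagedAsyncIterator<number>(pagedResult)) {
+  console.log(item);
+}
+
+// ...or a page at a time.
+for await (const page of getPagedAsyncIterator<number>(pagedResult).byPage()) {
+  console.log(page);
+}
+```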
+ +## Examples + +Example of building with the types: + +```typescript + public listSecrets( + options: ListSecretsOptions = {} + ): PagedAsyncIterableIterator { + const iter = this.listSecretsAll(options); + return { + async next() { return iter.next(); }, + [Symbol.asyncIterator]() { return this; }, + byPage: (settings: PageSettings = {}) => this.listSecretsPage(settings, options), + }; + } +``` + +And using the types: + +``` + for await (let page of client.listSecrets().byPage({ maxPageSize: 2 })) { + for (const secret of page) { + console.log("secret: ", secret); + } + } +``` + +## Next steps + +Try out this package in your application when dealing with async iterable iterators and provide feedback! + +## Troubleshooting + +Log an issue at https://github.com/Azure/azure-sdk-for-js/issues + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-paging%2FREADME.png) diff --git a/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js new file mode 100644 index 0000000000..d77960b286 --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export function getPagedAsyncIterator(pagedResult) { + var _a; + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), + }; +} +function getItemAsyncIterator(pagedResult) { + return __asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var e_1, _a; + const pages = getPageAsyncIterator(pagedResult); + const firstVal = yield __await(pages.next()); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + yield yield __await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield __await(yield* __asyncDelegator(__asyncValues(pages))); + } + else { + yield __await(yield* __asyncDelegator(__asyncValues(firstVal.value))); + try { + for (var pages_1 = __asyncValues(pages), pages_1_1; pages_1_1 = yield __await(pages_1.next()), !pages_1_1.done;) { + const page = pages_1_1.value; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. 
In this branch, + // it must be the case that `TPage = TElement[]` + yield __await(yield* __asyncDelegator(__asyncValues(page))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (pages_1_1 && !pages_1_1.done && (_a = pages_1.return)) yield __await(_a.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); +} +function getPageAsyncIterator(pagedResult, options = {}) { + return __asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield __await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? pageLink : pagedResult.firstPageLink, maxPageSize)); + yield yield __await(response.page); + while (response.nextPageLink) { + response = yield __await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + yield yield __await(response.page); + } + }); +} +//# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js.map b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js.map new file mode 100644 index 0000000000..daadfa97d3 --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/getPagedAsyncIterator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"getPagedAsyncIterator.js","sourceRoot":"","sources":["../../src/getPagedAsyncIterator.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAIlC;;;;;;GAMG;AACH,MAAM,UAAU,qBAAqB,CAMnC,WAAqD;;IAErD,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI;YACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;QACrB,CAAC;QACD,CAAC,MAAM,CAAC,aAAa,CAAC;YACpB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,MAAM,EACJ,MAAA,WAAW,aAAX,WAAW,uBAAX,WAAW,CAAE,MAAM,mCACnB,CAAC,CAAC,QAAuB,EAAE,EAAE;YAC3B,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,aAAR,QAAQ,cAAR,QAAQ,GAAI,EAAE,CAAC;YAC1D,OAAO,oBAAoB,CAAC,WAAsD,EAAE;gBAClF,QAAQ,EAAE,iBAAiD;gBAC3D,WAAW;aACZ,CAAC,CAAC;QACL,CAAC,CAAC;KACL,CAAC;AACJ,CAAC;AAED,SAAgB,oBAAoB,CAClC,WAAqD;;;QAErD,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAChD,MAAM,QAAQ,GAAG,cAAM,KAAK,CAAC,IAAI,EAAE,CAAA,CAAC;QACpC,6FAA6F;QAC7F,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;YAClC,oBAAM,QAAQ,CAAC,KAAK,CAAA,CAAC;YACrB,sFAAsF;YACtF,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,KAAmD,CAAA,CAAA,CAAA,CAAC;SAC5D;aAAM;YACL,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,QAAQ,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;;gBACtB,KAAyB,IAAA,UAAA,cAAA,KAAK,CAAA,WAAA;oBAAnB,MAAM,IAAI,kBAAA,CAAA;oBACnB,gGAAgG;oBAChG,gDAAgD;oBAChD,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,IAA6B,CAAA,CAAA,CAAA,CAAC;iBACtC;;;;;;;;;SACF;IACH,CAAC;CAAA;AAED,SAAgB,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE;;QAEN,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;QAC1C,IAAI,QAAQ,GAAG,cAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,aAAR,QAAQ,cAAR,QAAQ,GAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAA,CAAC;QAC7F,oBAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;QACpB,OAAO,QAAQ,CAAC,YAAY,EAAE;YAC5B,QAAQ,GAAG,cAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAA,CAAC;YACzE,oBAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;SACrB;IACH,CAAC;CAAA","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models\";\n\n/**\n * returns an async iterator that iterates over results. 
It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string\n>(\n pagedResult: PagedResult\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n ((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? {};\n return getPageAsyncIterator(pagedResult as PagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {}\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n yield response.page;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/index.js b/node_modules/@azure/core-paging/dist-esm/src/index.js new file mode 100644 index 0000000000..8af0a5d397 --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./models"; +export * from "./getPagedAsyncIterator"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/index.js.map b/node_modules/@azure/core-paging/dist-esm/src/index.js.map new file mode 100644 index 0000000000..a0b2ac3f43 --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,UAAU,CAAC;AACzB,cAAc,yBAAyB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./models\";\nexport * from \"./getPagedAsyncIterator\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/models.js b/node_modules/@azure/core-paging/dist-esm/src/models.js new file mode 100644 index 0000000000..63155a9f04 --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/models.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
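+// Note: models.ts only declares TypeScript interfaces (PageSettings,
+// PagedAsyncIterableIterator, PagedResult), so this compiled module
+// intentionally contains no runtime exports.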
+export {}; +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist-esm/src/models.js.map b/node_modules/@azure/core-paging/dist-esm/src/models.js.map new file mode 100644 index 0000000000..8d3ed5ccea --- /dev/null +++ b/node_modules/@azure/core-paging/dist-esm/src/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * An interface that tracks the settings for paged iteration\n */\nexport interface PageSettings {\n /**\n * The token that keeps track of where to continue the iterator\n */\n continuationToken?: string;\n /**\n * The size of the page during paged iteration\n */\n maxPageSize?: number;\n}\n/**\n * An interface that allows async iterable iteration both to completion and by page.\n */\nexport interface PagedAsyncIterableIterator {\n /**\n * The next method, part of the iteration protocol\n */\n next(): Promise>;\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator](): PagedAsyncIterableIterator;\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings?: PageSettingsT) => AsyncIterableIterator;\n}\n\n/**\n * An interface that describes how to communicate with the service.\n */\nexport interface PagedResult {\n /**\n * Link to the first page of results.\n */\n firstPageLink: TLink;\n /**\n * A method that returns a page of results.\n */\n getPage: (\n pageLink: TLink,\n maxPageSize?: number\n ) => Promise<{ page: TPage; nextPageLink?: TLink }>;\n /**\n * a function to implement the `byPage` method on the paged async iterator. The default is\n * one that sets the `maxPageSizeParam` from `settings.maxPageSize`.\n */\n byPage?: (settings?: TPageSettings) => AsyncIterableIterator;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/index.js b/node_modules/@azure/core-paging/dist/index.js new file mode 100644 index 0000000000..0427571374 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/index.js @@ -0,0 +1,78 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var tslib = require('tslib'); + +// Copyright (c) Microsoft Corporation. +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +function getPagedAsyncIterator(pagedResult) { + var _a; + const iter = getItemAsyncIterator(pagedResult); + return { + next() { + return iter.next(); + }, + [Symbol.asyncIterator]() { + return this; + }, + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), + }; +} +function getItemAsyncIterator(pagedResult) { + return tslib.__asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var e_1, _a; + const pages = getPageAsyncIterator(pagedResult); + const firstVal = yield tslib.__await(pages.next()); + // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + yield yield tslib.__await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(pages))); + } + else { + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(firstVal.value))); + try { + for (var pages_1 = tslib.__asyncValues(pages), pages_1_1; pages_1_1 = yield tslib.__await(pages_1.next()), !pages_1_1.done;) { + const page = pages_1_1.value; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(page))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (pages_1_1 && !pages_1_1.done && (_a = pages_1.return)) yield tslib.__await(_a.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); +} +function getPageAsyncIterator(pagedResult, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield tslib.__await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? pageLink : pagedResult.firstPageLink, maxPageSize)); + yield yield tslib.__await(response.page); + while (response.nextPageLink) { + response = yield tslib.__await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + yield yield tslib.__await(response.page); + } + }); +} + +exports.getPagedAsyncIterator = getPagedAsyncIterator; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-paging/dist/index.js.map b/node_modules/@azure/core-paging/dist/index.js.map new file mode 100644 index 0000000000..f93de0c547 --- /dev/null +++ b/node_modules/@azure/core-paging/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/getPagedAsyncIterator.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { PageSettings, PagedAsyncIterableIterator, PagedResult } from \"./models\";\n\n/**\n * returns an async iterator that iterates over results. It also has a `byPage`\n * method that returns pages of items at once.\n *\n * @param pagedResult - an object that specifies how to get pages.\n * @returns a paged async iterator that iterates over results.\n */\nexport function getPagedAsyncIterator<\n TElement,\n TPage = TElement[],\n TPageSettings = PageSettings,\n TLink = string\n>(\n pagedResult: PagedResult\n): PagedAsyncIterableIterator {\n const iter = getItemAsyncIterator(pagedResult);\n return {\n next() {\n return iter.next();\n },\n [Symbol.asyncIterator]() {\n return this;\n },\n byPage:\n pagedResult?.byPage ??\n ((settings?: PageSettings) => {\n const { continuationToken, maxPageSize } = settings ?? {};\n return getPageAsyncIterator(pagedResult as PagedResult, {\n pageLink: continuationToken as unknown as TLink | undefined,\n maxPageSize,\n });\n }),\n };\n}\n\nasync function* getItemAsyncIterator(\n pagedResult: PagedResult\n): AsyncIterableIterator {\n const pages = getPageAsyncIterator(pagedResult);\n const firstVal = await pages.next();\n // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is\n if (!Array.isArray(firstVal.value)) {\n yield firstVal.value;\n // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case\n yield* pages as unknown as AsyncIterableIterator;\n } else {\n yield* firstVal.value;\n for await (const page of pages) {\n // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch,\n // it must be the case that `TPage = TElement[]`\n yield* page as unknown as TElement[];\n }\n }\n}\n\nasync function* getPageAsyncIterator(\n pagedResult: PagedResult,\n options: {\n maxPageSize?: number;\n pageLink?: TLink;\n } = {}\n): AsyncIterableIterator {\n const { pageLink, maxPageSize } = options;\n let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize);\n yield response.page;\n while (response.nextPageLink) {\n response = await pagedResult.getPage(response.nextPageLink, maxPageSize);\n yield response.page;\n }\n}\n"],"names":["__await","__asyncDelegator","__asyncValues"],"mappings":";;;;;;AAAA;AAKA;;;;;;AAMG;AACG,SAAU,qBAAqB,CAMnC,WAAqD,EAAA;;AAErD,IAAA,MAAM,IAAI,GAAG,oBAAoB,CAAwC,WAAW,CAAC,CAAC;IACtF,OAAO;QACL,IAAI,GAAA;AACF,YAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;SACpB;QACD,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,YAAA,OAAO,IAAI,CAAC;SACb;AACD,QAAA,MAAM,EACJ,CAAA,EAAA,GAAA,WAAW,KAAA,IAAA,IAAX,WAAW,KAAX,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,WAAW,CAAE,MAAM,MACnB,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,IAAC,CAAC,QAAuB,KAAI;AAC3B,YAAA,MAAM,EAAE,iBAAiB,EAAE,WAAW,EAAE,GAAG,QAAQ,KAAA,IAAA,IAAR,QAAQ,KAAA,KAAA,CAAA,GAAR,QAAQ,GAAI,EAAE,CAAC;YAC1D,OAAO,oBAAoB,CAAC,WAAsD,EAAE;AAClF,gBAAA,QAAQ,EAAE,iBAAiD;gBAC3D,WAAW;AACZ,aAAA,CAAC,CAAC;AACL,SAAC,CAAC;KACL,CAAC;AACJ,CAAC;AAED,SAAgB,oBAAoB,CAClC,WAAqD,EAAA;;;AAErD,QAAA,MAAM,KAAK,GAAG,oBAAoB,CAAC,WAAW,CAAC,CAAC;QAChD,MAAM,QAAQ,GAAG,MAAMA,aAAA,CAAA,KAAK,CAAC,IAAI,EAAE,CAAA,CAAC;;QAEpC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE;AAClC,YAAA,MAAA,MAAAA,aAAA,CAAM,QAAQ,CAAC,KAAK,CAAA,CAAC;;YAErB,MAAAA,aAAA,CAAA,OAAOC,sBAAA,CAAAC,oBAAA,KAAmD,CAAA,CAAA,CAAA,CAAC;AAC5D,SAAA;AAAM,aAAA;YACL,MAAAF,aAAA,CAAA,OAAOC,sBAAA,CAAAC,mBAAA,CAAA,QAAQ,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;;AACtB,gBAAA,KAAyB,IAAA,OAAA,GAAAA,mBAAA,CAAA,KAAK,CAAA,EAAA,SAAA,EAAA,SAAA,GAAA,MAAAF,aAAA,CAAA,OAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,SAAA,CAAA,IAAA,GAAA;oBAAnB,MAAM,IAAI,kBAAA,CAAA;;;oBAGnB,MAAAA,aAAA,CAAA,OAAOC,sBAAA,CAAAC,oBAAA,IAA6B,CAAA,CAAA,CAAA,CAAC;AACtC,iBAAA;;;;;;;;;AACF,SAAA;KACF,CAAA,CAAA;AAAA,CAAA;AAED,SAAgB,oBAAoB,CAClC,WAAqD,EACrD,UAGI,EAAE,EAAA;;AAEN,QAAA,MAAM,EAAE,QAAQ,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;QAC1C,IAAI,QAAQ,GAAG,MAAMF,aAAA,CAAA,WAAW,CAAC,OAAO,CAAC,QAAQ,KAAR,IAAA,IAAA,QAAQ,cAAR,QAAQ,GAAI,WAAW,CAAC,aAAa,EAAE,WAAW,CAAC,CAAA,CAAC;AAC7F,QAAA,MAAA,MAAAA,aAAA,CAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;QACpB,OAAO,QAAQ,CAAC,YAAY,EAAE;AAC5B,YAAA,QAAQ,GAAG,MAAAA,aAAA,CAAM,WAAW,CAAC,OAAO,CAAC,QAAQ,CAAC,YAAY,EAAE,WAAW,CAAC,CAAA,CAAC;AACzE,YAAA,MAAA,MAAAA,aAAA,CAAM,QAAQ,CAAC,IAAI,CAAA,CAAC;AACrB,SAAA;KACF,CAAA,CAAA;AAAA;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-paging/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-paging/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-paging/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/README.md b/node_modules/@azure/core-paging/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. 
+ +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-paging/node_modules/tslib/modules/index.js b/node_modules/@azure/core-paging/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-paging/node_modules/tslib/modules/package.json b/node_modules/@azure/core-paging/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/package.json b/node_modules/@azure/core-paging/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-paging/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. 
+ * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance.
+ * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`.
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.html b/node_modules/@azure/core-paging/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-paging/node_modules/tslib/tslib.js b/node_modules/@azure/core-paging/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/core-paging/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-paging/package.json b/node_modules/@azure/core-paging/package.json new file mode 100644 index 0000000000..3bfdc6ea32 --- /dev/null +++ b/node_modules/@azure/core-paging/package.json @@ -0,0 +1,108 @@ +{ + "name": "@azure/core-paging", + "author": "Microsoft Corporation", + "sdk-type": "client", + "version": "1.3.0", + "description": "Core types for paging async iterable iterators", + "tags": [ + "microsoft", + "clientruntime" + ], + "keywords": [ + "microsoft", + "clientruntime", + "azure", + "cloud" + ], + "main": "dist/index.js", + "module": "dist-esm/src/index.js", + "types": 
"./types/latest/core-paging.d.ts", + "typesVersions": { + "<3.6": { + "types/latest/*": [ + "types/3.1/*" + ] + } + }, + "files": [ + "types/latest/core-paging.d.ts", + "types/3.1", + "dist/", + "dist-esm/src/", + "LICENSE", + "README.md" + ], + "engines": { + "node": ">=12.0.0" + }, + "license": "MIT", + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/core-paging/README.md", + "repository": "github:Azure/azure-sdk-for-js", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "echo skipped", + "build:types": "downlevel-dts types/latest/ types/3.1/", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp *.tgz types *.log", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint": "eslint package.json src --ext .ts", + "lint:fix": "eslint package.json src --ext .ts --fix --fix-type [problem,suggestion]", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . 
&& npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 50000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "sideEffects": true, + "private": false, + "dependencies": { + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@microsoft/api-extractor": "7.18.11", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "downlevel-dts": "^0.8.0", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "prettier": "^2.5.1", + "rimraf": "^3.0.0", + "typescript": "~4.6.0" + }, + "//sampleConfiguration": { + "skipFolder": true, + "disableDocsMs": true, + "productName": "Azure SDK Core", + "productSlugs": [ + "azure" + ] + } +} diff --git a/node_modules/@azure/core-paging/types/3.1/core-paging.d.ts b/node_modules/@azure/core-paging/types/3.1/core-paging.d.ts new file mode 100644 index 0000000000..72a7216cd3 --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/core-paging.d.ts @@ -0,0 +1,60 @@ +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export declare interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: PageSettingsT) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export declare interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + }>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. 
+ */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; +} +/** + * An interface that tracks the settings for paged iteration + */ +export declare interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +export {}; diff --git a/node_modules/@azure/core-paging/types/3.1/samples-dev/getPagedAsyncIteratorSample.d.ts b/node_modules/@azure/core-paging/types/3.1/samples-dev/getPagedAsyncIteratorSample.d.ts new file mode 100644 index 0000000000..deb1df2ea0 --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/samples-dev/getPagedAsyncIteratorSample.d.ts @@ -0,0 +1,2 @@ +export declare function main(): Promise; +//# sourceMappingURL=getPagedAsyncIteratorSample.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/src/getPagedAsyncIterator.d.ts b/node_modules/@azure/core-paging/types/3.1/src/getPagedAsyncIterator.d.ts new file mode 100644 index 0000000000..cc5517ee54 --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/src/getPagedAsyncIterator.d.ts @@ -0,0 +1,10 @@ +import { PageSettings, PagedAsyncIterableIterator, PagedResult } from "./models"; +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; +//# sourceMappingURL=getPagedAsyncIterator.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/src/index.d.ts b/node_modules/@azure/core-paging/types/3.1/src/index.d.ts new file mode 100644 index 0000000000..b116c0313b --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/src/index.d.ts @@ -0,0 +1,3 @@ +export * from "./models"; +export * from "./getPagedAsyncIterator"; +//# sourceMappingURL=index.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/src/models.d.ts b/node_modules/@azure/core-paging/types/3.1/src/models.d.ts new file mode 100644 index 0000000000..f7cd300030 --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/src/models.d.ts @@ -0,0 +1,52 @@ +/** + * An interface that tracks the settings for paged iteration + */ +export interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: PageSettingsT) => AsyncIterableIterator; +} +/** + * An interface that describes how to communicate with the service. + */ +export interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. 
+ */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + }>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; +} +//# sourceMappingURL=models.d.ts.map diff --git a/node_modules/@azure/core-paging/types/3.1/test/getPagedAsyncIterator.spec.d.ts b/node_modules/@azure/core-paging/types/3.1/test/getPagedAsyncIterator.spec.d.ts new file mode 100644 index 0000000000..5aba89c23c --- /dev/null +++ b/node_modules/@azure/core-paging/types/3.1/test/getPagedAsyncIterator.spec.d.ts @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=getPagedAsyncIterator.spec.d.ts.map diff --git a/node_modules/@azure/core-paging/types/latest/core-paging.d.ts b/node_modules/@azure/core-paging/types/latest/core-paging.d.ts new file mode 100644 index 0000000000..7759fe3cc3 --- /dev/null +++ b/node_modules/@azure/core-paging/types/latest/core-paging.d.ts @@ -0,0 +1,64 @@ +/** + * returns an async iterator that iterates over results. It also has a `byPage` + * method that returns pages of items at once. + * + * @param pagedResult - an object that specifies how to get pages. + * @returns a paged async iterator that iterates over results. + */ +export declare function getPagedAsyncIterator(pagedResult: PagedResult): PagedAsyncIterableIterator; + +/** + * An interface that allows async iterable iteration both to completion and by page. + */ +export declare interface PagedAsyncIterableIterator { + /** + * The next method, part of the iteration protocol + */ + next(): Promise>; + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator](): PagedAsyncIterableIterator; + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings?: PageSettingsT) => AsyncIterableIterator; +} + +/** + * An interface that describes how to communicate with the service. + */ +export declare interface PagedResult { + /** + * Link to the first page of results. + */ + firstPageLink: TLink; + /** + * A method that returns a page of results. + */ + getPage: (pageLink: TLink, maxPageSize?: number) => Promise<{ + page: TPage; + nextPageLink?: TLink; + }>; + /** + * a function to implement the `byPage` method on the paged async iterator. The default is + * one that sets the `maxPageSizeParam` from `settings.maxPageSize`. + */ + byPage?: (settings?: TPageSettings) => AsyncIterableIterator; +} + +/** + * An interface that tracks the settings for paged iteration + */ +export declare interface PageSettings { + /** + * The token that keeps track of where to continue the iterator + */ + continuationToken?: string; + /** + * The size of the page during paged iteration + */ + maxPageSize?: number; +} + +export { } diff --git a/node_modules/@azure/core-tracing/CHANGELOG.md b/node_modules/@azure/core-tracing/CHANGELOG.md new file mode 100644 index 0000000000..ee8b4b0471 --- /dev/null +++ b/node_modules/@azure/core-tracing/CHANGELOG.md @@ -0,0 +1,81 @@ +# Release History + +## 1.0.0-preview.13 (2021-07-15) + +### Features Added + +- Added support for disabling distributed tracing using the AZURE_TRACING_DISABLED environment variable. + +### Breaking Changes + +- Removed `TestTracer` and `TestSpan` from public API and into `@azure/test-utils`. 
[PR #16315](https://github.com/Azure/azure-sdk-for-js/pull/16315) + - `TestTracer` and `TestSpan` are intended for test support when used by other Azure packages and not intended for use by end users. +- Removed `setTracer`, @azure/core-tracing will now rely on the `trace` API to fetch a tracer from the global tracer provider. [PR #16347](https://github.com/Azure/azure-sdk-for-js/pull/16347) + - If you are using `setTracer`, please use `trace.setGlobalTracerProvider(provider)` instead as described in the OpenTelemetry documentation. +- Removed `NoOpTracer` and `NoOpSpan` from the public API. Please use `trace.wrapSpanContext(INVALID_SPAN_CONTEXT)` from `@opentelemetry/api` instead. [PR #16315](https://github.com/Azure/azure-sdk-for-js/pull/16315) + +## 1.0.0-preview.12 (2021-06-30) + +- Update `@opentelemetry/api` to version 1.0.0 [PR #15883](https://github.com/Azure/azure-sdk-for-js/pull/15883) + - This version ships with ESM modules and fixes an issue where Angular projects would warn ab out optimization bailouts due to dependencies on CommonJS or AMD. + +### Breaking Changes + +- Removed `OpenCensusSpanWrapper` and `OpenCensusTracerWrapper` from the public API. Customers using these wrappers should migrate to using `OpenTelemetry` directly. [PR #15770](https://github.com/Azure/azure-sdk-for-js/pull/15770) +- Update `@azure/core-tracing` to version 1.0.0-preview.12. This brings core-tracing up to date with `@opentelemetry/api@1.0.0`. + - `Span#context` was renamed to `Span#spanContext`. This change is supported in `@azure/core-http@1.2.7`. + +## 1.0.0-preview.11 (2021-03-30) + +### Breaking Changes + +- Update @azure/core-tracing to version 1.0.0-preview.11. This brings core-tracing up to date with @opentelemetry/api@1.0.0-rc.0. + There are two scenarios that will require changes if you are using tracing: + - Previously, you would pass a parent span using the `OperationOptions.tracingOptions.spanOptions.parentSpan` property. This has been + changed so that you now specify a parent `Context` using the `OperationOptions.tracingOptions.tracingContext` property instead. + - The status code for Spans is no longer of type `CanonicalCode`. Instead, it's now `SpanStatusCode`, which also has a smaller range of values. + +## 1.0.0-preview.10 (2021-03-04) + +- Internal improvements to make future opentelemetry updates simpler. + +## 1.0.0-preview.9 (2020-08-04) + +- Update `@opentelemetry/api` to version 0.10.2 [PR 10393](https://github.com/Azure/azure-sdk-for-js/pull/10393) + +## 1.0.0-preview.8 (2020-04-28) + +- Update `TestSpan` to allow setting span attributes [PR link](https://github.com/Azure/azure-sdk-for-js/pull/6565). +- [BREAKING] Migrate to OpenTelemetry 0.6 using the new `@opentelemetry/api` package. There were a few breaking changes: + - `SpanContext` now requires traceFlags to be set. + - `Tracer` has removed `recordSpanData`, `getBinaryFormat`, and `getHttpTextFormat`. + - `Tracer.getCurrentSpan` returns `undefined` instead of `null` when unset. + - `Link` objects renamed `spanContext` property to `context`. + +## 1.0.0-preview.7 (2019-12-03) + +- Updated the behavior of how incompatible versions of OpenTelemetry Tracer are handled. Now, errors will be thrown only if the user has manually set a Tracer. This means that incompatible versions will be silently ignored when tracing is not enabled. +- Updated to use OpenTelemetry 0.2 via the `@opentelemetry/types` package. There were two breaking changes in this update: + - `isRecordingEvents` on `Span` was renamed to `isRecording`. 
[PR link](https://github.com/open-telemetry/opentelemetry-js/pull/454) + - `addLink` was removed from `Span` as links are now only allowed to be added during span creation. This is possible by specifying any necessary links inside `SpanOptions`. [PR link](https://github.com/open-telemetry/opentelemetry-js/pull/449) + +## 1.0.0-preview.5 (2019-10-22) + +- Fixes issue where loading multiple copies of this module could result in the tracer set by `setTracer()` being reset. + +## 1.0.0-preview.4 (2019-10-08) + +- Remove dependency on the `debug` module to ensure compatibility with IE11 + +## 1.0.0-preview.3 (2019-10-07) + +- Updated to use the latest types from OpenTelemetry (PR [#5182](https://github.com/Azure/azure-sdk-for-js/pull/5182)) +- Clean up and refactored code for easier usage and testability. (PR [#5233](https://github.com/Azure/azure-sdk-for-js/pull/5233) and PR [#5283](https://github.com/Azure/azure-sdk-for-js/pull/5283)) + +## 1.0.0-preview.2 (2019-09-09) + +Updated the `OpenCensusSpanPlugin` & the `NoOpSpanPlugin` to support for retrieving span context. This allows updating of request headers with the right [span context](https://www.w3.org/TR/trace-context/#trace-context-http-headers-format). (PR [#4712](https://github.com/Azure/azure-sdk-for-js/pull/4712)) + +## 1.0.0-preview.1 (2019-08-05) + +Provides low-level interfaces and helper methods for tracing in Azure SDK diff --git a/node_modules/@azure/core-tracing/LICENSE b/node_modules/@azure/core-tracing/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/core-tracing/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/core-tracing/README.md b/node_modules/@azure/core-tracing/README.md new file mode 100644 index 0000000000..dc088cba65 --- /dev/null +++ b/node_modules/@azure/core-tracing/README.md @@ -0,0 +1,72 @@ +# Azure Core tracing library for JavaScript + +This is the core tracing library that provides low-level interfaces and helper methods for tracing in Azure SDK JavaScript libraries which work in the browser and Node.js. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/core-tracing +``` + +## Key Concepts + +The `@azure/core-tracing` package supports enabling tracing for Azure SDK packages, using an [OpenTelemetry](https://opentelemetry.io/) `Tracer`. + +By default, all libraries log with a `NoOpTracer` that takes no action. 
+To change this, you have to use `setTracer` to set a new default `Tracer`. + +## Examples + +### Example 1 - Setting an OpenTelemetry Tracer + +```js +const opentelemetry = require("@opentelemetry/api"); +const { BasicTracer, SimpleSpanProcessor } = require("@opentelemetry/tracing"); +const { ZipkinExporter } = require("@opentelemetry/exporter-zipkin"); +const { setTracer } = require("@azure/core-tracing"); + +const exporter = new ZipkinExporter({ + serviceName: "azure-tracing-sample" +}); +const tracer = new BasicTracer(); +tracer.addSpanProcessor(new SimpleSpanProcessor(exporter)); + +setTracer(tracer); + +const rootSpan = tracer.startSpan("root"); +const context = opentelemetry.setSpan(opentelemetry.context.active(), rootSpan); + +// Call some client library methods and pass rootSpan via tracingOptions. + +rootSpan.end(); +exporter.shutdown(); +``` + +### Example 2 - Passing current Context to library operations + +```js +// Given a BlobClient from @azure/storage-blob +const result = await blobClient.download(undefined, undefined, { + tracingOptions: { + tracingContext: context + } +}); +``` + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Fcore-tracing%2FREADME.png) diff --git a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js new file mode 100644 index 0000000000..b021cccc28 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { SpanKind, setSpan, context as otContext, getTracer } from "./interfaces"; +import { trace, INVALID_SPAN_CONTEXT } from "@opentelemetry/api"; +export function isTracingDisabled() { + var _a; + if (typeof process === "undefined") { + // not supported in browser for now without polyfills + return false; + } + const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { + return false; + } + return Boolean(azureTracingDisabledValue); +} +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +export function createSpanFunction(args) { + return function (operationName, operationOptions) { + const tracer = getTracer(); + const tracingOptions = (operationOptions === null || operationOptions === void 0 ? 
void 0 : operationOptions.tracingOptions) || {}; + const spanOptions = Object.assign({ kind: SpanKind.INTERNAL }, tracingOptions.spanOptions); + const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; + let span; + if (isTracingDisabled()) { + span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT); + } + else { + span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + } + if (args.namespace) { + span.setAttribute("az.namespace", args.namespace); + } + let newSpanOptions = tracingOptions.spanOptions || {}; + if (span.isRecording() && args.namespace) { + newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + } + const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span) }); + const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + return { + span, + updatedOptions: newOperationOptions + }; + }; +} +//# sourceMappingURL=createSpan.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map new file mode 100644 index 0000000000..ab64b6179d --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"createSpan.js","sourceRoot":"","sources":["../../src/createSpan.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAIL,QAAQ,EACR,OAAO,EACP,OAAO,IAAI,SAAS,EACpB,SAAS,EACV,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,KAAK,EAAE,oBAAoB,EAAE,MAAM,oBAAoB,CAAC;AAuBjE,MAAM,UAAU,iBAAiB;;IAC/B,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;QAClC,qDAAqD;QACrD,OAAO,KAAK,CAAC;KACd;IAED,MAAM,yBAAyB,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,sBAAsB,0CAAE,WAAW,EAAE,CAAC;IAEpF,IAAI,yBAAyB,KAAK,OAAO,IAAI,yBAAyB,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IAED,OAAO,OAAO,CAAC,yBAAyB,CAAC,CAAC;AAC5C,CAAC;AAED;;;;;;;;;;;;;;;;;GAiBG;AACH,MAAM,UAAU,kBAAkB,CAAC,IAA4B;IAC7D,OAAO,UACL,aAAqB,EACrB,gBAA+B;QAE/B,MAAM,MAAM,GAAG,SAAS,EAAE,CAAC;QAC3B,MAAM,cAAc,GAAG,CAAA,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAE,cAAc,KAAI,EAAE,CAAC;QAC9D,MAAM,WAAW,mBACf,IAAI,EAAE,QAAQ,CAAC,QAAQ,IACpB,cAAc,CAAC,WAAW,CAC9B,CAAC;QAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,aAAa,IAAI,aAAa,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC;QAE/F,IAAI,IAAU,CAAC;QACf,IAAI,iBAAiB,EAAE,EAAE;YACvB,IAAI,GAAG,KAAK,CAAC,eAAe,CAAC,oBAAoB,CAAC,CAAC;SACpD;aAAM;YACL,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE,WAAW,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;SAC/E;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SACnD;QAED,IAAI,cAAc,GAAG,cAAc,CAAC,WAAW,IAAI,EAAE,CAAC;QAEtD,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,IAAI,CAAC,SAAS,EAAE;YACxC,cAAc,mCACT,cAAc,CAAC,WAAW,KAC7B,UAAU,kCACL,WAAW,CAAC,UAAU,KACzB,cAAc,EAAE,IAAI,CAAC,SAAS,MAEjC,CAAC;SACH;QAED,MAAM,iBAAiB,mCAClB,cAAc,KACjB,WAAW,EAAE,cAAc,EAC3B,cAAc,EAAE,OAAO,CAAC,cAAc,CAAC,cAAc,IAAI,SAAS,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,GACnF,CAAC;QAEF,MAAM,mBAAmB,GAAG,gCACvB,gBAAgB,KACnB,cAAc,EAAE,iBAAiB,GAC2B,CAAC;QAE/D,OAAO;YACL,IAAI;YACJ,cAAc,EAAE,mBAAmB;SACpC,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n OperationTracingOptions,\n Span,\n SpanOptions,\n SpanKind,\n setSpan,\n context as otContext,\n 
getTracer\n} from \"./interfaces\";\nimport { trace, INVALID_SPAN_CONTEXT } from \"@opentelemetry/api\";\n\n/**\n * Arguments for `createSpanFunction` that allow you to specify the\n * prefix for each created span as well as the `az.namespace` attribute.\n *\n * @hidden\n */\nexport interface CreateSpanFunctionArgs {\n /**\n * Package name prefix.\n *\n * NOTE: if this is empty no prefix will be applied to created Span names.\n */\n packagePrefix: string;\n /**\n * Service namespace\n *\n * NOTE: if this is empty no `az.namespace` attribute will be added to created Spans.\n */\n namespace: string;\n}\n\nexport function isTracingDisabled(): boolean {\n if (typeof process === \"undefined\") {\n // not supported in browser for now without polyfills\n return false;\n }\n\n const azureTracingDisabledValue = process.env.AZURE_TRACING_DISABLED?.toLowerCase();\n\n if (azureTracingDisabledValue === \"false\" || azureTracingDisabledValue === \"0\") {\n return false;\n }\n\n return Boolean(azureTracingDisabledValue);\n}\n\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nexport function createSpanFunction(args: CreateSpanFunctionArgs) {\n return function(\n operationName: string,\n operationOptions: T | undefined\n ): { span: Span; updatedOptions: T } {\n const tracer = getTracer();\n const tracingOptions = operationOptions?.tracingOptions || {};\n const spanOptions: SpanOptions = {\n kind: SpanKind.INTERNAL,\n ...tracingOptions.spanOptions\n };\n\n const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName;\n\n let span: Span;\n if (isTracingDisabled()) {\n span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT);\n } else {\n span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext);\n }\n\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n\n let newSpanOptions = tracingOptions.spanOptions || {};\n\n if (span.isRecording() && args.namespace) {\n newSpanOptions = {\n ...tracingOptions.spanOptions,\n attributes: {\n ...spanOptions.attributes,\n \"az.namespace\": args.namespace\n }\n };\n }\n\n const newTracingOptions: Required = {\n ...tracingOptions,\n spanOptions: newSpanOptions,\n tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span)\n };\n\n const newOperationOptions = {\n ...operationOptions,\n tracingOptions: newTracingOptions\n } as T & { tracingOptions: Required };\n\n return {\n span,\n updatedOptions: newOperationOptions\n };\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/index.js b/node_modules/@azure/core-tracing/dist-esm/src/index.js new file mode 100644 index 0000000000..065b66c99d --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/index.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// Tracers and wrappers +export { createSpanFunction } from "./createSpan"; +// Shared interfaces +export { context, getSpan, getSpanContext, getTracer, isSpanContextValid, setSpan, setSpanContext, SpanKind, SpanStatusCode } from "./interfaces"; +// Utilities +export { extractSpanContextFromTraceParentHeader, getTraceParentHeader } from "./utils/traceParentHeader"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/index.js.map b/node_modules/@azure/core-tracing/dist-esm/src/index.js.map new file mode 100644 index 0000000000..37bcb7476c --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,uBAAuB;AACvB,OAAO,EAAE,kBAAkB,EAA0B,MAAM,cAAc,CAAC;AAE1E,oBAAoB;AACpB,OAAO,EACL,OAAO,EAOP,OAAO,EACP,cAAc,EACd,SAAS,EAET,kBAAkB,EAGlB,OAAO,EACP,cAAc,EAKd,QAAQ,EAGR,cAAc,EAKf,MAAM,cAAc,CAAC;AAEtB,YAAY;AACZ,OAAO,EACL,uCAAuC,EACvC,oBAAoB,EACrB,MAAM,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// Tracers and wrappers\nexport { createSpanFunction, CreateSpanFunctionArgs } from \"./createSpan\";\n\n// Shared interfaces\nexport {\n context,\n Context,\n ContextAPI,\n Exception,\n ExceptionWithCode,\n ExceptionWithMessage,\n ExceptionWithName,\n getSpan,\n getSpanContext,\n getTracer,\n HrTime,\n isSpanContextValid,\n Link,\n OperationTracingOptions,\n setSpan,\n setSpanContext,\n Span,\n SpanAttributes,\n SpanAttributeValue,\n SpanContext,\n SpanKind,\n SpanOptions,\n SpanStatus,\n SpanStatusCode,\n TimeInput,\n TraceFlags,\n Tracer,\n TraceState\n} from \"./interfaces\";\n\n// Utilities\nexport {\n extractSpanContextFromTraceParentHeader,\n getTraceParentHeader\n} from \"./utils/traceParentHeader\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js new file mode 100644 index 0000000000..e251b1ff9e --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js @@ -0,0 +1,102 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { context as otContext, trace as otTrace } from "@opentelemetry/api"; +/** + * The kind of span. + */ +export var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind || (SpanKind = {})); +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +export function getSpan(context) { + return otTrace.getSpan(context); +} +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +export function setSpan(context, span) { + return otTrace.setSpan(context, span); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +export function setSpanContext(context, spanContext) { + return otTrace.setSpanContext(context, spanContext); +} +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +export function getSpanContext(context) { + return otTrace.getSpanContext(context); +} +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +export function isSpanContextValid(context) { + return otTrace.isSpanContextValid(context); +} +export function getTracer(name, version) { + return otTrace.getTracer(name || "azure/core-tracing", version); +} +/** Entrypoint for context API */ +export const context = otContext; +/** SpanStatusCode */ +export var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. 
+ */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode || (SpanStatusCode = {})); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map new file mode 100644 index 0000000000..cd371cd92e --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js.map @@ -0,0 +1 @@ +{"version":3,"file":"interfaces.js","sourceRoot":"","sources":["../../src/interfaces.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,OAAO,IAAI,SAAS,EAAE,KAAK,IAAI,OAAO,EAAE,MAAM,oBAAoB,CAAC;AAoF5E;;GAEG;AACH,MAAM,CAAN,IAAY,QAyBX;AAzBD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IACZ;;;OAGG;IACH,2CAAU,CAAA;IACV;;;OAGG;IACH,2CAAU,CAAA;IACV;;;;OAIG;IACH,+CAAY,CAAA;IACZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EAzBW,QAAQ,KAAR,QAAQ,QAyBnB;AAqDD;;;;GAIG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB;IACtC,OAAO,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAClC,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB,EAAE,WAAwB;IACvE,OAAO,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACtD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB;IAC7C,OAAO,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;AACzC,CAAC;AAYD;;;;;;;GAOG;AACH,MAAM,UAAU,kBAAkB,CAAC,OAAoB;IACrD,OAAO,OAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;AAC7C,CAAC;AAUD,MAAM,UAAU,SAAS,CAAC,IAAa,EAAE,OAAgB;IACvD,OAAO,OAAO,CAAC,SAAS,CAAC,IAAI,IAAI,oBAAoB,EAAE,OAAO,CAAC,CAAC;AAClE,CAAC;AAED,iCAAiC;AACjC,MAAM,CAAC,MAAM,OAAO,GAAe,SAAS,CAAC;AAE7C,qBAAqB;AACrB,MAAM,CAAN,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,KAAd,cAAc,QAczB","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { context as otContext, trace as otTrace } from \"@opentelemetry/api\";\n\n/**\n * A Tracer.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. 
Start the span without setting it on context.\n *\n * This method does NOT modify the current Context.\n *\n * @param name - The name of the span\n * @param options - SpanOptions used for span creation\n * @param context - Context to use to extract parent\n * @returns The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n}\n\n/**\n * TraceState.\n */\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key - key of the TraceState entry.\n * @param value - value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key - the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key - with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n\n/**\n * Represents high resolution time.\n */\nexport declare type HrTime = [number, number];\n\n/**\n * Used to represent a Time.\n */\nexport type TimeInput = HrTime | number | Date;\n\n/**\n * The status for a span.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. */\n message?: string;\n}\n\n/**\n * The kind of span.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4\n}\n\n/**\n * An Exception for a Span.\n */\nexport declare type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n\n/**\n * An Exception with a code.\n */\nexport interface ExceptionWithCode {\n /** The code. */\n code: string | number;\n /** The name. */\n name?: string;\n /** The message. */\n message?: string;\n /** The stack. 
*/\n stack?: string;\n}\n\n/**\n * An Exception with a message.\n */\nexport interface ExceptionWithMessage {\n /** The code. */\n code?: string | number;\n /** The message. */\n message: string;\n /** The name. */\n name?: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * An Exception with a name.\n */\nexport interface ExceptionWithName {\n /** The code. */\n code?: string | number;\n /** The message. */\n message?: string;\n /** The name. */\n name: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * Return the span if one exists\n *\n * @param context - context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return otTrace.getSpan(context);\n}\n\n/**\n * Set the span on a context\n *\n * @param context - context to use as parent\n * @param span - span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return otTrace.setSpan(context, span);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context - context to set active span on\n * @param spanContext - span context to be wrapped\n */\nexport function setSpanContext(context: Context, spanContext: SpanContext): Context {\n return otTrace.setSpanContext(context, spanContext);\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context - context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return otTrace.getSpanContext(context);\n}\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport interface ContextAPI {\n /**\n * Get the currently active context\n */\n active(): Context;\n}\n\n/**\n * Returns true of the given {@link SpanContext} is valid.\n * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec.\n *\n * @param context - the {@link SpanContext} to validate.\n *\n * @returns true if the {@link SpanContext} is valid, false otherwise.\n */\nexport function isSpanContextValid(context: SpanContext): boolean {\n return otTrace.isSpanContextValid(context);\n}\n\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(): Tracer;\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(name: string, version?: string): Tracer;\nexport function getTracer(name?: string, version?: string): Tracer {\n return otTrace.getTracer(name || \"azure/core-tracing\", version);\n}\n\n/** Entrypoint for context API */\nexport const context: ContextAPI = otContext;\n\n/** SpanStatusCode */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2\n}\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key - the key for this attribute.\n * @param value - the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n /**\n * Sets attributes to the span.\n *\n * @param attributes - the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n /**\n * Adds an event to the Span.\n *\n * @param name - the name of the event.\n * @param attributesOrStartTime - the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is TimeInput and 3rd param is undefined\n * @param startTime - start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status - the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param endTime - the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name - the Span name.\n */\n updateName(name: string): this;\n}\n\n/**\n * Shorthand enum for common traceFlags values inside SpanContext\n */\nexport const enum TraceFlags {\n /** No flag set. */\n NONE = 0x0,\n /** Caller is collecting trace information. */\n SAMPLED = 0x1\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry\n */\nexport interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. 
The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n\n/**\n * Used to specify a span that is linked to another.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n\n /** A set of {@link SpanAttributes} on the link. */\n attributes?: SpanAttributes;\n}\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Attributes for a Span.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /**\n * The type of Span. Default to SpanKind.INTERNAL\n */\n kind?: SpanKind;\n\n /**\n * A manually specified start time for the created `Span` object.\n */\n startTime?: TimeInput;\n}\n\n/**\n * Tracing options to set on an operation.\n */\nexport interface OperationTracingOptions {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context to use for created Spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * OpenTelemetry compatible interface for Context\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js new file mode 100644 index 0000000000..90657bafe0 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const VERSION = "00"; +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. 
+ */ +export function extractSpanContextFromTraceParentHeader(traceParentHeader) { + const parts = traceParentHeader.split("-"); + if (parts.length !== 4) { + return; + } + const [version, traceId, spanId, traceOptions] = parts; + if (version !== VERSION) { + return; + } + const traceFlags = parseInt(traceOptions, 16); + const spanContext = { + spanId, + traceId, + traceFlags + }; + return spanContext; +} +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +export function getTraceParentHeader(spanContext) { + const missingFields = []; + if (!spanContext.traceId) { + missingFields.push("traceId"); + } + if (!spanContext.spanId) { + missingFields.push("spanId"); + } + if (missingFields.length) { + return; + } + const flags = spanContext.traceFlags || 0 /* NONE */; + const hexFlags = flags.toString(16); + const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; +} +//# sourceMappingURL=traceParentHeader.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map new file mode 100644 index 0000000000..d63dd9d23a --- /dev/null +++ b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"traceParentHeader.js","sourceRoot":"","sources":["../../../src/utils/traceParentHeader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,MAAM,OAAO,GAAG,IAAI,CAAC;AAErB;;;;GAIG;AACH,MAAM,UAAU,uCAAuC,CACrD,iBAAyB;IAEzB,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAE3C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,OAAO;KACR;IAED,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,YAAY,CAAC,GAAG,KAAK,CAAC;IAEvD,IAAI,OAAO,KAAK,OAAO,EAAE;QACvB,OAAO;KACR;IAED,MAAM,UAAU,GAAG,QAAQ,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAgB;QAC/B,MAAM;QACN,OAAO;QACP,UAAU;KACX,CAAC;IAEF,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAAC,WAAwB;IAC3D,MAAM,aAAa,GAAa,EAAE,CAAC;IACnC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KAC/B;IACD,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;QACvB,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC9B;IAED,IAAI,aAAa,CAAC,MAAM,EAAE;QACxB,OAAO;KACR;IAED,MAAM,KAAK,GAAG,WAAW,CAAC,UAAU,gBAAmB,CAAC;IACxD,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACpC,MAAM,UAAU,GAAG,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,QAAQ,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC;IAErE,uEAAuE;IACvE,OAAO,GAAG,OAAO,IAAI,WAAW,CAAC,OAAO,IAAI,WAAW,CAAC,MAAM,IAAI,UAAU,EAAE,CAAC;AACjF,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SpanContext, TraceFlags } from \"../interfaces\";\n\nconst VERSION = \"00\";\n\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nexport function extractSpanContextFromTraceParentHeader(\n traceParentHeader: string\n): SpanContext | undefined {\n const parts = traceParentHeader.split(\"-\");\n\n if (parts.length !== 4) {\n return;\n }\n\n const [version, traceId, spanId, traceOptions] = parts;\n\n if (version !== VERSION) {\n return;\n }\n\n 
const traceFlags = parseInt(traceOptions, 16);\n\n const spanContext: SpanContext = {\n spanId,\n traceId,\n traceFlags\n };\n\n return spanContext;\n}\n\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nexport function getTraceParentHeader(spanContext: SpanContext): string | undefined {\n const missingFields: string[] = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n\n if (missingFields.length) {\n return;\n }\n\n const flags = spanContext.traceFlags || TraceFlags.NONE;\n const hexFlags = flags.toString(16);\n const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags;\n\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/index.js b/node_modules/@azure/core-tracing/dist/index.js new file mode 100644 index 0000000000..527bbb4f18 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/index.js @@ -0,0 +1,219 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var api = require('@opentelemetry/api'); + +// Copyright (c) Microsoft Corporation. +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(exports.SpanKind || (exports.SpanKind = {})); +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +function getSpan(context) { + return api.trace.getSpan(context); +} +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +function setSpan(context, span) { + return api.trace.setSpan(context, span); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return api.trace.setSpanContext(context, spanContext); +} +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +function getSpanContext(context) { + return api.trace.getSpanContext(context); +} +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. 
+ * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +function isSpanContextValid(context) { + return api.trace.isSpanContextValid(context); +} +function getTracer(name, version) { + return api.trace.getTracer(name || "azure/core-tracing", version); +} +/** Entrypoint for context API */ +const context = api.context; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); + +// Copyright (c) Microsoft Corporation. +function isTracingDisabled() { + var _a; + if (typeof process === "undefined") { + // not supported in browser for now without polyfills + return false; + } + const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); + if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { + return false; + } + return Boolean(azureTracingDisabledValue); +} +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +function createSpanFunction(args) { + return function (operationName, operationOptions) { + const tracer = getTracer(); + const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; + const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); + const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; + let span; + if (isTracingDisabled()) { + span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); + } + else { + span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); + } + if (args.namespace) { + span.setAttribute("az.namespace", args.namespace); + } + let newSpanOptions = tracingOptions.spanOptions || {}; + if (span.isRecording() && args.namespace) { + newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); + } + const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); + const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); + return { + span, + updatedOptions: newOperationOptions + }; + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const VERSION = "00"; +/** + * Generates a `SpanContext` given a `traceparent` header value. 
+ * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. + */ +function extractSpanContextFromTraceParentHeader(traceParentHeader) { + const parts = traceParentHeader.split("-"); + if (parts.length !== 4) { + return; + } + const [version, traceId, spanId, traceOptions] = parts; + if (version !== VERSION) { + return; + } + const traceFlags = parseInt(traceOptions, 16); + const spanContext = { + spanId, + traceId, + traceFlags + }; + return spanContext; +} +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +function getTraceParentHeader(spanContext) { + const missingFields = []; + if (!spanContext.traceId) { + missingFields.push("traceId"); + } + if (!spanContext.spanId) { + missingFields.push("spanId"); + } + if (missingFields.length) { + return; + } + const flags = spanContext.traceFlags || 0 /* NONE */; + const hexFlags = flags.toString(16); + const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; + // https://www.w3.org/TR/trace-context/#traceparent-header-field-values + return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; +} + +exports.context = context; +exports.createSpanFunction = createSpanFunction; +exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; +exports.getSpan = getSpan; +exports.getSpanContext = getSpanContext; +exports.getTraceParentHeader = getTraceParentHeader; +exports.getTracer = getTracer; +exports.isSpanContextValid = isSpanContextValid; +exports.setSpan = setSpan; +exports.setSpanContext = setSpanContext; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-tracing/dist/index.js.map b/node_modules/@azure/core-tracing/dist/index.js.map new file mode 100644 index 0000000000..c1b0268e7c --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/interfaces.ts","../src/createSpan.ts","../src/utils/traceParentHeader.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { context as otContext, trace as otTrace } from \"@opentelemetry/api\";\n\n/**\n * A Tracer.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. 
Start the span without setting it on context.\n *\n * This method does NOT modify the current Context.\n *\n * @param name - The name of the span\n * @param options - SpanOptions used for span creation\n * @param context - Context to use to extract parent\n * @returns The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n}\n\n/**\n * TraceState.\n */\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key - key of the TraceState entry.\n * @param value - value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key - the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key - with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n\n/**\n * Represents high resolution time.\n */\nexport declare type HrTime = [number, number];\n\n/**\n * Used to represent a Time.\n */\nexport type TimeInput = HrTime | number | Date;\n\n/**\n * The status for a span.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. */\n message?: string;\n}\n\n/**\n * The kind of span.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4\n}\n\n/**\n * An Exception for a Span.\n */\nexport declare type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n\n/**\n * An Exception with a code.\n */\nexport interface ExceptionWithCode {\n /** The code. */\n code: string | number;\n /** The name. */\n name?: string;\n /** The message. */\n message?: string;\n /** The stack. 
*/\n stack?: string;\n}\n\n/**\n * An Exception with a message.\n */\nexport interface ExceptionWithMessage {\n /** The code. */\n code?: string | number;\n /** The message. */\n message: string;\n /** The name. */\n name?: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * An Exception with a name.\n */\nexport interface ExceptionWithName {\n /** The code. */\n code?: string | number;\n /** The message. */\n message?: string;\n /** The name. */\n name: string;\n /** The stack. */\n stack?: string;\n}\n\n/**\n * Return the span if one exists\n *\n * @param context - context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return otTrace.getSpan(context);\n}\n\n/**\n * Set the span on a context\n *\n * @param context - context to use as parent\n * @param span - span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return otTrace.setSpan(context, span);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context - context to set active span on\n * @param spanContext - span context to be wrapped\n */\nexport function setSpanContext(context: Context, spanContext: SpanContext): Context {\n return otTrace.setSpanContext(context, spanContext);\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context - context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return otTrace.getSpanContext(context);\n}\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport interface ContextAPI {\n /**\n * Get the currently active context\n */\n active(): Context;\n}\n\n/**\n * Returns true of the given {@link SpanContext} is valid.\n * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec.\n *\n * @param context - the {@link SpanContext} to validate.\n *\n * @returns true if the {@link SpanContext} is valid, false otherwise.\n */\nexport function isSpanContextValid(context: SpanContext): boolean {\n return otTrace.isSpanContextValid(context);\n}\n\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(): Tracer;\n/**\n * Retrieves a tracer from the global tracer provider.\n */\nexport function getTracer(name: string, version?: string): Tracer;\nexport function getTracer(name?: string, version?: string): Tracer {\n return otTrace.getTracer(name || \"azure/core-tracing\", version);\n}\n\n/** Entrypoint for context API */\nexport const context: ContextAPI = otContext;\n\n/** SpanStatusCode */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2\n}\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key - the key for this attribute.\n * @param value - the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n /**\n * Sets attributes to the span.\n *\n * @param attributes - the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n /**\n * Adds an event to the Span.\n *\n * @param name - the name of the event.\n * @param attributesOrStartTime - the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is TimeInput and 3rd param is undefined\n * @param startTime - start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status - the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param endTime - the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception - the exception the only accepted values are string or Error\n * @param time - the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name - the Span name.\n */\n updateName(name: string): this;\n}\n\n/**\n * Shorthand enum for common traceFlags values inside SpanContext\n */\nexport const enum TraceFlags {\n /** No flag set. */\n NONE = 0x0,\n /** Caller is collecting trace information. */\n SAMPLED = 0x1\n}\n\n/**\n * A light interface that tries to be structurally compatible with OpenTelemetry\n */\nexport interface SpanContext {\n /**\n * UUID of a trace.\n */\n traceId: string;\n /**\n * UUID of a Span.\n */\n spanId: string;\n /**\n * https://www.w3.org/TR/trace-context/#trace-flags\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. 
The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n\n/**\n * Used to specify a span that is linked to another.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n\n /** A set of {@link SpanAttributes} on the link. */\n attributes?: SpanAttributes;\n}\n\n/**\n * Attributes for a Span.\n */\nexport interface SpanAttributes {\n /**\n * Attributes for a Span.\n */\n [attributeKey: string]: SpanAttributeValue | undefined;\n}\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport declare type SpanAttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n\n/**\n * An interface that enables manual propagation of Spans\n */\nexport interface SpanOptions {\n /**\n * Attributes to set on the Span\n */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /**\n * The type of Span. Default to SpanKind.INTERNAL\n */\n kind?: SpanKind;\n\n /**\n * A manually specified start time for the created `Span` object.\n */\n startTime?: TimeInput;\n}\n\n/**\n * Tracing options to set on an operation.\n */\nexport interface OperationTracingOptions {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context to use for created Spans.\n */\n tracingContext?: Context;\n}\n\n/**\n * OpenTelemetry compatible interface for Context\n */\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key - key which identifies a context value\n */\n getValue(key: symbol): unknown;\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key - context key for which to set the value\n * @param value - value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key - context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n OperationTracingOptions,\n Span,\n SpanOptions,\n SpanKind,\n setSpan,\n context as otContext,\n getTracer\n} from \"./interfaces\";\nimport { trace, INVALID_SPAN_CONTEXT } from \"@opentelemetry/api\";\n\n/**\n * Arguments for `createSpanFunction` that allow you to specify the\n * prefix for each created span as well as the `az.namespace` attribute.\n *\n * @hidden\n */\nexport interface CreateSpanFunctionArgs {\n /**\n * Package name prefix.\n *\n * NOTE: if this is empty no prefix will be applied to created Span names.\n */\n packagePrefix: string;\n /**\n * Service namespace\n *\n * NOTE: if this is empty no `az.namespace` attribute will be added to created 
Spans.\n */\n namespace: string;\n}\n\nexport function isTracingDisabled(): boolean {\n if (typeof process === \"undefined\") {\n // not supported in browser for now without polyfills\n return false;\n }\n\n const azureTracingDisabledValue = process.env.AZURE_TRACING_DISABLED?.toLowerCase();\n\n if (azureTracingDisabledValue === \"false\" || azureTracingDisabledValue === \"0\") {\n return false;\n }\n\n return Boolean(azureTracingDisabledValue);\n}\n\n/**\n * Creates a function that can be used to create spans using the global tracer.\n *\n * Usage:\n *\n * ```typescript\n * // once\n * const createSpan = createSpanFunction({ packagePrefix: \"Azure.Data.AppConfiguration\", namespace: \"Microsoft.AppConfiguration\" });\n *\n * // in each operation\n * const span = createSpan(\"deleteConfigurationSetting\", operationOptions);\n * // code...\n * span.end();\n * ```\n *\n * @hidden\n * @param args - allows configuration of the prefix for each span as well as the az.namespace field.\n */\nexport function createSpanFunction(args: CreateSpanFunctionArgs) {\n return function(\n operationName: string,\n operationOptions: T | undefined\n ): { span: Span; updatedOptions: T } {\n const tracer = getTracer();\n const tracingOptions = operationOptions?.tracingOptions || {};\n const spanOptions: SpanOptions = {\n kind: SpanKind.INTERNAL,\n ...tracingOptions.spanOptions\n };\n\n const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName;\n\n let span: Span;\n if (isTracingDisabled()) {\n span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT);\n } else {\n span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext);\n }\n\n if (args.namespace) {\n span.setAttribute(\"az.namespace\", args.namespace);\n }\n\n let newSpanOptions = tracingOptions.spanOptions || {};\n\n if (span.isRecording() && args.namespace) {\n newSpanOptions = {\n ...tracingOptions.spanOptions,\n attributes: {\n ...spanOptions.attributes,\n \"az.namespace\": args.namespace\n }\n };\n }\n\n const newTracingOptions: Required = {\n ...tracingOptions,\n spanOptions: newSpanOptions,\n tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span)\n };\n\n const newOperationOptions = {\n ...operationOptions,\n tracingOptions: newTracingOptions\n } as T & { tracingOptions: Required };\n\n return {\n span,\n updatedOptions: newOperationOptions\n };\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SpanContext, TraceFlags } from \"../interfaces\";\n\nconst VERSION = \"00\";\n\n/**\n * Generates a `SpanContext` given a `traceparent` header value.\n * @param traceParent - Serialized span context data as a `traceparent` header value.\n * @returns The `SpanContext` generated from the `traceparent` value.\n */\nexport function extractSpanContextFromTraceParentHeader(\n traceParentHeader: string\n): SpanContext | undefined {\n const parts = traceParentHeader.split(\"-\");\n\n if (parts.length !== 4) {\n return;\n }\n\n const [version, traceId, spanId, traceOptions] = parts;\n\n if (version !== VERSION) {\n return;\n }\n\n const traceFlags = parseInt(traceOptions, 16);\n\n const spanContext: SpanContext = {\n spanId,\n traceId,\n traceFlags\n };\n\n return spanContext;\n}\n\n/**\n * Generates a `traceparent` value given a span context.\n * @param spanContext - Contains context for a specific span.\n * @returns The `spanContext` represented as a `traceparent` value.\n */\nexport function getTraceParentHeader(spanContext: SpanContext): 
string | undefined {\n const missingFields: string[] = [];\n if (!spanContext.traceId) {\n missingFields.push(\"traceId\");\n }\n if (!spanContext.spanId) {\n missingFields.push(\"spanId\");\n }\n\n if (missingFields.length) {\n return;\n }\n\n const flags = spanContext.traceFlags || TraceFlags.NONE;\n const hexFlags = flags.toString(16);\n const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags;\n\n // https://www.w3.org/TR/trace-context/#traceparent-header-field-values\n return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`;\n}\n"],"names":["SpanKind","otTrace","otContext","SpanStatusCode","trace","INVALID_SPAN_CONTEXT"],"mappings":";;;;;;AAAA;AA0FA,WAAY,QAAQ;;IAElB,+CAAY,CAAA;;;;;IAKZ,2CAAU,CAAA;;;;;IAKV,2CAAU,CAAA;;;;;;IAMV,+CAAY,CAAA;;;;;;IAMZ,+CAAY,CAAA;AACd,CAAC,EAzBWA,gBAAQ,KAARA,gBAAQ,QAyBnB;AAqDD;;;;;SAKgB,OAAO,CAAC,OAAgB;IACtC,OAAOC,SAAO,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;AAClC,CAAC;AAED;;;;;;SAMgB,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAOA,SAAO,CAAC,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;AACxC,CAAC;AAED;;;;;;;SAOgB,cAAc,CAAC,OAAgB,EAAE,WAAwB;IACvE,OAAOA,SAAO,CAAC,cAAc,CAAC,OAAO,EAAE,WAAW,CAAC,CAAC;AACtD,CAAC;AAED;;;;;SAKgB,cAAc,CAAC,OAAgB;IAC7C,OAAOA,SAAO,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;AACzC,CAAC;AAYD;;;;;;;;SAQgB,kBAAkB,CAAC,OAAoB;IACrD,OAAOA,SAAO,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;AAC7C,CAAC;SAUe,SAAS,CAAC,IAAa,EAAE,OAAgB;IACvD,OAAOA,SAAO,CAAC,SAAS,CAAC,IAAI,IAAI,oBAAoB,EAAE,OAAO,CAAC,CAAC;AAClE,CAAC;AAED;MACa,OAAO,GAAeC,YAAU;AAG7C,WAAY,cAAc;;;;IAIxB,qDAAS,CAAA;;;;;IAKT,+CAAM,CAAA;;;;IAIN,qDAAS,CAAA;AACX,CAAC,EAdWC,sBAAc,KAAdA,sBAAc;;ACrP1B;AACA,SAkCgB,iBAAiB;;IAC/B,IAAI,OAAO,OAAO,KAAK,WAAW,EAAE;;QAElC,OAAO,KAAK,CAAC;KACd;IAED,MAAM,yBAAyB,GAAG,MAAA,OAAO,CAAC,GAAG,CAAC,sBAAsB,0CAAE,WAAW,EAAE,CAAC;IAEpF,IAAI,yBAAyB,KAAK,OAAO,IAAI,yBAAyB,KAAK,GAAG,EAAE;QAC9E,OAAO,KAAK,CAAC;KACd;IAED,OAAO,OAAO,CAAC,yBAAyB,CAAC,CAAC;AAC5C,CAAC;AAED;;;;;;;;;;;;;;;;;;AAkBA,SAAgB,kBAAkB,CAAC,IAA4B;IAC7D,OAAO,UACL,aAAqB,EACrB,gBAA+B;QAE/B,MAAM,MAAM,GAAG,SAAS,EAAE,CAAC;QAC3B,MAAM,cAAc,GAAG,CAAA,gBAAgB,aAAhB,gBAAgB,uBAAhB,gBAAgB,CAAE,cAAc,KAAI,EAAE,CAAC;QAC9D,MAAM,WAAW,mBACf,IAAI,EAAEH,gBAAQ,CAAC,QAAQ,IACpB,cAAc,CAAC,WAAW,CAC9B,CAAC;QAEF,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,GAAG,GAAG,IAAI,CAAC,aAAa,IAAI,aAAa,EAAE,GAAG,aAAa,CAAC;QAE/F,IAAI,IAAU,CAAC;QACf,IAAI,iBAAiB,EAAE,EAAE;YACvB,IAAI,GAAGI,SAAK,CAAC,eAAe,CAACC,wBAAoB,CAAC,CAAC;SACpD;aAAM;YACL,IAAI,GAAG,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE,WAAW,EAAE,cAAc,CAAC,cAAc,CAAC,CAAC;SAC/E;QAED,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;SACnD;QAED,IAAI,cAAc,GAAG,cAAc,CAAC,WAAW,IAAI,EAAE,CAAC;QAEtD,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,IAAI,CAAC,SAAS,EAAE;YACxC,cAAc,mCACT,cAAc,CAAC,WAAW,KAC7B,UAAU,kCACL,WAAW,CAAC,UAAU,KACzB,cAAc,EAAE,IAAI,CAAC,SAAS,MAEjC,CAAC;SACH;QAED,MAAM,iBAAiB,mCAClB,cAAc,KACjB,WAAW,EAAE,cAAc,EAC3B,cAAc,EAAE,OAAO,CAAC,cAAc,CAAC,cAAc,IAAIH,OAAS,CAAC,MAAM,EAAE,EAAE,IAAI,CAAC,GACnF,CAAC;QAEF,MAAM,mBAAmB,GAAG,gCACvB,gBAAgB,KACnB,cAAc,EAAE,iBAAiB,GAC2B,CAAC;QAE/D,OAAO;YACL,IAAI;YACJ,cAAc,EAAE,mBAAmB;SACpC,CAAC;KACH,CAAC;AACJ,CAAC;;ACzHD;AACA;AAIA,MAAM,OAAO,GAAG,IAAI,CAAC;AAErB;;;;;AAKA,SAAgB,uCAAuC,CACrD,iBAAyB;IAEzB,MAAM,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAE3C,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;QACtB,OAAO;KACR;IAED,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,EAAE,YAAY,CAAC,GAAG,KAAK,CAAC;IAEvD,IAAI,OAAO,KAAK,OAAO,EAAE;QACvB,OAAO;KACR;IAED,MAAM,UAAU,GAAG,QAAQ,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAgB;QAC/B,MAAM;QACN,OAAO;QACP,UAAU;KACX,CAAC;IAEF,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;;;AAKA,SAAgB,oBA
AoB,CAAC,WAAwB;IAC3D,MAAM,aAAa,GAAa,EAAE,CAAC;IACnC,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;QACxB,aAAa,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;KAC/B;IACD,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE;QACvB,aAAa,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC9B;IAED,IAAI,aAAa,CAAC,MAAM,EAAE;QACxB,OAAO;KACR;IAED,MAAM,KAAK,GAAG,WAAW,CAAC,UAAU,iBAAoB;IACxD,MAAM,QAAQ,GAAG,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC;IACpC,MAAM,UAAU,GAAG,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,IAAI,QAAQ,EAAE,GAAG,QAAQ,CAAC;;IAGrE,OAAO,GAAG,OAAO,IAAI,WAAW,CAAC,OAAO,IAAI,WAAW,CAAC,MAAM,IAAI,UAAU,EAAE,CAAC;AACjF,CAAC;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/core-tracing/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/LICENSE.txt b/node_modules/@azure/core-tracing/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/README.md b/node_modules/@azure/core-tracing/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. 
+When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
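As a quick, concrete illustration of the `--importHelpers` behaviour this README describes (a minimal sketch, assuming a down-level `"target"` such as `"ES5"`; the class and function names below are invented for the example and are not part of the tslib package):

```ts
// Illustrative only: with "importHelpers": true and "target": "ES5",
// compiling this file makes tsc emit calls to tslib's __extends and
// __awaiter/__generator helpers instead of inlining them here.
class Base {
    greet(): string {
        return "hello";
    }
}

class Derived extends Base {
    // `extends` is lowered via tslib.__extends
    async greetLater(): Promise<string> {
        // `async` is lowered via tslib.__awaiter / tslib.__generator
        return this.greet();
    }
}

new Derived().greetLater().then((msg) => console.log(msg));
```

With `"importHelpers": true`, the compiled CommonJS output of a file like this begins with `var tslib_1 = require("tslib");` and references `tslib_1.__extends` and `tslib_1.__awaiter`, which is the deduplication described above.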
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/modules/index.js b/node_modules/@azure/core-tracing/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/modules/package.json b/node_modules/@azure/core-tracing/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/package.json b/node_modules/@azure/core-tracing/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.d.ts b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. 
+ * @param state A WeakMap containing the private field value for an instance.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, get(o: T): V | undefined },
+    kind?: "f"
+): V;
+
+/**
+ * Emulates reading a private static field.
+ *
+ * @param receiver The object from which to read the private static field.
+ * @param state The class constructor containing the definition of the static field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates evaluating a private instance "get" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private "get" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates evaluating a private static "get" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "get" accessor.
+ * @param state The class constructor containing the definition of the static "get" accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "get" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    kind: "a",
+    f: () => V
+): V;
+
+/**
+ * Emulates reading a private instance method.
+ *
+ * @param receiver The instance from which to read a private method.
+ * @param state A WeakSet used to verify an instance supports the private method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private instance method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates reading a private static method.
+ *
+ * @param receiver The object from which to read the private static method.
+ * @param state The class constructor containing the definition of the static method.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The function to return as the private static method.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>(
+    receiver: T,
+    state: T,
+    kind: "m",
+    f: V
+): V;
+
+/**
+ * Emulates writing to a private instance field.
+ *
+ * @param receiver The instance on which to set a private field value.
+ * @param state A WeakMap used to store the private field value for an instance.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean, set(o: T, value: V): unknown },
+    value: V,
+    kind?: "f"
+): V;
+
+/**
+ * Emulates writing to a private static field.
+ *
+ * @param receiver The object on which to set the private static field.
+ * @param state The class constructor containing the definition of the private static field.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The descriptor that holds the static field value.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "f",
+    f: { value: V }
+): V;
+
+/**
+ * Emulates writing to a private instance "set" accessor.
+ *
+ * @param receiver The instance on which to evaluate the private instance "set" accessor.
+ * @param state A WeakSet used to verify an instance supports the private "set" accessor.
+ * @param value The value to store in the private accessor.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `state` doesn't have an entry for `receiver`.
+ */
+export declare function __classPrivateFieldSet<T extends object, V>(
+    receiver: T,
+    state: { has(o: T): boolean },
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Emulates writing to a private static "set" accessor.
+ *
+ * @param receiver The object on which to evaluate the private static "set" accessor.
+ * @param state The class constructor containing the definition of the static "set" accessor.
+ * @param value The value to store in the private field.
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method.
+ * @param f The "set" accessor function to evaluate.
+ *
+ * @throws {TypeError} If `receiver` is not `state`.
+ */
+export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>(
+    receiver: T,
+    state: T,
+    value: V,
+    kind: "a",
+    f: (v: V) => void
+): V;
+
+/**
+ * Checks for the existence of a private field/method/accessor.
+ *
+ * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member.
+ * @param receiver The object for which to test the presence of the private member.
+ */
+export declare function __classPrivateFieldIn(
+    state: (new (...args: any[]) => unknown) | { has(o: any): boolean },
+    receiver: unknown,
+): boolean;
+
+/**
+ * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`.
+ *
+ * @param object The local `exports` object.
+ * @param target The object to re-export from.
+ * @param key The property key of `target` to re-export.
+ * @param objectKey The property key to re-export as. Defaults to `key`.
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.html b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.html b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/node_modules/tslib/tslib.js b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/core-tracing/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/core-tracing/package.json b/node_modules/@azure/core-tracing/package.json new file mode 100644 index 0000000000..1cd7235e6f --- /dev/null +++ b/node_modules/@azure/core-tracing/package.json @@ -0,0 +1,99 @@ +{ + "name": "@azure/core-tracing", + "version": "1.0.0-preview.13", + "description": "Provides low-level interfaces and helper methods for tracing in Azure SDK", + "sdk-type": "client", + "main": "dist/index.js", + "module": "dist-esm/src/index.js", + "browser": { + "./dist-esm/src/utils/global.js": "./dist-esm/src/utils/global.browser.js" + }, + "types": "types/core-tracing.d.ts", + "scripts": { + "audit": 
"node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && rollup -c 2>&1", + "build": "tsc -p . && rollup -c 2>&1 && api-extractor run --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "prebuild": "npm run clean", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . && npm run unit-test:node && rollup -c 2>&1 && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "files": [ + "dist/", + "dist-esm/src/", + "types/core-tracing.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "tracing", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=12.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/README.md", + "sideEffects": false, + "dependencies": { + "@opentelemetry/api": "^1.0.1", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/core-auth": "^1.3.0", + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "7.7.11", + "@opentelemetry/tracing": "^0.22.0", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "eslint": "^7.15.0", + "inherits": "^2.0.3", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^1.18.0", + "prettier": "^1.16.4", + "rimraf": "^3.0.0", + "rollup": "^1.16.3", + "typescript": "~4.2.0", + "util": "^0.12.1", + 
"typedoc": "0.15.2", + "sinon": "^9.0.2", + "@types/sinon": "^9.0.4" + } +} diff --git a/node_modules/@azure/core-tracing/types/core-tracing.d.ts b/node_modules/@azure/core-tracing/types/core-tracing.d.ts new file mode 100644 index 0000000000..bdae78d066 --- /dev/null +++ b/node_modules/@azure/core-tracing/types/core-tracing.d.ts @@ -0,0 +1,531 @@ + +/** + * OpenTelemetry compatible interface for Context + */ +export declare interface Context { + /** + * Get a value from the context. + * + * @param key - key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key - context key for which to set the value + * @param value - value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. + * + * @param key - context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} + +/** Entrypoint for context API */ +export declare const context: ContextAPI; + +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare interface ContextAPI { + /** + * Get the currently active context + */ + active(): Context; +} + +/** + * Creates a function that can be used to create spans using the global tracer. + * + * Usage: + * + * ```typescript + * // once + * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); + * + * // in each operation + * const span = createSpan("deleteConfigurationSetting", operationOptions); + * // code... + * span.end(); + * ``` + * + * @hidden + * @param args - allows configuration of the prefix for each span as well as the az.namespace field. + */ +export declare function createSpanFunction(args: CreateSpanFunctionArgs): (operationName: string, operationOptions: T | undefined) => { + span: Span; + updatedOptions: T; +}; + +/** + * Arguments for `createSpanFunction` that allow you to specify the + * prefix for each created span as well as the `az.namespace` attribute. + * + * @hidden + */ +export declare interface CreateSpanFunctionArgs { + /** + * Package name prefix. + * + * NOTE: if this is empty no prefix will be applied to created Span names. + */ + packagePrefix: string; + /** + * Service namespace + * + * NOTE: if this is empty no `az.namespace` attribute will be added to created Spans. + */ + namespace: string; +} + +/** + * An Exception for a Span. + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; + +/** + * An Exception with a code. + */ +export declare interface ExceptionWithCode { + /** The code. */ + code: string | number; + /** The name. */ + name?: string; + /** The message. */ + message?: string; + /** The stack. */ + stack?: string; +} + +/** + * An Exception with a message. + */ +export declare interface ExceptionWithMessage { + /** The code. */ + code?: string | number; + /** The message. */ + message: string; + /** The name. */ + name?: string; + /** The stack. */ + stack?: string; +} + +/** + * An Exception with a name. + */ +export declare interface ExceptionWithName { + /** The code. */ + code?: string | number; + /** The message. */ + message?: string; + /** The name. */ + name: string; + /** The stack. 
*/ + stack?: string; +} + +/** + * Generates a `SpanContext` given a `traceparent` header value. + * @param traceParent - Serialized span context data as a `traceparent` header value. + * @returns The `SpanContext` generated from the `traceparent` value. + */ +export declare function extractSpanContextFromTraceParentHeader(traceParentHeader: string): SpanContext | undefined; + +/** + * Return the span if one exists + * + * @param context - context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; + +/** + * Get the span context of the span if it exists. + * + * @param context - context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; + +/** + * Generates a `traceparent` value given a span context. + * @param spanContext - Contains context for a specific span. + * @returns The `spanContext` represented as a `traceparent` value. + */ +export declare function getTraceParentHeader(spanContext: SpanContext): string | undefined; + +/** + * Retrieves a tracer from the global tracer provider. + */ +export declare function getTracer(): Tracer; + +/** + * Retrieves a tracer from the global tracer provider. + */ +export declare function getTracer(name: string, version?: string): Tracer; + +/** + * Represents high resolution time. + */ +export declare type HrTime = [number, number]; + +/** + * Returns true of the given {@link SpanContext} is valid. + * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. + * + * @param context - the {@link SpanContext} to validate. + * + * @returns true if the {@link SpanContext} is valid, false otherwise. + */ +export declare function isSpanContextValid(context: SpanContext): boolean; + +/** + * Used to specify a span that is linked to another. + */ +export declare interface Link { + /** The {@link SpanContext} of a linked span. */ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. */ + attributes?: SpanAttributes; +} + +/** + * Tracing options to set on an operation. + */ +export declare interface OperationTracingOptions { + /** + * OpenTelemetry SpanOptions used to create a span when tracing is enabled. + */ + spanOptions?: SpanOptions; + /** + * OpenTelemetry context to use for created Spans. + */ + tracingContext?: Context; +} + +/** + * Set the span on a context + * + * @param context - context to use as parent + * @param span - span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; + +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context - context to set active span on + * @param spanContext - span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; + +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export declare interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. 
+ * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key - the key for this attribute. + * @param value - the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes - the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name - the name of the event. + * @param attributesOrStartTime - the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is TimeInput and 3rd param is undefined + * @param startTime - start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status - the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param endTime - the time to set as Span's end time. If not provided, + * use the current time as the span's end time. + */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception - the exception the only accepted values are string or Error + * @param time - the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name - the Span name. + */ + updateName(name: string): this; +} + +/** + * Attributes for a Span. + */ +export declare interface SpanAttributes { + /** + * Attributes for a Span. + */ + [attributeKey: string]: SpanAttributeValue | undefined; +} + +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type SpanAttributeValue = string | number | boolean | Array | Array | Array; + +/** + * A light interface that tries to be structurally compatible with OpenTelemetry + */ +export declare interface SpanContext { + /** + * UUID of a trace. + */ + traceId: string; + /** + * UUID of a Span. 
+ */ + spanId: string; + /** + * https://www.w3.org/TR/trace-context/#trace-flags + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} + +/** + * The kind of span. + */ +export declare enum SpanKind { + /** Default value. Indicates that the span is used internally. */ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} + +/** + * An interface that enables manual propagation of Spans + */ +export declare interface SpanOptions { + /** + * Attributes to set on the Span + */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** + * The type of Span. Default to SpanKind.INTERNAL + */ + kind?: SpanKind; + /** + * A manually specified start time for the created `Span` object. + */ + startTime?: TimeInput; +} + +/** + * The status for a span. + */ +export declare interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} + +/** SpanStatusCode */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} + +/** + * Used to represent a Time. + */ +export declare type TimeInput = HrTime | number | Date; + +/** + * Shorthand enum for common traceFlags values inside SpanContext + */ +export declare const enum TraceFlags { + /** No flag set. */ + NONE = 0, + /** Caller is collecting trace information. */ + SAMPLED = 1 +} + +/** + * A Tracer. + */ +export declare interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method does NOT modify the current Context. 
+ * + * @param name - The name of the span + * @param options - SpanOptions used for span creation + * @param context - Context to use to extract parent + * @returns The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; +} + +/** + * TraceState. + */ +export declare interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key - key of the TraceState entry. + * @param value - value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key - the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key - with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. + */ + serialize(): string; +} + +export { } diff --git a/node_modules/@azure/logger/CHANGELOG.md b/node_modules/@azure/logger/CHANGELOG.md new file mode 100644 index 0000000000..8595873629 --- /dev/null +++ b/node_modules/@azure/logger/CHANGELOG.md @@ -0,0 +1,27 @@ +# Release History + +## 1.0.3 (2021-09-30) + +### Other Changes + +- Updates package to work with the react native bundler. [PR #17783](https://github.com/Azure/azure-sdk-for-js/pull/17783) + +## 1.0.2 (2021-03-04) + +- When logging in the browser, the default log function will now log messages to the corresponding console log function (e.g. `info` level is sent to `console.info()`.) PR [#14103](https://github.com/Azure/azure-sdk-for-js/pull/14103) + +## 1.0.1 (2021-01-07) + +- Updates the `tslib` dependency to version 2.x. + +## 1.0.0 (2019-10-29) + +This release marks the general availability of the `@azure/logging` package. + +- Removed the browser bundle. A browser-compatible library can still be created through the use of a bundler such as Rollup, Webpack, or Parcel. + ([#5860](https://github.com/Azure/azure-sdk-for-js/pull/5860)) + +## 1.0.0-preview.1 (2019-10-22) + +Provides methods to set log levels that enable logs in Azure SDKs that support logging. +Also supports redirecting log outputs via a method override. 
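Editor's note: the `@azure/core-tracing` typings added above declare the tracing surface (`getTracer`, `setSpan`, `getSpan`, `getTraceParentHeader`, the `context` singleton, `SpanKind`, `SpanStatusCode`) but ship no end-to-end usage snippet beyond the `createSpanFunction` doc comment. The sketch below is purely illustrative and is not part of the vendored package: the function name and attribute key are made up for the example, and recording anything useful assumes an OpenTelemetry-compatible tracer has been registered globally (otherwise the calls are expected to fall back to no-op behavior).

```typescript
// Minimal sketch of exercising the declared core-tracing surface (illustrative only).
import {
  context,
  getSpan,
  getTracer,
  getTraceParentHeader,
  setSpan,
  SpanKind,
  SpanStatusCode
} from "@azure/core-tracing";

export function tracedOperation(): void {
  const tracer = getTracer();
  // Start a span and make it the active span on a derived context.
  const span = tracer.startSpan("exampleOperation", { kind: SpanKind.INTERNAL }, context.active());
  const ctxWithSpan = setSpan(context.active(), span);
  try {
    // Downstream code can recover the span from the propagated context...
    const active = getSpan(ctxWithSpan);
    // ...and serialize its span context as a `traceparent` header for outgoing requests.
    const traceParent = active ? getTraceParentHeader(active.spanContext()) : undefined;
    span.setAttribute("example.traceparent", traceParent ?? "unavailable");
    span.setStatus({ code: SpanStatusCode.OK });
  } catch (err) {
    span.setStatus({ code: SpanStatusCode.ERROR, message: String(err) });
    throw err;
  } finally {
    span.end();
  }
}
```

Judging by its doc comment, `createSpanFunction` appears to package up essentially this start-span/propagate-context pattern for the Azure SDK client packages, which is why those clients only need to thread `OperationTracingOptions` through their operation options.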
diff --git a/node_modules/@azure/logger/LICENSE b/node_modules/@azure/logger/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/logger/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/logger/README.md b/node_modules/@azure/logger/README.md new file mode 100644 index 0000000000..b1b3b5a2e1 --- /dev/null +++ b/node_modules/@azure/logger/README.md @@ -0,0 +1,88 @@ +# Azure Logger client library for JavaScript + +The `@azure/logger` package can be used to enable logging in the Azure SDKs for JavaScript. + +Logging can be enabled for the Azure SDK in the following ways: + +- Setting the AZURE_LOG_LEVEL environment variable +- Calling setLogLevel imported from "@azure/logger" +- Calling enable() on specific loggers +- Using the `DEBUG` environment variable. + +Note that AZURE_LOG_LEVEL, if set, takes precedence over DEBUG. Only use DEBUG without specifying AZURE_LOG_LEVEL or calling setLogLevel. + +## Getting started + +### Installation + +Install this library using npm as follows + +``` +npm install @azure/logger +``` + +## Key Concepts + +The `@azure/logger` package supports the following log levels +specified in order of most verbose to least verbose: + +- verbose +- info +- warning +- error + +When setting a log level, either programmatically or via the `AZURE_LOG_LEVEL` environment variable, +any logs that are written using a log level equal to or less than the one you choose +will be emitted. + +For example, setting the log level to `warning` will cause all logs that have the log +level `warning` or `error` to be emitted. + +## Examples + +### Example 1 - basic usage + +```js +const { EventHubClient } = require('@azure/event-hubs'); + +const logger = require('@azure/logger'); +logger.setLogLevel('info'); + +// operations will now emit info, warning, and error logs +const client = new EventHubClient(/* params */); +client.getPartitionIds() + .then(ids => { /* do work */ }) + .catch(e => { /* do work */ }); +}); +``` + +### Example 2 - redirect log output + +```js +const { AzureLogger, setLogLevel } = require("@azure/logger"); + +setLogLevel("verbose"); + +// override logging to output to console.log (default location is stderr) +AzureLogger.log = (...args) => { + console.log(...args); +}; +``` + +Using `AzureLogger`, it is possible to redirect the logging output from the Azure SDKs by +overriding the `AzureLogger.log` method. 
This may be useful if you want to redirect logs to +a location other than stderr. + +## Next steps + +You can build and run the tests locally by executing `rushx test`. Explore the `test` folder to see advanced usage and behavior of the public classes. + +## Troubleshooting + +If you run into issues while using this library, please feel free to [file an issue](https://github.com/Azure/azure-sdk-for-js/issues/new). + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fcore%2Flogger%2FREADME.png) diff --git a/node_modules/@azure/logger/dist-esm/src/debug.js b/node_modules/@azure/logger/dist-esm/src/debug.js new file mode 100644 index 0000000000..fecbff478b --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/debug.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { log } from "./log"; +const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} +export default debugObj; +//# sourceMappingURL=debug.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/debug.js.map b/node_modules/@azure/logger/dist-esm/src/debug.js.map new file mode 100644 index 0000000000..631247408c --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/debug.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAgE5B,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE;IACpB,MAAM,CAAC,gBAAgB,CAAC,CAAC;CAC1B;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB,EAAY,EAAE;IAC9B,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE;QAC9B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACtB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;SACzD;aAAM;YACL,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;SAC/C;KACF;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;QAChC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;KAChD;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QAC3B,OAAO,IAAI,CAAC;KACb;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE;QACvC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YAC3B,OAAO,KAAK,CAAC;SACd;KACF;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE;QAChD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YACpC,OAAO,IAAI,CAAC;SACb;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;YACxB,OAAO;SACR;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;YACnB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;SACrC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IAC3B,CAAC;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE;QACd,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,eAAe,QAAQ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log\";\n\n/**\n * A simple mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. 
\"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log\n }\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/index.js b/node_modules/@azure/logger/dist-esm/src/index.js new file mode 100644 index 
0000000000..116b59e7f6 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/index.js @@ -0,0 +1,103 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import debug from "./debug"; +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export const AzureLogger = debug("azure"); +AzureLogger.log = (...args) => { + debug.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debug.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +export function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100 +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. 
+ * @hidden + */ +export function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose") + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debug.disable(); + debug.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) { + return true; + } + else { + return false; + } +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/index.js.map b/node_modules/@azure/logger/dist-esm/src/index.js.map new file mode 100644 index 0000000000..a9d7880e9a --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAmB,MAAM,SAAS,CAAC;AAG1C,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,IAAI,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;GAIG;AACH,MAAM,CAAC,MAAM,WAAW,GAAsB,KAAK,CAAC,OAAO,CAAC,CAAC;AAC7D,WAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;IAC5B,KAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE;IACnB,0FAA0F;IAC1F,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE;QACpC,WAAW,CAAC,eAAe,CAAC,CAAC;KAC9B;SAAM;QACL,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;KACH;CACF;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;KACH;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE;QACtC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;YACxB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC1C;KACF;IAED,KAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,WAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI,EAAE,EAAE;QACtB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;IACtB,CAAC,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,CAAC,CAAC;IAEH,cAAc,CAAC,MAAM,EAAE
,MAAM,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;QACxB,MAAM,iBAAiB,GAAG,KAAK,CAAC,OAAO,EAAE,CAAC;QAC1C,KAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;KAC1D;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,IAAI,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,EAAE;QACtE,OAAO,IAAI,CAAC;KACb;SAAM;QACL,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { Debugger } from \"./debug\";\nexport { Debugger } from \"./debug\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \"\n )}.`\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. 
automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed trbouleshooting scenarios. This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\")\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) {\n return true;\n } else {\n return false;\n }\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.browser.js b/node_modules/@azure/logger/dist-esm/src/log.browser.js new file mode 100644 index 0000000000..3f69bb237f --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.browser.js @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export function log(...args) { + if (args.length > 0) { + const firstArg = String(args[0]); + if (firstArg.includes(":error")) { + console.error(...args); + } + else if (firstArg.includes(":warning")) { + console.warn(...args); + } + else if (firstArg.includes(":info")) { + console.info(...args); + } + else if (firstArg.includes(":verbose")) { + console.debug(...args); + } + else { + console.debug(...args); + } + } +} +//# sourceMappingURL=log.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.browser.js.map b/node_modules/@azure/logger/dist-esm/src/log.browser.js.map new file mode 100644 index 0000000000..4542a4e8cd --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.browser.js","sourceRoot":"","sources":["../../src/log.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,UAAU,GAAG,CAAC,GAAG,IAAW;IAChC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;QACnB,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;QACjC,IAAI,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YAC/B,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;SACxB;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;SACvB;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC;SACvB;aAAM,IAAI,QAAQ,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;YACxC,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;SACxB;aAAM;YACL,OAAO,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;SACxB;KACF;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport function log(...args: any[]): void {\n if (args.length > 0) {\n const firstArg = String(args[0]);\n if (firstArg.includes(\":error\")) {\n console.error(...args);\n } else if (firstArg.includes(\":warning\")) {\n console.warn(...args);\n } else if (firstArg.includes(\":info\")) {\n console.info(...args);\n } else if (firstArg.includes(\":verbose\")) {\n console.debug(...args);\n } else {\n console.debug(...args);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.js b/node_modules/@azure/logger/dist-esm/src/log.js new file mode 100644 index 0000000000..36c9334470 --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import util from "util"; +import { EOL } from "os"; +export function log(message, ...args) { + process.stderr.write(`${util.format(message, ...args)}${EOL}`); +} +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/logger/dist-esm/src/log.js.map b/node_modules/@azure/logger/dist-esm/src/log.js.map new file mode 100644 index 0000000000..4b178616cc --- /dev/null +++ b/node_modules/@azure/logger/dist-esm/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,GAAG,EAAE,MAAM,IAAI,CAAC;AAEzB,MAAM,UAAU,GAAG,CAAC,OAAgB,EAAE,GAAG,IAAW;IAClD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAG,GAAG,EAAE,CAAC,CAAC;AACjE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport util from \"util\";\nimport { EOL } from \"os\";\n\nexport function log(message: unknown, ...args: any[]): void {\n process.stderr.write(`${util.format(message, ...args)}${EOL}`);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/logger/dist/index.js b/node_modules/@azure/logger/dist/index.js new file mode 100644 index 0000000000..327fbdbecf --- /dev/null +++ b/node_modules/@azure/logger/dist/index.js @@ -0,0 +1,210 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var util = _interopDefault(require('util')); +var os = require('os'); + +// Copyright (c) Microsoft Corporation. +function log(message, ...args) { + process.stderr.write(`${util.format(message, ...args)}${os.EOL}`); +} + +// Copyright (c) Microsoft Corporation. 
+const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; +let enabledString; +let enabledNamespaces = []; +let skippedNamespaces = []; +const debuggers = []; +if (debugEnvVariable) { + enable(debugEnvVariable); +} +const debugObj = Object.assign((namespace) => { + return createDebugger(namespace); +}, { + enable, + enabled, + disable, + log +}); +function enable(namespaces) { + enabledString = namespaces; + enabledNamespaces = []; + skippedNamespaces = []; + const wildcard = /\*/g; + const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); + for (const ns of namespaceList) { + if (ns.startsWith("-")) { + skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); + } + else { + enabledNamespaces.push(new RegExp(`^${ns}$`)); + } + } + for (const instance of debuggers) { + instance.enabled = enabled(instance.namespace); + } +} +function enabled(namespace) { + if (namespace.endsWith("*")) { + return true; + } + for (const skipped of skippedNamespaces) { + if (skipped.test(namespace)) { + return false; + } + } + for (const enabledNamespace of enabledNamespaces) { + if (enabledNamespace.test(namespace)) { + return true; + } + } + return false; +} +function disable() { + const result = enabledString || ""; + enable(""); + return result; +} +function createDebugger(namespace) { + const newDebugger = Object.assign(debug, { + enabled: enabled(namespace), + destroy, + log: debugObj.log, + namespace, + extend + }); + function debug(...args) { + if (!newDebugger.enabled) { + return; + } + if (args.length > 0) { + args[0] = `${namespace} ${args[0]}`; + } + newDebugger.log(...args); + } + debuggers.push(newDebugger); + return newDebugger; +} +function destroy() { + const index = debuggers.indexOf(this); + if (index >= 0) { + debuggers.splice(index, 1); + return true; + } + return false; +} +function extend(namespace) { + const newDebugger = createDebugger(`${this.namespace}:${namespace}`); + newDebugger.log = this.log; + return newDebugger; +} + +// Copyright (c) Microsoft Corporation. +const registeredLoggers = new Set(); +const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; +let azureLogLevel; +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +const AzureLogger = debugObj("azure"); +AzureLogger.log = (...args) => { + debugObj.log(...args); +}; +const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; +if (logLevelFromEnv) { + // avoid calling setLogLevel because we don't want a mis-set environment variable to crash + if (isAzureLogLevel(logLevelFromEnv)) { + setLogLevel(logLevelFromEnv); + } + else { + console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); + } +} +/** + * Immediately enables logging at the specified log level. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +function setLogLevel(level) { + if (level && !isAzureLogLevel(level)) { + throw new Error(`Unknown log level '${level}'. 
Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); + } + azureLogLevel = level; + const enabledNamespaces = []; + for (const logger of registeredLoggers) { + if (shouldEnable(logger)) { + enabledNamespaces.push(logger.namespace); + } + } + debugObj.enable(enabledNamespaces.join(",")); +} +/** + * Retrieves the currently specified log level. + */ +function getLogLevel() { + return azureLogLevel; +} +const levelMap = { + verbose: 400, + info: 300, + warning: 200, + error: 100 +}; +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +function createClientLogger(namespace) { + const clientRootLogger = AzureLogger.extend(namespace); + patchLogMethod(AzureLogger, clientRootLogger); + return { + error: createLogger(clientRootLogger, "error"), + warning: createLogger(clientRootLogger, "warning"), + info: createLogger(clientRootLogger, "info"), + verbose: createLogger(clientRootLogger, "verbose") + }; +} +function patchLogMethod(parent, child) { + child.log = (...args) => { + parent.log(...args); + }; +} +function createLogger(parent, level) { + const logger = Object.assign(parent.extend(level), { + level + }); + patchLogMethod(parent, logger); + if (shouldEnable(logger)) { + const enabledNamespaces = debugObj.disable(); + debugObj.enable(enabledNamespaces + "," + logger.namespace); + } + registeredLoggers.add(logger); + return logger; +} +function shouldEnable(logger) { + if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) { + return true; + } + else { + return false; + } +} +function isAzureLogLevel(logLevel) { + return AZURE_LOG_LEVELS.includes(logLevel); +} + +exports.AzureLogger = AzureLogger; +exports.createClientLogger = createClientLogger; +exports.getLogLevel = getLogLevel; +exports.setLogLevel = setLogLevel; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/logger/dist/index.js.map b/node_modules/@azure/logger/dist/index.js.map new file mode 100644 index 0000000000..9c744a9de5 --- /dev/null +++ b/node_modules/@azure/logger/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sources":["../src/log.ts","../src/debug.ts","../src/index.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport util from \"util\";\nimport { EOL } from \"os\";\n\nexport function log(message: unknown, ...args: any[]): void {\n process.stderr.write(`${util.format(message, ...args)}${EOL}`);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { log } from \"./log\";\n\n/**\n * A simple mechanism for enabling logging.\n * Intended to mimic the publicly available `debug` package.\n */\nexport interface Debug {\n /**\n * Creates a new logger with the given namespace.\n */\n (namespace: string): Debugger;\n /**\n * The default log method (defaults to console)\n */\n log: (...args: any[]) => void;\n /**\n * Enables a particular set of namespaces.\n * To enable multiple separate them with commas, e.g. \"info,debug\".\n * Supports wildcards, e.g. \"azure:*\"\n * Supports skip syntax, e.g. 
\"azure:*,-azure:storage:*\" will enable\n * everything under azure except for things under azure:storage.\n */\n enable: (namespaces: string) => void;\n /**\n * Checks if a particular namespace is enabled.\n */\n enabled: (namespace: string) => boolean;\n /**\n * Disables all logging, returns what was previously enabled.\n */\n disable: () => string;\n}\n\n/**\n * A log function that can be dynamically enabled and redirected.\n */\nexport interface Debugger {\n /**\n * Logs the given arguments to the `log` method.\n */\n (...args: any[]): void;\n /**\n * True if this logger is active and logging.\n */\n enabled: boolean;\n /**\n * Used to cleanup/remove this logger.\n */\n destroy: () => boolean;\n /**\n * The current log method. Can be overridden to redirect output.\n */\n log: (...args: any[]) => void;\n /**\n * The namespace of this logger.\n */\n namespace: string;\n /**\n * Extends this logger with a child namespace.\n * Namespaces are separated with a ':' character.\n */\n extend: (namespace: string) => Debugger;\n}\n\nconst debugEnvVariable =\n (typeof process !== \"undefined\" && process.env && process.env.DEBUG) || undefined;\n\nlet enabledString: string | undefined;\nlet enabledNamespaces: RegExp[] = [];\nlet skippedNamespaces: RegExp[] = [];\nconst debuggers: Debugger[] = [];\n\nif (debugEnvVariable) {\n enable(debugEnvVariable);\n}\n\nconst debugObj: Debug = Object.assign(\n (namespace: string): Debugger => {\n return createDebugger(namespace);\n },\n {\n enable,\n enabled,\n disable,\n log\n }\n);\n\nfunction enable(namespaces: string): void {\n enabledString = namespaces;\n enabledNamespaces = [];\n skippedNamespaces = [];\n const wildcard = /\\*/g;\n const namespaceList = namespaces.split(\",\").map((ns) => ns.trim().replace(wildcard, \".*?\"));\n for (const ns of namespaceList) {\n if (ns.startsWith(\"-\")) {\n skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`));\n } else {\n enabledNamespaces.push(new RegExp(`^${ns}$`));\n }\n }\n for (const instance of debuggers) {\n instance.enabled = enabled(instance.namespace);\n }\n}\n\nfunction enabled(namespace: string): boolean {\n if (namespace.endsWith(\"*\")) {\n return true;\n }\n\n for (const skipped of skippedNamespaces) {\n if (skipped.test(namespace)) {\n return false;\n }\n }\n for (const enabledNamespace of enabledNamespaces) {\n if (enabledNamespace.test(namespace)) {\n return true;\n }\n }\n return false;\n}\n\nfunction disable(): string {\n const result = enabledString || \"\";\n enable(\"\");\n return result;\n}\n\nfunction createDebugger(namespace: string): Debugger {\n const newDebugger: Debugger = Object.assign(debug, {\n enabled: enabled(namespace),\n destroy,\n log: debugObj.log,\n namespace,\n extend\n });\n\n function debug(...args: any[]): void {\n if (!newDebugger.enabled) {\n return;\n }\n if (args.length > 0) {\n args[0] = `${namespace} ${args[0]}`;\n }\n newDebugger.log(...args);\n }\n\n debuggers.push(newDebugger);\n\n return newDebugger;\n}\n\nfunction destroy(this: Debugger): boolean {\n const index = debuggers.indexOf(this);\n if (index >= 0) {\n debuggers.splice(index, 1);\n return true;\n }\n return false;\n}\n\nfunction extend(this: Debugger, namespace: string): Debugger {\n const newDebugger = createDebugger(`${this.namespace}:${namespace}`);\n newDebugger.log = this.log;\n return newDebugger;\n}\n\nexport default debugObj;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport debug, { Debugger } from \"./debug\";\nexport { Debugger } from 
\"./debug\";\n\nconst registeredLoggers = new Set();\nconst logLevelFromEnv =\n (typeof process !== \"undefined\" && process.env && process.env.AZURE_LOG_LEVEL) || undefined;\n\nlet azureLogLevel: AzureLogLevel | undefined;\n\n/**\n * The AzureLogger provides a mechanism for overriding where logs are output to.\n * By default, logs are sent to stderr.\n * Override the `log` method to redirect logs to another location.\n */\nexport const AzureLogger: AzureClientLogger = debug(\"azure\");\nAzureLogger.log = (...args) => {\n debug.log(...args);\n};\n\n/**\n * The log levels supported by the logger.\n * The log levels in order of most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport type AzureLogLevel = \"verbose\" | \"info\" | \"warning\" | \"error\";\nconst AZURE_LOG_LEVELS = [\"verbose\", \"info\", \"warning\", \"error\"];\n\ntype AzureDebugger = Debugger & { level: AzureLogLevel };\n\n/**\n * An AzureClientLogger is a function that can log to an appropriate severity level.\n */\nexport type AzureClientLogger = Debugger;\n\nif (logLevelFromEnv) {\n // avoid calling setLogLevel because we don't want a mis-set environment variable to crash\n if (isAzureLogLevel(logLevelFromEnv)) {\n setLogLevel(logLevelFromEnv);\n } else {\n console.error(\n `AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. Acceptable values: ${AZURE_LOG_LEVELS.join(\n \", \"\n )}.`\n );\n }\n}\n\n/**\n * Immediately enables logging at the specified log level.\n * @param level - The log level to enable for logging.\n * Options from most verbose to least verbose are:\n * - verbose\n * - info\n * - warning\n * - error\n */\nexport function setLogLevel(level?: AzureLogLevel): void {\n if (level && !isAzureLogLevel(level)) {\n throw new Error(\n `Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(\",\")}`\n );\n }\n azureLogLevel = level;\n\n const enabledNamespaces = [];\n for (const logger of registeredLoggers) {\n if (shouldEnable(logger)) {\n enabledNamespaces.push(logger.namespace);\n }\n }\n\n debug.enable(enabledNamespaces.join(\",\"));\n}\n\n/**\n * Retrieves the currently specified log level.\n */\nexport function getLogLevel(): AzureLogLevel | undefined {\n return azureLogLevel;\n}\n\nconst levelMap = {\n verbose: 400,\n info: 300,\n warning: 200,\n error: 100\n};\n\n/**\n * Defines the methods available on the SDK-facing logger.\n */\n// eslint-disable-next-line @typescript-eslint/no-redeclare\nexport interface AzureLogger {\n /**\n * Used for failures the program is unlikely to recover from,\n * such as Out of Memory.\n */\n error: Debugger;\n /**\n * Used when a function fails to perform its intended task.\n * Usually this means the function will throw an exception.\n * Not used for self-healing events (e.g. automatic retry)\n */\n warning: Debugger;\n /**\n * Used when a function operates normally.\n */\n info: Debugger;\n /**\n * Used for detailed trbouleshooting scenarios. 
This is\n * intended for use by developers / system administrators\n * for diagnosing specific failures.\n */\n verbose: Debugger;\n}\n\n/**\n * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`.\n * @param namespace - The name of the SDK package.\n * @hidden\n */\nexport function createClientLogger(namespace: string): AzureLogger {\n const clientRootLogger: AzureClientLogger = AzureLogger.extend(namespace);\n patchLogMethod(AzureLogger, clientRootLogger);\n return {\n error: createLogger(clientRootLogger, \"error\"),\n warning: createLogger(clientRootLogger, \"warning\"),\n info: createLogger(clientRootLogger, \"info\"),\n verbose: createLogger(clientRootLogger, \"verbose\")\n };\n}\n\nfunction patchLogMethod(parent: AzureClientLogger, child: AzureClientLogger | AzureDebugger): void {\n child.log = (...args) => {\n parent.log(...args);\n };\n}\n\nfunction createLogger(parent: AzureClientLogger, level: AzureLogLevel): AzureDebugger {\n const logger: AzureDebugger = Object.assign(parent.extend(level), {\n level\n });\n\n patchLogMethod(parent, logger);\n\n if (shouldEnable(logger)) {\n const enabledNamespaces = debug.disable();\n debug.enable(enabledNamespaces + \",\" + logger.namespace);\n }\n\n registeredLoggers.add(logger);\n\n return logger;\n}\n\nfunction shouldEnable(logger: AzureDebugger): boolean {\n if (azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]) {\n return true;\n } else {\n return false;\n }\n}\n\nfunction isAzureLogLevel(logLevel: string): logLevel is AzureLogLevel {\n return AZURE_LOG_LEVELS.includes(logLevel as any);\n}\n"],"names":["EOL","debug"],"mappings":";;;;;;;;;AAAA;SAMgB,GAAG,CAAC,OAAgB,EAAE,GAAG,IAAW;IAClD,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,GAAGA,MAAG,EAAE,CAAC,CAAC;AACjE;;ACRA;AACA,AAkEA,MAAM,gBAAgB,GACpB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,KAAK,KAAK,SAAS,CAAC;AAEpF,IAAI,aAAiC,CAAC;AACtC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,IAAI,iBAAiB,GAAa,EAAE,CAAC;AACrC,MAAM,SAAS,GAAe,EAAE,CAAC;AAEjC,IAAI,gBAAgB,EAAE;IACpB,MAAM,CAAC,gBAAgB,CAAC,CAAC;CAC1B;AAED,MAAM,QAAQ,GAAU,MAAM,CAAC,MAAM,CACnC,CAAC,SAAiB;IAChB,OAAO,cAAc,CAAC,SAAS,CAAC,CAAC;AACnC,CAAC,EACD;IACE,MAAM;IACN,OAAO;IACP,OAAO;IACP,GAAG;CACJ,CACF,CAAC;AAEF,SAAS,MAAM,CAAC,UAAkB;IAChC,aAAa,GAAG,UAAU,CAAC;IAC3B,iBAAiB,GAAG,EAAE,CAAC;IACvB,iBAAiB,GAAG,EAAE,CAAC;IACvB,MAAM,QAAQ,GAAG,KAAK,CAAC;IACvB,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,EAAE,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC,OAAO,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC;IAC5F,KAAK,MAAM,EAAE,IAAI,aAAa,EAAE;QAC9B,IAAI,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;YACtB,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;SACzD;aAAM;YACL,iBAAiB,CAAC,IAAI,CAAC,IAAI,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC;SAC/C;KACF;IACD,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE;QAChC,QAAQ,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC;KAChD;AACH,CAAC;AAED,SAAS,OAAO,CAAC,SAAiB;IAChC,IAAI,SAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;QAC3B,OAAO,IAAI,CAAC;KACb;IAED,KAAK,MAAM,OAAO,IAAI,iBAAiB,EAAE;QACvC,IAAI,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YAC3B,OAAO,KAAK,CAAC;SACd;KACF;IACD,KAAK,MAAM,gBAAgB,IAAI,iBAAiB,EAAE;QAChD,IAAI,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,EAAE;YACpC,OAAO,IAAI,CAAC;SACb;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,OAAO;IACd,MAAM,MAAM,GAAG,aAAa,IAAI,EAAE,CAAC;IACnC,MAAM,CAAC,EAAE,CAAC,CAAC;IACX,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,WAAW,GAAa,MAAM,CAAC,MAAM,CAAC,KAAK,EAAE;QACjD,OAAO,EAAE,OAAO,CAAC,SAAS,CAAC;QAC3B,OAAO;
QACP,GAAG,EAAE,QAAQ,CAAC,GAAG;QACjB,SAAS;QACT,MAAM;KACP,CAAC,CAAC;IAEH,SAAS,KAAK,CAAC,GAAG,IAAW;QAC3B,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;YACxB,OAAO;SACR;QACD,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE;YACnB,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;SACrC;QACD,WAAW,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;KAC1B;IAED,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;IAE5B,OAAO,WAAW,CAAC;AACrB,CAAC;AAED,SAAS,OAAO;IACd,MAAM,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACtC,IAAI,KAAK,IAAI,CAAC,EAAE;QACd,SAAS,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC;QAC3B,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,MAAM,CAAiB,SAAiB;IAC/C,MAAM,WAAW,GAAG,cAAc,CAAC,GAAG,IAAI,CAAC,SAAS,IAAI,SAAS,EAAE,CAAC,CAAC;IACrE,WAAW,CAAC,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC;IAC3B,OAAO,WAAW,CAAC;AACrB,CAAC;;AC1KD;AACA,AAKA,MAAM,iBAAiB,GAAG,IAAI,GAAG,EAAiB,CAAC;AACnD,MAAM,eAAe,GACnB,CAAC,OAAO,OAAO,KAAK,WAAW,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,GAAG,CAAC,eAAe,KAAK,SAAS,CAAC;AAE9F,IAAI,aAAwC,CAAC;AAE7C;;;;;AAKA,MAAa,WAAW,GAAsBC,QAAK,CAAC,OAAO,CAAC,CAAC;AAC7D,WAAW,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI;IACxBA,QAAK,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;AACrB,CAAC,CAAC;AAWF,MAAM,gBAAgB,GAAG,CAAC,SAAS,EAAE,MAAM,EAAE,SAAS,EAAE,OAAO,CAAC,CAAC;AASjE,IAAI,eAAe,EAAE;;IAEnB,IAAI,eAAe,CAAC,eAAe,CAAC,EAAE;QACpC,WAAW,CAAC,eAAe,CAAC,CAAC;KAC9B;SAAM;QACL,OAAO,CAAC,KAAK,CACX,6CAA6C,eAAe,iDAAiD,gBAAgB,CAAC,IAAI,CAChI,IAAI,CACL,GAAG,CACL,CAAC;KACH;CACF;AAED;;;;;;;;;AASA,SAAgB,WAAW,CAAC,KAAqB;IAC/C,IAAI,KAAK,IAAI,CAAC,eAAe,CAAC,KAAK,CAAC,EAAE;QACpC,MAAM,IAAI,KAAK,CACb,sBAAsB,KAAK,yBAAyB,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CACjF,CAAC;KACH;IACD,aAAa,GAAG,KAAK,CAAC;IAEtB,MAAM,iBAAiB,GAAG,EAAE,CAAC;IAC7B,KAAK,MAAM,MAAM,IAAI,iBAAiB,EAAE;QACtC,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;YACxB,iBAAiB,CAAC,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC1C;KACF;IAEDA,QAAK,CAAC,MAAM,CAAC,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,CAAC;AAED;;;AAGA,SAAgB,WAAW;IACzB,OAAO,aAAa,CAAC;AACvB,CAAC;AAED,MAAM,QAAQ,GAAG;IACf,OAAO,EAAE,GAAG;IACZ,IAAI,EAAE,GAAG;IACT,OAAO,EAAE,GAAG;IACZ,KAAK,EAAE,GAAG;CACX,CAAC;AA8BF;;;;;AAKA,SAAgB,kBAAkB,CAAC,SAAiB;IAClD,MAAM,gBAAgB,GAAsB,WAAW,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;IAC1E,cAAc,CAAC,WAAW,EAAE,gBAAgB,CAAC,CAAC;IAC9C,OAAO;QACL,KAAK,EAAE,YAAY,CAAC,gBAAgB,EAAE,OAAO,CAAC;QAC9C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;QAClD,IAAI,EAAE,YAAY,CAAC,gBAAgB,EAAE,MAAM,CAAC;QAC5C,OAAO,EAAE,YAAY,CAAC,gBAAgB,EAAE,SAAS,CAAC;KACnD,CAAC;AACJ,CAAC;AAED,SAAS,cAAc,CAAC,MAAyB,EAAE,KAAwC;IACzF,KAAK,CAAC,GAAG,GAAG,CAAC,GAAG,IAAI;QAClB,MAAM,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,CAAC;KACrB,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,MAAyB,EAAE,KAAoB;IACnE,MAAM,MAAM,GAAkB,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE;QAChE,KAAK;KACN,CAAC,CAAC;IAEH,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAE/B,IAAI,YAAY,CAAC,MAAM,CAAC,EAAE;QACxB,MAAM,iBAAiB,GAAGA,QAAK,CAAC,OAAO,EAAE,CAAC;QAC1CA,QAAK,CAAC,MAAM,CAAC,iBAAiB,GAAG,GAAG,GAAG,MAAM,CAAC,SAAS,CAAC,CAAC;KAC1D;IAED,iBAAiB,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAE9B,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,YAAY,CAAC,MAAqB;IACzC,IAAI,aAAa,IAAI,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,QAAQ,CAAC,aAAa,CAAC,EAAE;QACtE,OAAO,IAAI,CAAC;KACb;SAAM;QACL,OAAO,KAAK,CAAC;KACd;AACH,CAAC;AAED,SAAS,eAAe,CAAC,QAAgB;IACvC,OAAO,gBAAgB,CAAC,QAAQ,CAAC,QAAe,CAAC,CAAC;AACpD,CAAC;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/logger/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 
@@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/logger/node_modules/tslib/LICENSE.txt b/node_modules/@azure/logger/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/README.md b/node_modules/@azure/logger/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. 
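The same deduplication applies to the `async`/`await` helpers. As a minimal sketch (assuming `"importHelpers": true` and a pre-ES2017 `target`; the file name is illustrative), compiling the following emits a `require("tslib")` / import of helpers such as `tslib_1.__awaiter` instead of re-declaring them in every output file:

```ts
// delay.ts -- illustrative example, not part of this package
export async function delay(ms: number): Promise<void> {
  // With --importHelpers, the async lowering below is delegated to
  // tslib's __awaiter (and, for ES5 targets, __generator) helpers.
  return new Promise<void>((resolve) => setTimeout(resolve, ms));
}
```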
+ +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). 
+ +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/logger/node_modules/tslib/modules/index.js b/node_modules/@azure/logger/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/logger/node_modules/tslib/modules/package.json b/node_modules/@azure/logger/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/package.json b/node_modules/@azure/logger/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.d.ts b/node_modules/@azure/logger/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. + * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. 
+ */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. + * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar<T>(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault<T>(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field.
+ * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance.
+ * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`.
+ */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.es6.html b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.es6.js b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.html b/node_modules/@azure/logger/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/logger/node_modules/tslib/tslib.js b/node_modules/@azure/logger/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/logger/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
+***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." 
: "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/logger/package.json b/node_modules/@azure/logger/package.json new file mode 100644 index 0000000000..6ca97736a2 --- /dev/null +++ b/node_modules/@azure/logger/package.json @@ -0,0 +1,109 @@ +{ + "name": "@azure/logger", + "sdk-type": "client", + "version": "1.0.3", + "description": "Microsoft Azure SDK for JavaScript - Logger", + "main": "./dist/index.js", + "module": "dist-esm/src/index.js", + "browser": { + "./dist-esm/src/log.js": "./dist-esm/src/log.browser.js", + "process": false + }, + "react-native": { + "./dist/index.js": "./dist-esm/src/index.js" + }, + "engines": { + "node": ">=12.0.0" + }, + "scripts": { 
+ "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:samples": "echo Obsolete", + "build:test": "tsc -p . && rollup -c 2>&1", + "build": "npm run clean && tsc -p . && rollup -c 2>&1 && api-extractor run --local --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* temp types *.tgz *.log", + "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src", + "execute:samples": "echo skipped", + "extract-api": "tsc -p . && api-extractor run --local", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts", + "pack": "npm pack 2>&1", + "pretest": "npm run build:test", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tsc -p . && npm run unit-test:node && rollup -c 2>&1 && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "types": "./types/logger.d.ts", + "files": [ + "dist/", + "dist-esm/src/", + "types/logger.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "log", + "logger", + "logging", + "node.js", + "typescript", + "javascript", + "browser", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/logger/README.md", + "sideEffects": false, + "dependencies": { + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "@types/sinon": "^9.0.4", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "delay": "^4.2.0", + "dotenv": "^8.2.0", + "eslint": "^7.15.0", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^1.18.0", + "nyc": "^14.0.0", + "prettier": "^1.16.4", + "puppeteer": "^10.2.0", + 
"rimraf": "^3.0.0", + "rollup": "^1.16.3", + "sinon": "^9.0.2", + "ts-node": "^10.0.0", + "typescript": "~4.2.0", + "typedoc": "0.15.2" + } +} diff --git a/node_modules/@azure/logger/types/logger.d.ts b/node_modules/@azure/logger/types/logger.d.ts new file mode 100644 index 0000000000..9e2a63dcc5 --- /dev/null +++ b/node_modules/@azure/logger/types/logger.d.ts @@ -0,0 +1,104 @@ +/** + * An AzureClientLogger is a function that can log to an appropriate severity level. + */ +export declare type AzureClientLogger = Debugger; + +/** + * The AzureLogger provides a mechanism for overriding where logs are output to. + * By default, logs are sent to stderr. + * Override the `log` method to redirect logs to another location. + */ +export declare const AzureLogger: AzureClientLogger; + +/** + * Defines the methods available on the SDK-facing logger. + */ +export declare interface AzureLogger { + /** + * Used for failures the program is unlikely to recover from, + * such as Out of Memory. + */ + error: Debugger; + /** + * Used when a function fails to perform its intended task. + * Usually this means the function will throw an exception. + * Not used for self-healing events (e.g. automatic retry) + */ + warning: Debugger; + /** + * Used when a function operates normally. + */ + info: Debugger; + /** + * Used for detailed trbouleshooting scenarios. This is + * intended for use by developers / system administrators + * for diagnosing specific failures. + */ + verbose: Debugger; +} + +/** + * The log levels supported by the logger. + * The log levels in order of most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare type AzureLogLevel = "verbose" | "info" | "warning" | "error"; + +/** + * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. + * @param namespace - The name of the SDK package. + * @hidden + */ +export declare function createClientLogger(namespace: string): AzureLogger; + +/** + * A log function that can be dynamically enabled and redirected. + */ +export declare interface Debugger { + /** + * Logs the given arguments to the `log` method. + */ + (...args: any[]): void; + /** + * True if this logger is active and logging. + */ + enabled: boolean; + /** + * Used to cleanup/remove this logger. + */ + destroy: () => boolean; + /** + * The current log method. Can be overridden to redirect output. + */ + log: (...args: any[]) => void; + /** + * The namespace of this logger. + */ + namespace: string; + /** + * Extends this logger with a child namespace. + * Namespaces are separated with a ':' character. + */ + extend: (namespace: string) => Debugger; +} + +/** + * Retrieves the currently specified log level. + */ +export declare function getLogLevel(): AzureLogLevel | undefined; + +/** + * Immediately enables logging at the specified log level. + * @param level - The log level to enable for logging. + * Options from most verbose to least verbose are: + * - verbose + * - info + * - warning + * - error + */ +export declare function setLogLevel(level?: AzureLogLevel): void; + +export { } diff --git a/node_modules/@azure/ms-rest-js/LICENSE b/node_modules/@azure/ms-rest-js/LICENSE new file mode 100644 index 0000000000..21071075c2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. All rights reserved. 
+ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@azure/ms-rest-js/README.md b/node_modules/@azure/ms-rest-js/README.md new file mode 100644 index 0000000000..7beda9cd77 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/README.md @@ -0,0 +1,41 @@ +# ms-rest-js + +[![Build Status](https://dev.azure.com/azure-public/azsdk/_apis/build/status/public.Azure.ms-rest-js%20-%20CI)](https://dev.azure.com/azure-public/azsdk/_build/latest?definitionId=39) + +Runtime for isomorphic javascript libraries (that work in the browser and node.js environment) generated via [Autorest](https://github.com/Azure/Autorest). + +## Requirements +- Node.js version > 6.x +- `npm install -g typescript` + +## Installation +- After cloning the repo, execute `npm install` + +## Execution + +### Node.js +- Set the subscriptionId and token as instructed in `samples/node-samples.ts` +- Run `npx ts-node samples/node-sample.js` + +### In the browser +- Run `npm run build` +- Set the subscriptionId and token then +- Open index.html file in the browser. It should show the response from GET request on the storage account. From Chrome type Ctrl + Shift + I and you can see the logs in console. + +## Architecture Overview + +You can find an explanation of how this repository's code works by going to our [architecture overview](https://github.com/Azure/ms-rest-js/blob/master/docs/architectureOverview.md). + +# Contributing + +This project welcomes contributions and suggestions. Most contributions require you to agree to a +Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us +the rights to use your contribution. For details, visit https://cla.microsoft.com. + +When you submit a pull request, a CLA-bot will automatically determine whether you need to provide +a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions +provided by the bot. You will only need to do this once across all repos using our CLA. + +This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). +For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or +contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments. 
diff --git a/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt b/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt new file mode 100644 index 0000000000..8c622df8ba --- /dev/null +++ b/node_modules/@azure/ms-rest-js/ThirdPartyNotices.txt @@ -0,0 +1,35 @@ +Third Party Notices for ms-rest-js + +This project incorporates material from the project(s) listed below (collectively, Third Party Code). +Microsoft, Inc. Microsoft is not the original author of the Third Party Code. +The original copyright notice and license, under which Microsoft received such Third Party Code, +are set out below. This Third Party Code is licensed to you under their original license terms set forth below. +Microsoft reserves all other rights not expressly granted, whether by implication, estoppel or otherwise. + +1. uuid (https://github.com/kelektiv/node-uuid) + +%% uuid NOTICES AND INFORMATION BEGIN HERE +========================================= +The MIT License (MIT) + +Copyright (c) 2010-2016 Robert Kieffer and other contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. +========================================= +END OF uuid NOTICES AND INFORMATION \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.js b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js new file mode 100644 index 0000000000..b2ab5406e7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js @@ -0,0 +1,4420 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +(function (global, factory) { + typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : + typeof define === 'function' && define.amd ? define(['exports'], factory) : + (global = global || self, factory(global.msRest = {})); +}(this, (function (exports) { 'use strict'; + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * A collection of HttpHeaders that can be sent with a HTTP request. 
+ */ + function getHeaderKey(headerName) { + return headerName.toLowerCase(); + } + function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; + } + /** + * A collection of HTTP header key/value pairs. + */ + var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. 
+ */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * Encodes a string in base64 format. + * @param value the string to encode + */ + function encodeString(value) { + return btoa(value); + } + /** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ + function encodeByteArray(value) { + var str = ""; + for (var i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); + } + /** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ + function decodeString(value) { + var byteString = atob(value); + var arr = new Uint8Array(byteString.length); + for (var i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } + + // Unique ID creation requires a high quality random # generator. In the browser we therefore + // require the crypto API and do not support built-in fallback to lower quality random number + // generators (like Math.random()). + var getRandomValues; + var rnds8 = new Uint8Array(16); + function rng() { + // lazy load so that environments that need to polyfill have a chance to do so + if (!getRandomValues) { + // getRandomValues needs to be invoked in a context where "this" is a Crypto implementation. Also, + // find the complete implementation of crypto (msCrypto) on IE11. + getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto); + + if (!getRandomValues) { + throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported'); + } + } + + return getRandomValues(rnds8); + } + + var REGEX = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; + + function validate(uuid) { + return typeof uuid === 'string' && REGEX.test(uuid); + } + + /** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ + + var byteToHex = []; + + for (var i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); + } + + function stringify(arr) { + var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!validate(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; + } + + function v4(options, buf, offset) { + options = options || {}; + var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided + + if (buf) { + offset = offset || 0; + + for (var i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } + + return buf; + } + + return stringify(rnds); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.2", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, + }; + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A constant that indicates whether the environment is node.js or browser based. + */ + var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; + /** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. 
+ */ + function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); + } + /** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ + function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; + } + /** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ + function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; + } + /** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ + function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); + } + /** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ + function generateUuid() { + return v4(); + } + /** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ + function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; + } + /** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ + function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); + } + /** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ + function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; + } + /** + * Converts a Promise to a service callback. 
+ * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ + function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; + } + function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; + } + /** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ + function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); + } + var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; + /** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ + function isDuration(value) { + return validateISODuration.test(value); + } + /** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ + function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); + } + /** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ + function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; + }()); + function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); + } + function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); + } + function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); + } + function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; + } + function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); + } + function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); + } + function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; + } + function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; + } + function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = encodeByteArray(value); + } + return value; + } + function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; + } + function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; + } + function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; + } + function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; + } + /** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ + function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; + } + function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; + } + function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); + } + function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. 
+ for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; + } + function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, 
responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; + } + function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; + } + function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; + } + function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); + } + function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); + } + // TODO: why is this here? + function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; + } + /** + * Utility function to create a K:V from a list of strings + */ + function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; + } + var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", + ]); + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; + } + /** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ + var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
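// Editor's note (illustrative comment, not part of the package): for a hypothetical
// input such as
//   options.queryParameters = {
//     "api-version": "2020-06-01",
//     "$filter": { value: "a eq 'b c'", skipUrlEncoding: false }
//   }
// the loop below leaves parameter *names* untouched, URI-encodes the *values*
// (unless skipUrlEncoding is true), and produces
//   this.url   -> "...?api-version=2020-06-01&$filter=a%20eq%20'b%20c'"
//   this.query -> { "api-version": "2020-06-01", "$filter": "a%20eq%20'b%20c'" }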
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; + }()); + + /*! ***************************************************************************** + Copyright (c) Microsoft Corporation. + + Permission to use, copy, modify, and/or distribute this software for any + purpose with or without fee is hereby granted. + + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH + REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY + AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, + INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM + LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR + OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR + PERFORMANCE OF THIS SOFTWARE. + ***************************************************************************** */ + /* global Reflect, Promise */ + + var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + + function __extends(d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + } + + var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); + }; + + function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + } + + function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? 
y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + } + + function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + var RestError = /** @class */ (function (_super) { + __extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; + }(Error)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ + var XhrHttpClient = /** @class */ (function () { + function XhrHttpClient() { + } + XhrHttpClient.prototype.sendRequest = function (request) { + var xhr = new XMLHttpRequest(); + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + var abortSignal = request.abortSignal; + if (abortSignal) { + var listener_1 = function () { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener_1); + xhr.addEventListener("readystatechange", function () { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener_1); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + var formData = request.formData; + var requestForm_1 = new FormData(); + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (var _i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + var formKey = _a[_i]; + var formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm_1; + request.formData = undefined; + var contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (var _b = 0, _c = request.headers.headersArray(); _b < _c.length; _b++) { + var header = _c[_b]; + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? 
null : request.body); + if (request.streamResponseBody) { + return new Promise(function (resolve, reject) { + xhr.addEventListener("readystatechange", function () { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + var blobBody = new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody: blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + return resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + }; + return XhrHttpClient; + }()); + function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", function (rawEvent) { + return listener({ + loadedBytes: rawEvent.loaded, + }); + }); + } + } + // exported locally for testing + function parseHeaders(xhr) { + var responseHeaders = new HttpHeaders(); + var headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (var _i = 0, headerLines_1 = headerLines; _i < headerLines_1.length; _i++) { + var line = headerLines_1[_i]; + var index = line.indexOf(":"); + var headerName = line.slice(0, index); + var headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; + } + function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", function () { + return reject(new RestError("Failed to send request to " + request.url, RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + xhr.addEventListener("abort", function () { + return reject(new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request)); + }); + xhr.addEventListener("timeout", function () { + return reject(new RestError("timeout of " + xhr.timeout + "ms exceeded", RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + (function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + })(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + + // Copyright (c) Microsoft Corporation. + // Licensed under the MIT license. + /** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ + function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. 
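// Editor's note (illustrative comment, not part of the package): a hypothetical
// object such as
//   { getToken: (scopes) => Promise.resolve({ token: "...", expiresOnTimestamp: 0 }) }
// satisfies the check below (getToken is a function and no signRequest is present),
// whereas a legacy ServiceClientCredentials that only implements
// signRequest(webResource), or one that also exposes a zero-argument getToken,
// does not.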
+ const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + /** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ + function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); + } + function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + var _a, _b; + var parser = new DOMParser(); + // Policy to make our code Trusted Types compliant. + // https://github.com/w3c/webappsec-trusted-types + // We are calling DOMParser.parseFromString() to parse XML payload from Azure services. + // The parsed DOM object is not exposed to outside. Scripts are disabled when parsing + // according to the spec. There are no HTML/XSS security concerns on the usage of + // parseFromString() here. + var ttPolicy; + if (typeof self.trustedTypes !== "undefined") { + ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", { + createHTML: function (s) { return s; }, + }); + } + function parseXML(str) { + var _a; + try { + var dom = parser.parseFromString(((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML(str)) !== null && _a !== void 0 ? _a : str), "application/xml"); + throwIfError(dom); + var obj = domToObject(dom.childNodes[0]); + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } + } + var errorNS = ""; + try { + var invalidXML = ((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML("INVALID")) !== null && _a !== void 0 ? _a : "INVALID"); + errorNS = (_b = parser.parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _b !== void 0 ? _b : ""; + } + catch (ignored) { + // Most browsers will return a document containing , but IE will throw. + } + function throwIfError(dom) { + if (errorNS) { + var parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror"); + if (parserErrors.length) { + throw new Error(parserErrors.item(0).innerHTML); + } + } + } + function isElement(node) { + return !!node.attributes; + } + /** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. 
+ */ + function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? node : undefined; + } + function domToObject(node) { + var result = {}; + var childNodeCount = node.childNodes.length; + var firstChildNode = node.childNodes[0]; + var onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + var elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result["$"] = {}; + for (var i = 0; i < elementWithAttributes.attributes.length; i++) { + var attr = elementWithAttributes.attributes[i]; + result["$"][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result["_"] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (var i = 0; i < childNodeCount; i++) { + var child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + var childObject = domToObject(child); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; + } + // tslint:disable-next-line:no-null-keyword + var doc = document.implementation.createDocument(null, null, null); + var serializer = new XMLSerializer(); + function stringifyXML(obj, opts) { + var rootName = (opts && opts.rootName) || "root"; + var dom = buildNode(obj, rootName)[0]; + return ('' + serializer.serializeToString(dom)); + } + function buildAttributes(attrs) { + var result = []; + for (var _i = 0, _a = Object.keys(attrs); _i < _a.length; _i++) { + var key = _a[_i]; + var attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; + } + function buildNode(obj, elementName) { + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + var elem = doc.createElement(elementName); + elem.textContent = obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + var result = []; + for (var _i = 0, obj_1 = obj; _i < obj_1.length; _i++) { + var arrayElem = obj_1[_i]; + for (var _a = 0, _b = buildNode(arrayElem, elementName); _a < _b.length; _a++) { + var child = _b[_a]; + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + var elem = doc.createElement(elementName); + for (var _c = 0, _d = Object.keys(obj); _c < _d.length; _c++) { + var key = _d[_c]; + if (key === "$") { + for (var _e = 0, _f = buildAttributes(obj[key]); _e < _f.length; _e++) { + var attr = _f[_e]; + elem.attributes.setNamedItem(attr); + } + } + else { + for (var _g = 0, _h = buildNode(obj[key], key); _g < _h.length; _g++) { + var child = _h[_g]; + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error("Illegal value passed to buildObject: " + obj); + } + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. 
+ * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; + }()); + /** + * Optional properties that can be used when creating a RequestPolicy. + */ + var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ + function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; + } + var defaultJsonContentTypes = ["application/json", "text/json"]; + var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; + /** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. 
+ */ + var DeserializationPolicy = /** @class */ (function (_super) { + __extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; + }(BaseRequestPolicy)); + function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; + } + function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; + } + function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? 
"Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = stripRequest(parsedResponse.request); + error.response = stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = stripRequest(parsedResponse.request); + restError.response = stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); + } + function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; + } + var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + var DEFAULT_CLIENT_RETRY_COUNT = 3; + var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + /** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ + var ExponentialRetryPolicy = /** @class */ (function (_super) { + __extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. 
+ * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; + }(BaseRequestPolicy)); + /** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ + function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; + } + /** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ + function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; + } + function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; + } + var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + __extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + // Licensed under the MIT License. See License.txt in the project root for license information. + function getDefaultUserAgentKey() { + return "x-ms-command-name"; + } + function getPlatformSpecificData() { + var navigator = self.navigator; + var osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
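// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the vendored bundle. It calls
// updateRetryData() defined above with the default ExponentialRetryPolicy values;
// a hypothetical plain `examplePolicy` object stands in for a real policy instance.
var examplePolicy = {
  retryCount: 3,          // DEFAULT_CLIENT_RETRY_COUNT
  retryInterval: 30000,   // DEFAULT_CLIENT_RETRY_INTERVAL
  minRetryInterval: 3000, // DEFAULT_CLIENT_MIN_RETRY_INTERVAL
  maxRetryInterval: 90000 // DEFAULT_CLIENT_MAX_RETRY_INTERVAL
};
var exampleRetryData = updateRetryData(examplePolicy, undefined, undefined);
// First retry: incrementDelta = (2^1 - 1) * ~[24000, 36000), so retryInterval is
// roughly 27-39 seconds (3000 + incrementDelta, capped at 90000).
exampleRetryData = updateRetryData(examplePolicy, exampleRetryData, undefined);
// Second retry: the multiplier grows to (2^2 - 1) = 3, giving ~75-90 seconds
// once the 90000 ms ceiling is applied.
console.log(exampleRetryData.retryCount, exampleRetryData.retryInterval);
// ---------------------------------------------------------------------------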
+ function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; + } + function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); + } + var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; + function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; + } + function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; + } + var UserAgentPolicy = /** @class */ (function (_super) { + __extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * A class that handles the query portion of a URLBuilder. + */ + var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". 
+ */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. + */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; + }()); + /** + * A class that handles creating, modifying, and parsing URLs. + */ + var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. 
+ */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" 
+ this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; + }()); + var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; + }()); + /** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ + function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); + } + /** + * A class that tokenizes URL strings. + */ + var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; + }()); + /** + * Read the remaining characters from this Tokenizer's character stream. + */ + function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; + } + /** + * Whether or not this URLTokenizer has a current character. 
+ */ + function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; + } + /** + * Get the character in the text string at the current index. + */ + function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; + } + /** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ + function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } + } + /** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ + function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); + } + /** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ + function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; + } + /** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ + function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); + } + /** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ + function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); + } + function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } + } + function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + } + function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } + } + function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
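// ---------------------------------------------------------------------------
// Editor's note: illustrative sketch, not part of the vendored bundle. It
// exercises the URLBuilder/URLQuery/URLTokenizer machinery defined above with a
// hypothetical URL.
var exampleBuilder = URLBuilder.parse("https://example.org:8080/things?api-version=1");
exampleBuilder.setQueryParameter("top", "10"); // added to the parsed URLQuery
exampleBuilder.appendPath("sub");              // "/things" -> "/things/sub"
console.log(exampleBuilder.toString());
// -> "https://example.org:8080/things/sub?api-version=1&top=10"
// ---------------------------------------------------------------------------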
+ var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, + }; + function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; + } + var RedirectPolicy = /** @class */ (function (_super) { + __extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; + }(BaseRequestPolicy)); + function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); + } + function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; + } + + function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; + } + var RPRegistrationPolicy = /** @class */ (function (_super) { + __extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; + }(BaseRequestPolicy)); + function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = 
checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); + } + /** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ + function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; + } + /** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ + function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; + } + /** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ + function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; + } + /** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. 
+ * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ + function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); + } + /** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ + function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; + } + var SigningPolicy = /** @class */ (function (_super) { + __extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; + } + /** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. 
+ * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ + var SystemErrorRetryPolicy = /** @class */ (function (_super) { + __extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; + }(BaseRequestPolicy)); + /** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ + function shouldRetry$1(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; + } + /** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ + function updateRetryData$1(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; + } + function retry$1(policy, request, operationResponse, err, retryData) { + return __awaiter(this, void 0, void 0, function () { + var error_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData$1(policy, retryData, err); + if (!(err && + err.code && + shouldRetry$1(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + (function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; + })(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); + function agentPolicy(_agentSettings) { + return { + create: function (_nextPolicy, _options) { + throw agentNotSupportedInBrowser; + }, + }; + } + var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw agentNotSupportedInBrowser; + } + AgentPolicy.prototype.sendRequest = function (_request) { + throw agentNotSupportedInBrowser; + }; + return AgentPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ var proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); + function getDefaultProxySettings(_proxyUrl) { + return undefined; + } + function proxyPolicy(_proxySettings) { + return { + create: function (_nextPolicy, _options) { + throw proxyNotSupportedInBrowser; + }, + }; + } + var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw proxyNotSupportedInBrowser; + } + ProxyPolicy.prototype.sendRequest = function (_request) { + throw proxyNotSupportedInBrowser; + }; + return ProxyPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var StatusCodes = Constants.HttpConstants.StatusCodes; + var DEFAULT_RETRY_COUNT = 3; + function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; + } + /** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ + var ThrottlingRetryPolicy = /** @class */ (function (_super) { + __extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return __awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; + }(BaseRequestPolicy)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + /** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ + var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", + ]; + /** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ + var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return __awaiter(this, void 0, void 0, function () { + var accessToken, result; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return __awaiter(this, void 0, void 0, function () { + var tokenResponse; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * @class + * Initializes a new instance of the ServiceClient. + */ + var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? 
void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new XhrHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) 
{ return cb(err); }); + } + return result; + }; + return ServiceClient; + }()); + function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } + } + function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; + } + function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; + } + function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = __assign(__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy()); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; + } + function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); + } + function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; + } + function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; + } + function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? _response.parsedBody : []; + var arrayResponse = __spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(__assign(__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. + function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; + } + var LogPolicy = /** @class */ (function (_super) { + __extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; + }(BaseRequestPolicy)); + function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); + } + + // Copyright (c) Microsoft Corporation. All rights reserved. 
+ var HeaderConstants = Constants.HeaderConstants; + var DEFAULT_AUTHORIZATION_SCHEME$1 = "Bearer"; + /** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ + var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var HeaderConstants$1 = Constants.HeaderConstants; + var DEFAULT_AUTHORIZATION_SCHEME$2 = "Basic"; + var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants$1.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + /** + * Authenticates to a service using an API key. + */ + var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. 
+ */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; + }()); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var TopicCredentials = /** @class */ (function (_super) { + __extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; + }(ApiKeyCredentials)); + + // Copyright (c) Microsoft Corporation. All rights reserved. + var DomainCredentials = /** @class */ (function (_super) { + __extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. 
+ * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; + }(ApiKeyCredentials)); + + exports.ApiKeyCredentials = ApiKeyCredentials; + exports.AzureIdentityCredentialAdapter = AzureIdentityCredentialAdapter; + exports.BaseRequestPolicy = BaseRequestPolicy; + exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; + exports.Constants = Constants; + exports.DefaultHttpClient = XhrHttpClient; + exports.DomainCredentials = DomainCredentials; + exports.HttpHeaders = HttpHeaders; + exports.MapperType = MapperType; + exports.RequestPolicyOptions = RequestPolicyOptions; + exports.RestError = RestError; + exports.Serializer = Serializer; + exports.ServiceClient = ServiceClient; + exports.TokenCredentials = TokenCredentials; + exports.TopicCredentials = TopicCredentials; + exports.URLBuilder = URLBuilder; + exports.URLQuery = URLQuery; + exports.WebResource = WebResource; + exports.agentPolicy = agentPolicy; + exports.applyMixins = applyMixins; + exports.delay = delay; + exports.deserializationPolicy = deserializationPolicy; + exports.deserializeResponseBody = deserializeResponseBody; + exports.encodeUri = encodeUri; + exports.executePromisesSequentially = executePromisesSequentially; + exports.exponentialRetryPolicy = exponentialRetryPolicy; + exports.flattenResponse = flattenResponse; + exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; + exports.generateUuid = generateUuid; + exports.getDefaultProxySettings = getDefaultProxySettings; + exports.getDefaultUserAgentValue = getDefaultUserAgentValue; + exports.isDuration = isDuration; + exports.isNode = isNode; + exports.isValidUuid = isValidUuid; + exports.logPolicy = logPolicy; + exports.promiseToCallback = promiseToCallback; + exports.promiseToServiceCallback = promiseToServiceCallback; + exports.proxyPolicy = proxyPolicy; + exports.redirectPolicy = redirectPolicy; + exports.serializeObject = serializeObject; + exports.signingPolicy = signingPolicy; + exports.stripRequest = stripRequest; + exports.stripResponse = stripResponse; + exports.systemErrorRetryPolicy = systemErrorRetryPolicy; + exports.throttlingRetryPolicy = throttlingRetryPolicy; + exports.userAgentPolicy = userAgentPolicy; + + Object.defineProperty(exports, '__esModule', { value: true }); + +}))); +//# sourceMappingURL=msRest.browser.js.map diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map new file mode 100644 index 0000000000..b27ee4dea3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.browser.js","sources":["../lib/httpHeaders.ts","../lib/util/base64.browser.ts","../node_modules/uuid/dist/esm-browser/rng.js","../node_modules/uuid/dist/esm-browser/regex.js","../node_modules/uuid/dist/esm-browser/validate.js","../node_modules/uuid/dist/esm-browser/stringify.js","../node_modules/uuid/dist/esm-browser/v4.js","../lib/util/constants.ts","../lib/util/utils.ts","../lib/serializer.ts","../lib/webResource.ts","../node_modules/tslib/tslib.es6.js","../lib/restError.ts","../lib/xhrHttpClient.ts","../lib/httpPipelineLogLevel.ts","../node_modules/@azure/core-auth/src/tokenCredential.ts","../lib/operationParameter.ts","../lib/operationSpec.ts","../lib/util/xml.browser.ts","../lib/policies/requestPolicy.ts","../lib/policies/deserializationPolicy.ts","../lib/policies/exponentialRetryPolicy.ts","../lib/policies/generateClientRequestIdPolicy.ts","../lib/policies/msRestUserAgentPolicy.browser.ts","../lib/policies/userAgentPolicy.ts","../lib/url.ts","../lib/policies/redirectPolicy.ts","../lib/policies/rpRegistrationPolicy.ts","../lib/policies/signingPolicy.ts","../lib/policies/systemErrorRetryPolicy.ts","../lib/queryCollectionFormat.ts","../lib/policies/agentPolicy.browser.ts","../lib/policies/proxyPolicy.browser.ts","../lib/policies/throttlingRetryPolicy.ts","../lib/credentials/azureIdentityTokenCredentialAdapter.ts","../lib/serviceClient.ts","../lib/policies/logPolicy.ts","../lib/credentials/tokenCredentials.ts","../lib/credentials/basicAuthenticationCredentials.ts","../lib/credentials/apiKeyCredentials.ts","../lib/credentials/topicCredentials.ts","../lib/credentials/domainCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string) {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: any): object is HttpHeadersLike {\n if (!object || typeof object !== \"object\") {\n return false;\n }\n\n if (\n typeof object.rawHeaders === \"function\" &&\n typeof object.clone === \"function\" &&\n typeof object.get === \"function\" &&\n typeof object.set === \"function\" &&\n typeof object.contains === \"function\" &&\n typeof object.remove === \"function\" &&\n typeof object.headersArray === \"function\" &&\n typeof object.headerValues === \"function\" &&\n typeof object.headerNames === \"function\" &&\n typeof object.toJson === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(): RawHttpHeaders {\n return this.rawHeaders();\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson());\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n return new HttpHeaders(this.rawHeaders());\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * Encodes a string in base64 format.\n * @param value the string to encode\n */\nexport function encodeString(value: string): string {\n return btoa(value);\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value the Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n let str = \"\";\n for (let i = 0; i < value.length; i++) {\n str += String.fromCharCode(value[i]);\n }\n return btoa(str);\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value the base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n}\n","// Unique ID creation requires a high quality random # generator. In the browser we therefore\n// require the crypto API and do not support built-in fallback to lower quality random number\n// generators (like Math.random()).\nvar getRandomValues;\nvar rnds8 = new Uint8Array(16);\nexport default function rng() {\n // lazy load so that environments that need to polyfill have a chance to do so\n if (!getRandomValues) {\n // getRandomValues needs to be invoked in a context where \"this\" is a Crypto implementation. 
Also,\n // find the complete implementation of crypto (msCrypto) on IE11.\n getRandomValues = typeof crypto !== 'undefined' && crypto.getRandomValues && crypto.getRandomValues.bind(crypto) || typeof msCrypto !== 'undefined' && typeof msCrypto.getRandomValues === 'function' && msCrypto.getRandomValues.bind(msCrypto);\n\n if (!getRandomValues) {\n throw new Error('crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported');\n }\n }\n\n return getRandomValues(rnds8);\n}","export default /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;","import REGEX from './regex.js';\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && REGEX.test(uuid);\n}\n\nexport default validate;","import validate from './validate.js';\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\n\nvar byteToHex = [];\n\nfor (var i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr) {\n var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nexport default stringify;","import rng from './rng.js';\nimport stringify from './stringify.js';\n\nfunction v4(options, buf, offset) {\n options = options || {};\n var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (var i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return stringify(rnds);\n}\n\nexport default v4;","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.6.2\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport function __createBinding(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}\r\n\r\nexport function __exportStar(m, exports) {\r\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) exports[p] = m[p];\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n};\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\r\n result.default = mod;\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpClient } from \"./httpClient\";\nimport { HttpHeaders } from \"./httpHeaders\";\nimport { WebResourceLike, TransferProgressEvent } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.agentSettings) {\n throw new Error(\"HTTP agent settings not supported in browser environment\");\n }\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n const listener = () => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n xhr.responseType = request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? 
null : request.body);\n\n if (request.streamResponseBody) {\n return new Promise((resolve, reject) => {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n }\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n) {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest) {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n) {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n xhr.addEventListener(\"abort\", () =>\n reject(\n new RestError(\"The request was aborted\", RestError.REQUEST_ABORTED_ERROR, undefined, request)\n )\n );\n xhr.addEventListener(\"timeout\", () =>\n reject(\n new RestError(\n `timeout of ${xhr.timeout}ms exceeded`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. 
You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nconst parser = new DOMParser();\n\n// Policy to make our code Trusted Types compliant.\n// https://github.com/w3c/webappsec-trusted-types\n// We are calling DOMParser.parseFromString() to parse XML payload from Azure services.\n// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing\n// according to the spec. There are no HTML/XSS security concerns on the usage of\n// parseFromString() here.\nlet ttPolicy: Pick | undefined;\nif (typeof self.trustedTypes !== \"undefined\") {\n ttPolicy = self.trustedTypes.createPolicy(\"@azure/ms-rest-js#xml.browser\", {\n createHTML: (s) => s,\n });\n}\n\nexport function parseXML(str: string): Promise {\n try {\n const dom = parser.parseFromString((ttPolicy?.createHTML(str) ?? str) as string, \"application/xml\");\n throwIfError(dom);\n\n const obj = domToObject(dom.childNodes[0]);\n return Promise.resolve(obj);\n } catch (err) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS = \"\";\ntry {\n const invalidXML = (ttPolicy?.createHTML(\"INVALID\") ?? \"INVALID\") as string;\n errorNS =\n parser.parseFromString(invalidXML, \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI! ?? \"\";\n} catch (ignored) {\n // Most browsers will return a document containing , but IE will throw.\n}\n\nfunction throwIfError(dom: Document) {\n if (errorNS) {\n const parserErrors = dom.getElementsByTagNameNS(errorNS, \"parsererror\");\n if (parserErrors.length) {\n throw new Error(parserErrors.item(0)!.innerHTML);\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. 
If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? node : undefined;\n}\n\nfunction domToObject(node: Node): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[\"$\"] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[\"$\"][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[\"_\"] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\n// tslint:disable-next-line:no-null-keyword\nconst doc = document.implementation.createDocument(null, null, null);\nconst serializer = new XMLSerializer();\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const rootName = (opts && opts.rootName) || \"root\";\n const dom = buildNode(obj, rootName)[0];\n return (\n '' + serializer.serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = doc.createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string): Node[] {\n if (typeof obj === \"string\" || typeof obj === \"number\" || typeof obj === \"boolean\") {\n const elem = doc.createElement(elementName);\n elem.textContent = obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = doc.createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === \"$\") {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else {\n for (const child of buildNode(obj[key], key)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return 
result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n * and \"plugins\" section in webpack.testconfig.ts.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-command-name\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? 
`${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. 
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
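A compact restatement of the status/method matrix that handleRedirect in the vendored RedirectPolicy above checks before following a "location" header; the standalone function shape is ours, but the matrix mirrors that code.

// Sketch of the decision handleRedirect() above makes before following a redirect.
// The function name is ours; the status/method combinations mirror the vendored code.
function shouldFollowRedirect(status: number, method: string): boolean {
  return (
    status === 300 ||
    (status === 301 && ["GET", "HEAD"].includes(method)) ||
    (status === 302 && ["GET", "POST", "HEAD"].includes(method)) ||
    (status === 303 && method === "POST") ||
    status === 307
  );
}

// Per RFC 7231 and the WHATWG fetch spec, a 302/303 POST is replayed as a GET with the
// body dropped, which is also what handleRedirect() does after this check passes.
console.log(shouldFollowRedirect(303, "POST")); // true
console.log(shouldFollowRedirect(301, "POST")); // false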
See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
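The getRegistrationStatus helper above keeps polling until the provider's registrationState reads "Registered", sleeping _retryTimeout seconds between attempts. A stripped-down sketch of that loop; the fetchState callback stands in for policy._nextPolicy.sendRequest and is an assumption of ours.

// Helper that resolves after the given number of milliseconds.
const delayMs = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Polls fetchState() until it reports "Registered", mirroring the recursion in
// getRegistrationStatus above (which, like the original, has no attempt cap).
async function pollUntilRegistered(
  fetchState: () => Promise<string | undefined>,
  retryTimeoutSeconds = 30
): Promise<boolean> {
  while ((await fetchState()) !== "Registered") {
    await delayMs(retryTimeoutSeconds * 1000);
  }
  return true;
}

// Hypothetical usage, where sendGet() issues the status GET built by getRequestEssentials:
//   await pollUntilRegistered(async () => (await sendGet()).parsedBody?.registrationState);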
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
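The QueryCollectionFormat values above are consumed later by ServiceClient.sendOperationRequest: every format except Multi acts as a join separator for array-valued query parameters, while Multi repeats the parameter name once per element. A simplified sketch of that behaviour; the helper and its encoding order are ours, not the exact pipeline logic.

// Copy of the enum above, repeated here so the sketch is self-contained.
enum QueryCollectionFormat {
  Csv = ",",
  Ssv = " ",
  Tsv = "\t",
  Pipes = "|",
  Multi = "Multi",
}

// Formats an array-valued query parameter either as a single joined value or,
// for Multi, as a repeated name=value pair per element.
function formatQueryValues(
  name: string,
  values: string[],
  format: QueryCollectionFormat
): string {
  return format === QueryCollectionFormat.Multi
    ? values.map((v) => `${name}=${encodeURIComponent(v)}`).join("&")
    : `${name}=${values.map((v) => encodeURIComponent(v)).join(format)}`;
}

console.log(formatQueryValues("tags", ["a", "b"], QueryCollectionFormat.Csv));   // tags=a,b
console.log(formatQueryValues("tags", ["a", "b"], QueryCollectionFormat.Multi)); // tags=a&tags=b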
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst agentNotSupportedInBrowser = new Error(\"AgentPolicy is not supported in browser environment\");\n\nexport function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw agentNotSupportedInBrowser;\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw agentNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw agentNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ProxySettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
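ThrottlingRetryPolicy above honours both Retry-After forms allowed by RFC 7231, delta-seconds and an HTTP-date. A self-contained restatement of that parsing; the function name and the explicit now parameter are ours.

// Returns the wait in milliseconds implied by a Retry-After header value,
// or undefined when the value is neither a number of seconds nor a parseable date.
function parseRetryAfterMs(headerValue: string, now: number = Date.now()): number | undefined {
  const seconds = Number(headerValue);
  if (!Number.isNaN(seconds)) {
    return seconds * 1000;
  }
  const diff = Date.parse(headerValue) - now;
  return Number.isNaN(diff) ? undefined : diff;
}

console.log(parseRetryAfterMs("120")); // 120000
console.log(parseRetryAfterMs("Fri, 31 Dec 1999 23:59:59 GMT", Date.parse("Fri, 31 Dec 1999 23:58:59 GMT"))); // 60000
console.log(parseRetryAfterMs("not-a-date")); // undefined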
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
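The AzureIdentityCredentialAdapter above lets a modern TokenCredential be used wherever the legacy ServiceClientCredentials shape is expected. A small sketch of that contract, assuming only what the vendored file itself imports from @azure/core-auth; the fake credential and its token value are purely illustrative.

import { AccessToken, TokenCredential } from "@azure/core-auth";

// Anything implementing TokenCredential.getToken() can be wrapped by the adapter.
// This stub is ours, for illustration only; real code would use @azure/identity.
const fakeCredential: TokenCredential = {
  async getToken(): Promise<AccessToken | null> {
    return { token: "<opaque-jwt>", expiresOnTimestamp: Date.now() + 3600 * 1000 };
  },
};

// With the vendored adapter this would look like:
//   const adapter = new AzureIdentityCredentialAdapter(fakeCredential);
//   await adapter.signRequest(webResource); // sets "authorization: Bearer <token>"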
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
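// --- Usage sketch (not part of the bundled file): a custom RequestPolicyFactory to
// --- illustrate the fold in sendRequest above, which wraps the HttpClient with each
// --- factory from last to first. Names are illustrative; exports assumed from the
// --- package entry point of @azure/ms-rest-js.
import {
  BaseRequestPolicy,
  HttpOperationResponse,
  RequestPolicy,
  RequestPolicyFactory,
  RequestPolicyOptionsLike,
  WebResourceLike,
} from "@azure/ms-rest-js";

class StampHeaderPolicy extends BaseRequestPolicy {
  constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {
    super(nextPolicy, options);
  }
  sendRequest(request: WebResourceLike): Promise<HttpOperationResponse> {
    // Stamp a header, then delegate to the next policy in the chain.
    request.headers.set("x-example-stamp", new Date().toISOString());
    return this._nextPolicy.sendRequest(request);
  }
}

export function stampHeaderPolicy(): RequestPolicyFactory {
  return {
    create: (nextPolicy, options) => new StampHeaderPolicy(nextPolicy, options),
  };
}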
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
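// --- Usage sketch (not part of the bundled file): the per-operation options consumed
// --- above (operationArguments.options). All fields are optional; the header name is
// --- illustrative and @azure/abort-controller is already in this dependency tree.
import { AbortController } from "@azure/abort-controller";
import { RequestOptionsBase } from "@azure/ms-rest-js";

const abortController = new AbortController();
const requestOptions: RequestOptionsBase = {
  customHeaders: { "x-ms-example": "demo" },
  abortSignal: abortController.signal,
  timeout: 30000, // per-request timeout passed through to the HTTP client
  onDownloadProgress: (progress) => console.log(progress.loadedBytes),
};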
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
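// --- Usage sketch (not part of the bundled file): the two call styles supported by
// --- sendOperationRequest above, a plain promise or the legacy ServiceCallback form
// --- receiving (err, parsedBody, request, rawResponse). The client, arguments, and
// --- spec are assumed to exist elsewhere.
import { OperationArguments, OperationSpec, ServiceClient } from "@azure/ms-rest-js";

declare const client: ServiceClient;
declare const operationArguments: OperationArguments;
declare const operationSpec: OperationSpec;

client.sendOperationRequest(operationArguments, operationSpec).then((result) => {
  console.log(result._response.status); // flattened result keeps the raw response on _response
});

client.sendOperationRequest(operationArguments, operationSpec, (err, parsedBody) => {
  if (err) {
    console.error(err);
  } else {
    console.log(parsedBody);
  }
});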
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
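// --- Usage sketch (not part of the bundled file): customizing the default pipeline
// --- assembled by createDefaultRequestPolicyFactories above. When requestPolicyFactories
// --- is a function it receives the defaults and may return a replacement array.
// --- stampHeaderPolicy is the illustrative factory from the earlier sketch
// --- (hypothetical local module).
import { RequestPolicyFactory, ServiceClient } from "@azure/ms-rest-js";
import { stampHeaderPolicy } from "./stampHeaderPolicy";

const customizedClient = new ServiceClient(undefined, {
  baseUri: "https://example.contoso.com", // placeholder endpoint
  requestPolicyFactories: (defaultFactories: RequestPolicyFactory[]) => [
    stampHeaderPolicy(),
    ...defaultFactories,
  ],
});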
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
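// --- Standalone sketch (not part of the bundled file) mirroring
// --- getPropertyFromParameterPath above: walk a ["options", "timeout"]-style path and
// --- report whether every segment was present (inherited properties count, hence `in`).
interface Lookup {
  found: boolean;
  value?: unknown;
}

function findProperty(parent: { [key: string]: any } | undefined, path: string[]): Lookup {
  let current: any = parent;
  for (const segment of path) {
    if (current != undefined && segment in current) {
      current = current[segment];
    } else {
      return { found: false };
    }
  }
  return { found: true, value: current };
}

// findProperty({ options: { timeout: 30 } }, ["options", "timeout"]) -> { found: true, value: 30 }
// findProperty({ options: {} }, ["options", "timeout"])              -> { found: false }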
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
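// --- Usage sketch (not part of the bundled file): the logPolicy factory above can be
// --- appended to any pipeline; with no argument it logs via console.log, and a custom
// --- logger only needs to accept the formatted string. The toggle is illustrative.
import { logPolicy } from "@azure/ms-rest-js";

const httpDebugEnabled = false; // illustrative switch
const quietLogPolicyFactory = logPolicy((message: string) => {
  if (httpDebugEnabled) {
    console.debug(message);
  }
});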
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
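// --- Usage sketch (not part of the bundled file): the two credential classes above both
// --- stamp the Authorization header via signRequest. Token, password, and URL values
// --- are placeholders.
import {
  BasicAuthenticationCredentials,
  TokenCredentials,
  WebResource,
} from "@azure/ms-rest-js";

const bearerCredentials = new TokenCredentials("<access-token>");
const basicCredentials = new BasicAuthenticationCredentials("<user>", "<password>");

const request = new WebResource("https://example.contoso.com/items", "GET"); // placeholder URL
bearerCredentials.signRequest(request).then((signed) => {
  console.log(signed.headers.get("authorization")); // "Bearer <access-token>"
});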
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
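// --- Usage sketch (not part of the bundled file): ApiKeyCredentials above accepts header
// --- and/or query key/value pairs; the names are illustrative, not mandated by the class.
import { ApiKeyCredentials } from "@azure/ms-rest-js";

const headerKeyCredentials = new ApiKeyCredentials({
  inHeader: { "x-api-key": "<key>" },
});
const queryKeyCredentials = new ApiKeyCredentials({
  inQuery: { "subscription-key": "<key>" },
});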
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","HttpPipelineLogLevel","utils.stripRequest","utils.stripResponse","utils\n .delay","utils.generateUuid","utils\n .delay","retry","shouldRetry","updateRetryData","utils.delay","QueryCollectionFormat","MSRestConstants","DefaultHttpClient","utils.prepareXMLRootList","utils.isPrimitiveType","DEFAULT_AUTHORIZATION_SCHEME","HeaderConstants","base64.encodeString"],"mappings":";;;;;;;;;;IAAA;IACA;IAEA;;;IAGA,SAAS,YAAY,CAAC,UAAkB;QACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;IAClC,CAAC;aA4Ee,iBAAiB,CAAC,MAAY;QAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YACzC,OAAO,KAAK,CAAC;SACd;QAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;YACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;YAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;YAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;YAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;YACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;YACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;YACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;YACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;YACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;QAME,qBAAY,UAA2B;YACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,UAAU,EAAE;gBACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;oBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;iBAC9C;aACF;SACF;;;;;;;QAQM,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;YACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;gBAC3C,IAAI,EAAE,UAAU;gBAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;aAC9B,CAAC;SACH;;;;;;QAOM,yBAAG,GAAV,UAAW,UAAkB;YAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;YACtE,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;SAC3C;;;;QAKM,8BAAQ,GAAf,UAAgB,UAAkB;YAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;SACrD;;;;;;QAOM,4BAAM,GAAb,UAAc,UAAkB;YAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;YAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;YAClD,OAAO,MAAM,CAAC;SACf;;;;QAKM,gCAAU,GAAjB;YACE,IAAM,MAAM,GAAmB,EAAE,CAAC;YAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;gBACvD,MAAM,CAAC,MAAM,CAAC,IAAI,
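// --- Usage sketch (not part of the bundled file): both EventGrid wrappers above reduce to
// --- an ApiKeyCredentials instance that sends the key in the "aeg-sas-key" header. Key
// --- values are placeholders, and this assumes both classes are re-exported from the
// --- package entry point.
import { DomainCredentials, TopicCredentials } from "@azure/ms-rest-js";

const topicCredentials = new TopicCredentials("<topic-access-key>");
const domainCredentials = new DomainCredentials("<domain-access-key>");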
CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;aAClD;YACD,OAAO,MAAM,CAAC;SACf;;;;QAKM,kCAAY,GAAnB;YACE,IAAM,OAAO,GAAiB,EAAE,CAAC;YACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;gBACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;aAC3C;YACD,OAAO,OAAO,CAAC;SAChB;;;;QAKM,iCAAW,GAAlB;YACE,IAAM,WAAW,GAAa,EAAE,CAAC;YACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;YAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;aACnC;YACD,OAAO,WAAW,CAAC;SACpB;;;;QAKM,kCAAY,GAAnB;YACE,IAAM,YAAY,GAAa,EAAE,CAAC;YAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;YAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;aACrC;YACD,OAAO,YAAY,CAAC;SACrB;;;;QAKM,4BAAM,GAAb;YACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;SAC1B;;;;QAKM,8BAAQ,GAAf;YACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;SACtC;;;;QAKM,2BAAK,GAAZ;YACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;SAC3C;QACH,kBAAC;IAAD,CAAC;;ICrOD;IACA;IAEA;;;;AAIA,aAAgB,YAAY,CAAC,KAAa;QACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;IACrB,CAAC;IAED;;;;AAIA,aAAgB,eAAe,CAAC,KAAiB;QAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;QACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;SACtC;QACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;IACnB,CAAC;IAED;;;;AAIA,aAAgB,YAAY,CAAC,KAAa;QACxC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAC/B,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;SACnC;QACD,OAAO,GAAG,CAAC;IACb,CAAC;;IClCD;IACA;IACA;IACA,IAAI,eAAe,CAAC;IACpB,IAAI,KAAK,GAAG,IAAI,UAAU,CAAC,EAAE,CAAC,CAAC;AAC/B,IAAe,SAAS,GAAG,GAAG;IAC9B;IACA,EAAE,IAAI,CAAC,eAAe,EAAE;IACxB;IACA;IACA,IAAI,eAAe,GAAG,OAAO,MAAM,KAAK,WAAW,IAAI,MAAM,CAAC,eAAe,IAAI,MAAM,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,OAAO,QAAQ,KAAK,WAAW,IAAI,OAAO,QAAQ,CAAC,eAAe,KAAK,UAAU,IAAI,QAAQ,CAAC,eAAe,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACrP;IACA,IAAI,IAAI,CAAC,eAAe,EAAE;IAC1B,MAAM,MAAM,IAAI,KAAK,CAAC,0GAA0G,CAAC,CAAC;IAClI,KAAK;IACL,GAAG;AACH;IACA,EAAE,OAAO,eAAe,CAAC,KAAK,CAAC,CAAC;IAChC;;AClBA,gBAAe,qHAAqH;;sIAAC,lICErI,SAAS,QAAQ,CAAC,IAAI,EAAE;IACxB,EAAE,OAAO,OAAO,IAAI,KAAK,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACtD,CAAC;;ICHD;IACA;IACA;IACA;AACA;IACA,IAAI,SAAS,GAAG,EAAE,CAAC;AACnB;IACA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,EAAE,CAAC,EAAE;IAC9B,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,QAAQ,CAAC,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;IACrD,CAAC;AACD;IACA,SAAS,SAAS,CAAC,GAAG,EAAE;IACxB,EAAE,IAAI,MAAM,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,SAAS,GAAG,SAAS,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC;IACrF;IACA;IACA,EAAE,IAAI,IAAI,GAAG,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAA
G,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,EAAE,CAAC,CAAC,EAAE,WAAW,EAAE,CAAC;IACzgB;IACA;IACA;IACA;AACA;IACA,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;IACvB,IAAI,MAAM,SAAS,CAAC,6BAA6B,CAAC,CAAC;IACnD,GAAG;AACH;IACA,EAAE,OAAO,IAAI,CAAC;IACd,CAAC;;ICxBD,SAAS,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE;IAClC,EAAE,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;IAC1B,EAAE,IAAI,IAAI,GAAG,OAAO,CAAC,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,IAAI,GAAG,GAAG,CAAC;AACtD;IACA,EAAE,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;IAClC,EAAE,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClC;IACA,EAAE,IAAI,GAAG,EAAE;IACX,IAAI,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;AACzB;IACA,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,EAAE,CAAC,EAAE;IACjC,MAAM,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC;IAChC,KAAK;AACL;IACA,IAAI,OAAO,GAAG,CAAC;IACf,GAAG;AACH;IACA,EAAE,OAAO,SAAS,CAAC,IAAI,CAAC,CAAC;IACzB,CAAC;;ICrBD;IACA;AAEA,QAAa,SAAS,GAAG;;;;;;QAMvB,aAAa,EAAE,OAAO;;;;;;;QAQtB,IAAI,EAAE,OAAO;;;;;;;QAQb,KAAK,EAAE,QAAQ;;;;;;;QAQf,UAAU,EAAE,YAAY;;;;;;;QAQxB,WAAW,EAAE,aAAa;;;;QAK1B,QAAQ,EAAE,UAAU;;;;QAKpB,SAAS,EAAE,WAAW;QAEtB,aAAa,EAAE;;;;;;;YAOb,SAAS,EAAE;gBACT,GAAG,EAAE,KAAK;gBACV,GAAG,EAAE,KAAK;gBACV,MAAM,EAAE,QAAQ;gBAChB,IAAI,EAAE,MAAM;gBACZ,KAAK,EAAE,OAAO;gBACd,IAAI,EAAE,MAAM;gBACZ,KAAK,EAAE,OAAO;aACf;YAED,WAAW,EAAE;gBACX,eAAe,EAAE,GAAG;aACrB;SACF;;;;QAKD,eAAe,EAAE;;;;;;;YAOf,aAAa,EAAE,eAAe;YAE9B,oBAAoB,EAAE,QAAQ;;;;;;;;;YAU9B,WAAW,EAAE,aAAa;;;;;;;YAQ1B,UAAU,EAAE,YAAY;SACzB;KACF;;IC3GD;AACA,IAQA;;;AAGA,QAAa,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;QAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;QACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;QAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B,IAUA;;;;;;AAMA,aAAgB,SAAS,CAAC,GAAW;QACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;aAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;aACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;aACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;aACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;aACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;;;AAQA,aAAgB,aAAa,CAAC,QAA+B;QAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;QACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;QAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;QAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;QAC1C,OAAO,gBAAgB,CAAC;IAC1B,CAAC;IAED;;;;;;;;AAQA,aAAgB,YAAY,CAAC,OAAwB;QACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;QACxC,IAAI,eAAe,CAAC,OAAO,EAAE;YAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;SACjD;QACD,OAAO,eAAe,CAAC;IACzB,CAAC;IAED;;;;;;;AAOA,aAAgB,WAAW,CAAC,IAAY;QACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;QACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACnC,CAAC;AAED,IA4BA;;;;;AAKA,aAAgB,YAAY;QAC1B,OAAOA,EAAM,EAAE,CAAC;IAClB,CAAC;IAED;;;;;;;;;;;AAWA,aAAgB,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;QACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;YACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;SACtC,CAAC,CAAC;QACH,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,IAeA;;;;;;AAMA,aAAgB,KAAK,CAAI,CAAS,EAAE,KAAS;QAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,GAAA,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;IACvE,CAAC;IAqBD;;;;;;AAMA,aAAgB,iBAAiB,CAAC,OAAqB;QACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;YACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;SACzD;QACD,OAAO,UAAC,EAAY;YAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;gBACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;aACrB,EACD,UAAC,GAAU;gBACT,EAAE,CAAC,GAAG,CAAC,CAAC;aACT,CACF,CAAC;SACH,CAAC;IACJ,CAAC;IAED;;;;;AAKA,aAAgB,wBAAwB,CAAI,OAAuC;QACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,
UAAU,EAAE;YACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;SACzD;QACD,OAAO,UAAC,EAAsB;YAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;gBAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;aAC3E,EACD,UAAC,GAAU;gBACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;aAC3B,CACF,CAAC;SACH,CAAC;IACJ,CAAC;AAED,aAAgB,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;QAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACb;QACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;IAChC,CAAC;IAED;;;;;AAKA,aAAgB,WAAW,CAAC,UAAe,EAAE,WAAkB;QAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;YAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;gBAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;aAC1D,CAAC,CAAC;SACJ,CAAC,CAAC;IACL,CAAC;IAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;IAElM;;;;;AAKA,aAAgB,UAAU,CAAC,KAAa;QACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACzC,CAAC;IAED;;;;;;;AAOA,aAAgB,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;QAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;IAC5F,CAAC;IAED;;;;;;AAMA,aAAgB,eAAe,CAAC,KAAU;QACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;IACtF,CAAC;;IC/RD;AACA;QAME,oBACkB,YAAyC,EACzC,KAAe;YADf,6BAAA,EAAA,iBAAyC;YAAzC,iBAAY,GAAZ,YAAY,CAA6B;YACzC,UAAK,GAAL,KAAK,CAAU;SAC7B;QAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;YAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;gBACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;aACH,CAAC;YACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;gBACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;gBACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;oBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;oBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;oBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;oBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;iBACtD;gBACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;oBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;iBACtC;gBACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;oBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;iBACxC;gBACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;oBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;iBACtC;gBACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;oBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;iBACxC;gBACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;oBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC1C;gBACD,IAAI,OAAO,EAAE;oBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;oBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;wBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;qBACpC;iBACF;gBACD,IACE,WAAW;oBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAA,CAAC,EAC5E;oBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;iBAC5C;aACF;SACF;;;;;;;;;;;;QAaD,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;YACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;YACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;YAC9C,IAAI,CAAC,UAAU,EAAE;gBACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;aACrC;YACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAC7C,OAAO,GAAG,EAAE,CAAC;aACd;YAED,IAAI,MAAM,C
AAC,UAAU,EAAE;gBACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;aAC9B;;;;;;;;;;YAYO,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;YAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;gBAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;aACvD;YACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;gBAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;aAC/D;YACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;gBACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;aAClD;YAED,IAAI,MAAM,IAAI,SAAS,EAAE;gBACvB,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM;;gBAEL,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;gBACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;oBACxC,OAAO,GAAG,MAAM,CAAC;iBAClB;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;oBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;iBAC/D;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;oBAChD,IAAM,UAAU,GAAe,MAAoB,CAAC;oBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;iBAChF;qBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;oBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBAC9D;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;iBACtD;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;iBACtD;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACrF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;oBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACzF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;iBACvF;aACF;YACD,OAAO,OAAO,CAAC;SAChB;;;;;;;;;;;;QAaD,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;YAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;oBAIzE,YAAY,GAAG,EAAE,CAAC;iBACnB;;gBAED,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;oBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;iBACpC;gBACD,OAAO,YAAY,CAAC;aACrB;YAED,IAAI,OAAY,CAAC;YACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;YACpC,IAAI,CAAC,UAAU,EAAE;gBACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;aACrC;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC/F;iBAAM;gBACL,IAAI,IAAI,CAAC,KAAK,EAAE;;;;;;oBAMd,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;qBAClC;iBACF;gBAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;oBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;oBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;wBAClB,OAAO,GAAG,YAAY,CAAC;qBACxB;iBACF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;oBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;wBAC3B,OAAO,GAAG,IAAI,CAAC;qBAChB;yBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;wBACnC,OAAO,GAAG,KAAK,CAAC;qBACjB;yBAAM;wBACL,OAAO,GAAG,YAAY,CAAC;qBACxB;iBACF;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;oBACzF,OAAO,GAAG,YAAY,CAAC;iBACxB;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;oBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;iBAClC;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;iBACxC;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAGC,YAAmB,CAAC,YAAY,CAAC,CAAC;iBAC7C;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;oBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;iBAC9C;qBAAM,IAAI,UAAU,CAAC,KAAK,CAA
C,cAAc,CAAC,KAAK,IAAI,EAAE;oBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;iBAC7F;qBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;oBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;iBACH;aACF;YAED,IAAI,MAAM,CAAC,UAAU,EAAE;gBACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC;SAChB;QACH,iBAAC;IAAD,CAAC,IAAA;IAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;QACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;QACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;YAC1C,EAAE,GAAG,CAAC;SACP;QACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,iBAAiB,CAAC,MAAW;QACpC,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,SAAS,CAAC;SAClB;QACD,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;SAC5F;;QAED,IAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,CAAC,CAAC;;QAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IACnE,CAAC;IAED,SAAS,oBAAoB,CAAC,GAAW;QACvC,IAAI,CAAC,GAAG,EAAE;YACR,OAAO,SAAS,CAAC;SAClB;QACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;SACxF;;QAED,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;;QAElD,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;IAClC,CAAC;IAED,SAAS,kBAAkB,CAAC,IAAwB;QAClD,IAAM,OAAO,GAAa,EAAE,CAAC;QAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;QACtB,IAAI,IAAI,EAAE;YACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;YAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;gBAAxB,IAAM,IAAI,iBAAA;gBACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;oBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;iBACvD;qBAAM;oBACL,YAAY,IAAI,IAAI,CAAC;oBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;oBAC3B,YAAY,GAAG,EAAE,CAAC;iBACnB;aACF;SACF;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IAED,SAAS,cAAc,CAAC,CAAgB;QACtC,IAAI,CAAC,CAAC,EAAE;YACN,OAAO,SAAS,CAAC;SAClB;QAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;SAC3B;QACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;IAClD,CAAC;IAED,SAAS,cAAc,CAAC,CAAS;QAC/B,IAAI,CAAC,CAAC,EAAE;YACN,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC5B,CAAC;IAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;QAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;YACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;oBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;iBAC9E;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;oBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;iBAChF;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAC9C,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;oBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;iBACH;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;oBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;iBAC/E;aACF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;gBAChC,IACE,UAAU,KAAK,QAAQ;oBACvB,UAAU,KAAK,UAAU;oBACzB,EAAE,KAAK,YAAY,WAAW,CAAC;oBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;oBAC1B,EAAE,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;oBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;iBACH;aACF;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;QAClF,IAAI,CAAC,aAAa,EAAE
;YAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;SACH;QACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;YACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;aACnD;YACD,OAAO,IAAI,KAAK,KAAK,CAAC;SACvB,CAAC,CAAC;QACH,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;SACH;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;QAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;gBAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;aAC9D;YACD,KAAK,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;SACvC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;QAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;gBAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;aAC9D;YACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;SAClC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;QAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;YACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBACvC,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;iBAC5F;gBACD,KAAK;oBACH,KAAK,YAAY,IAAI;0BACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;0BACpC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;aACtD;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;iBAC5F;gBACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;aACrF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;gBACzD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;iBAC7F;gBACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;aACrF;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IACE,EACE,KAAK,YAAY,IAAI;qBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;oBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;wBAChF,mDAAmD,CACtD,CAAC;iBACH;gBACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;aAC/B;iBAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBAClD,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;oBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;iBACH;gBACD,KAAK,GAAG,KAAK,CAAC;aACf;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;QAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;YAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;SACzD;QACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;QACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;YACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;iBACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;SACH;QACD,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;SACzE;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;QAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;SAC1D;QACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;QACpC,IAAI,CAAC,SAAS,IAAI,OA
AO,SAAS,KAAK,QAAQ,EAAE;YAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;iBACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;SACH;QACD,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;YAAlC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;SAC5F;QACD,OAAO,cAAc,CAAC;IACxB,CAAC;IAED;;;;;IAKA,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;QAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;QAC7C,IAAI,CAAC,UAAU,EAAE;YACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;YACxC,IAAI,CAAC,SAAS,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,4BAAyB,UAAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;aACH;YAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;YACvD,IAAI,CAAC,WAAW,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;aACnF;YACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;YAC9C,IAAI,CAAC,UAAU,EAAE;gBACf,MAAM,IAAI,KAAK,CACb,qDAAqD;qBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;aACH;SACF;QAED,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;QAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;YAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;SACzE;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;YACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;gBAAtC,IAAM,GAAG,SAAA;gBACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;gBACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;oBAC3B,SAAS;iBACV;gBAED,IAAI,QAAQ,SAAoB,CAAC;gBACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;gBAChC,IAAI,UAAU,CAAC,KAAK,EAAE;oBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;qBACnC;yBAAM;wBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;qBACpE;iBACF;qBAAM;oBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;oBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;oBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;wBAAzB,IAAM,QAAQ,cAAA;wBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;wBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;4BACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;yBAC7B;wBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;qBACvC;iBACF;gBAED,IAAI,YAAY,IAAI,SAAS,EAAE;oBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;0BAChC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;0BAChD,UAAU,CAAC;oBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;oBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;oBAC5F,IACE,wBAAwB;wBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;wBAC3C,WAAW,IAAI,SAAS,EACxB;wBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;qBACrC;oBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;oBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;wBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;;;;4BAIjC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;4BACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;yBAC5C;6BAAM,IAAI,cAAc,CAAC,YAAY,EAAE;4BACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;yBAChF;6BAAM;4BACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;yBAC1C;qBACF;iBACF;aACF;YAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;YACpE,IAAI,0BAA0B,EAAE;gBAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;wCAC/B,cAAc;oBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,GAAA,CAAC,CAAC;oBAC5E,IAAI,oBAAoB,EAAE;wBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;qBACH;;gBARH,KAAK,IAAM,cAAc,IAAI,MAAM;4BAAxB,cAAc
;iBASxB;aACF;YAED,OAAO,OAAO,CAAC;SAChB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,oBAAoB,CAAC,YAAoB;QAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;IAC3C,CAAC;IAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;QAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;YAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;SACnF;QAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;QAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;QAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;YAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;YACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;YACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;gBACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;aACxD;YAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;YAC3F,IAAI,sBAAsB,EAAE;gBAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;gBAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;oBAA9C,IAAM,SAAS,SAAA;oBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;wBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;qBACH;oBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;iBACtC;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;aAC5B;iBAAM,IAAI,UAAU,CAAC,KAAK,EAAE;gBAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;oBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;iBACH;qBAAM;oBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;oBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;oBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;wBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;wBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;wBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;wBAC3D,IAAI,kBAAkB,EAAE;4BACtB,iBAAiB,GAAG,EAAE,CAAC;yBACxB;qBACF;oBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;iBACH;aACF;iBAAM;;gBAEL,IAAI,gBAAgB,SAAA,CAAC;gBACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;gBAEvB,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAArB,IAAM,IAAI,cAAA;oBACb,IAAI,CAAC,GAAG;wBAAE,MAAM;oBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;iBACjB;gBACD,gBAAgB,GAAG,GAAG,CAAC;gBACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;gBAUtE,IACE,wBAAwB;oBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;oBAC3C,gBAAgB,IAAI,SAAS,EAC7B;oBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;iBAC1C;gBAED,IAAI,eAAe,SAAA,CAAC;;gBAEpB,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;oBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;oBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;;;oBAGF,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;wBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;wBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;4BACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;yBAC5B;qBACF;oBACD,QAAQ,GAAG,aAAa,CAAC;iBAC1B;qBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;oBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;oBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;iBACjC;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;gBACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;oBACvC,IAAM,K
AAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;oBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;wBACjC,OAAO,KAAK,CAAC;qBACd;iBACF;gBACD,OAAO,IAAI,CAAC;aACb,CAAC;YAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;gBAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;oBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;iBACH;aACF;SACF;aAAM,IAAI,YAAY,EAAE;YACvB,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAAxC,IAAM,GAAG,SAAA;gBACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;oBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;oBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;oBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBACnC;aACF;SACF;QAED,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;;QAGlB,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;QAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;iBACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;SACH;QACD,IAAI,YAAY,EAAE;YAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;YAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAAxC,IAAM,GAAG,SAAA;gBACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;aACpF;YACD,OAAO,cAAc,CAAC;SACvB;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;;QAGlB,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;QACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;iBACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;SACH;QACD,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;gBAEhC,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;aAC/B;YAED,IAAM,SAAS,GAAG,EAAE,CAAC;YACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;aACxF;YACD,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;QAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;QAC5F,IAAI,wBAAwB,EAAE;YAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;YAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;gBAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;gBACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;oBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;oBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;0BAC3B,kBAAkB;0BAClB,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;oBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;oBACrF,IAAI,iBAAiB,EAAE;wBACrB,MAAM,GAAG,iBAAiB,CAAC;qBAC5B;iBACF;aACF;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;QAEvB,QACE,MAAM,CAAC,IAAI,CAAC,wBAAwB;YACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;YACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;IACJ,CAAC;IAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;QAClF,QACE,QAAQ;YACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;YACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;IACJ,CAAC;IAoHD;AACA,aAAgB,eAAe,CAAC,WAAgB;QAC9C,IAAI,WAAW,IAAI,SAAS;YAAE,OAAO,SAAS,CAAC;QAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;YACrC,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;YAClD,OAAO,WAAW,CAAC;SACpB;aAAM,IAAI,WAAW,YAAY,IAAI,EAAE;YACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;SAClC;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;YACrC,IAAM,KAAK,GAAG,EAAE,CAAC;YACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;aAC7C;YACD,OAAO,KAAK,CAAC;SACd;aAAM,IAAI,
OAAO,WAAW,KAAK,QAAQ,EAAE;YAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;YAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;gBAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;aAC/D;YACD,OAAO,UAAU,CAAC;SACnB;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;;IAGA,SAAS,OAAO,CAAmB,CAAW;QAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;QACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;YAAhB,IAAM,GAAG,UAAA;YACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;SACnB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,QAAa,UAAU,GAAG,OAAO,CAAC;QAChC,WAAW;QACX,SAAS;QACT,WAAW;QACX,WAAW;QACX,MAAM;QACN,UAAU;QACV,iBAAiB;QACjB,YAAY;QACZ,MAAM;QACN,QAAQ;QACR,QAAQ;QACR,UAAU;QACV,QAAQ;QACR,QAAQ;QACR,UAAU;QACV,UAAU;KACX,CAAC;;ICtiCF;AACA,aAgKgB,iBAAiB,CAAC,MAAW;QAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,KAAK,CAAC;SACd;QACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;YAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;YACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;YAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;YACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;YACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;YACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;YACA,OAAO,IAAI,CAAC;SACb;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;AAQA;QAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;YAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;YACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;YAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;YAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;YACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;YAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;YAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;YAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;YACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;YAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;SACpC;;;;;;QAOD,+CAAyB,GAAzB;YACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;aACjD;SACF;;;;;;QAOD,6BAAO,GAAP,UAAQ,OAA8B;YACpC,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;aAC/C;YAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;aACrD;YAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;aACH;YAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;iBACvF,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;gBACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;;YAGD,IAAI,OAAO,CAAC,GAAG,EAAE;gBACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;oBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;iBAC1D;gBACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;aACxB;;YAGD,IAAI,OAAO,CAAC,MAAM,EAAE;gBAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;gBAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;wBACrB,OAAO,CAAC,MAAM;wBACd,4CAA4C;wBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;iBACH;aACF;YACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;YAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;gBAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;gBACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;oBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;iBACnE;gBACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;
oBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;iBAClD;gBACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;gBAChC,IAAI,KAAG,GACL,OAAO;qBACN,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;qBACjC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAY,CAAC,CAAC;gBACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;gBAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;oBAC/B,IAAI,CAAC,gBAAc,EAAE;wBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;qBACH;oBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;wBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;wBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;wBAC5E,IACE,SAAS,KAAK,IAAI;4BAClB,SAAS,KAAK,SAAS;4BACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;4BACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;iCAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;iCACJ,8EAA0E,aAAa,kCAA6B,CAAA;iCACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;yBACH;wBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;4BAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;yBACxD;wBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;4BAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;gCACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;6BACH;4BACD,IAAI,SAAS,CAAC,eAAe,EAAE;gCAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;6BAC1C;iCAAM;gCACL,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;6BAC9D;yBACF;qBACF,CAAC,CAAC;iBACJ;gBACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;aAChB;;YAGD,IAAI,OAAO,CAAC,eAAe,EAAE;gBAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;gBAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;oBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;wBAC3E,yFAAqF;wBACrF,mJAA2I,CAC9I,CAAC;iBACH;;gBAED,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;iBACjB;;gBAED,IAAM,WAAW,GAAG,EAAE,CAAC;;gBAEvB,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;gBAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;oBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;oBACxD,IAAI,UAAU,EAAE;wBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;4BACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;yBAC7D;6BAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;gCACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;6BACH;4BACD,IAAI,UAAU,CAAC,eAAe,EAAE;gCAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;gCAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;6BAC/C;iCAAM;gCACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;gCAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;6BACnE;yBACF;qBACF;iBACF;;gBAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACnC;;YAGD,IAAI,OAAO,CAAC,OAAO,EAAE;gBACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;gBAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;oBAAlD,IAAM,UAAU,SAAA;oBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;iBACnD;aACF;;YAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;gBACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;aAC9C;;YAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;gBAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;aAC5D;;YAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;gBACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;aACrE;;YAGD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;YACzB,IAAI,OAA
O,CAAC,IAAI,IAAI,SAAS,EAAE;;gBAE7B,IAAI,OAAO,CAAC,YAAY,EAAE;oBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;wBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;qBAClD;oBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;wBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;qBAC9D;iBACF;qBAAM;oBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;wBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;qBACH;oBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;wBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;qBAC1C;iBACF;aACF;YAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;YACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;YACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;YACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;YAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;YAErD,OAAO,IAAI,CAAC;SACb;;;;;QAMD,2BAAK,GAAL;YACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;YAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;aACjC;YAED,IAAI,IAAI,CAAC,aAAa,EAAE;gBACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;gBAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;aACnD;YAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;gBAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;aAC/D;YAED,OAAO,MAAM,CAAC;SACf;QACH,kBAAC;IAAD,CAAC;;IChhBD;IACA;AACA;IACA;IACA;AACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;AACA;IACA,IAAI,aAAa,GAAG,SAAS,CAAC,EAAE,CAAC,EAAE;IACnC,IAAI,aAAa,GAAG,MAAM,CAAC,cAAc;IACzC,SAAS,EAAE,SAAS,EAAE,EAAE,EAAE,YAAY,KAAK,IAAI,UAAU,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,EAAE,CAAC;IACpF,QAAQ,UAAU,CAAC,EAAE,CAAC,EAAE,EAAE,KAAK,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IACnF,IAAI,OAAO,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IAC/B,CAAC,CAAC;AACF;AACA,IAAO,SAAS,SAAS,CAAC,CAAC,EAAE,CAAC,EAAE;IAChC,IAAI,aAAa,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;IACxB,IAAI,SAAS,EAAE,GAAG,EAAE,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE;IAC3C,IAAI,CAAC,CAAC,SAAS,GAAG,CAAC,KAAK,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,SAAS,GAAG,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;IACzF,CAAC;AACD;AACA,IAAO,IAAI,QAAQ,GAAG,WAAW;IACjC,IAAI,QAAQ,GAAG,MAAM,CAAC,MAAM,IAAI,SAAS,QAAQ,CAAC,CAAC,EAAE;IACrD,QAAQ,KAAK,IAAI,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE;IAC7D,YAAY,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,CAAC;IAC7B,YAAY,KAAK,IAAI,CAAC,IAAI,CAAC,EAAE,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACzF,SAAS;IACT,QAAQ,OAAO,CAAC,CAAC;IACjB,MAAK;IACL,IAAI,OAAO,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,SAAS,CAAC,CAAC;IAC3C,EAAC;AACD,AA2BA;AACA,IAAO,SAAS,SAAS,CAAC,OAAO,EAAE,UAAU,EAAE,CAAC,EAAE,SAAS,EAAE;IAC7D,IAAI,SAAS,KAAK,CAAC,KAAK,EAAE,EAAE,OAAO,KAAK,YAAY,CAAC,GAAG,KAAK,GAAG,IAAI,CAAC,CAAC,UAAU,OAAO,EAAE,EAAE,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE;IAChH,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,GAAG,OAAO,CAAC,EAAE,UAAU,OAAO,EAAE,MAAM,EAAE;IAC/D,QAAQ,SAAS,SAAS,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,
CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;IACnG,QAAQ,SAAS,QAAQ,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE;IACtG,QAAQ,SAAS,IAAI,CAAC,MAAM,EAAE,EAAE,MAAM,CAAC,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC,EAAE;IACtH,QAAQ,IAAI,CAAC,CAAC,SAAS,GAAG,SAAS,CAAC,KAAK,CAAC,OAAO,EAAE,UAAU,IAAI,EAAE,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC;IAC9E,KAAK,CAAC,CAAC;IACP,CAAC;AACD;AACA,IAAO,SAAS,WAAW,CAAC,OAAO,EAAE,IAAI,EAAE;IAC3C,IAAI,IAAI,CAAC,GAAG,EAAE,KAAK,EAAE,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,EAAE,GAAG,EAAE,EAAE,EAAE,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;IACrH,IAAI,OAAO,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,OAAO,MAAM,KAAK,UAAU,KAAK,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,WAAW,EAAE,OAAO,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;IAC7J,IAAI,SAAS,IAAI,CAAC,CAAC,EAAE,EAAE,OAAO,UAAU,CAAC,EAAE,EAAE,OAAO,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE;IACtE,IAAI,SAAS,IAAI,CAAC,EAAE,EAAE;IACtB,QAAQ,IAAI,CAAC,EAAE,MAAM,IAAI,SAAS,CAAC,iCAAiC,CAAC,CAAC;IACtE,QAAQ,OAAO,CAAC,EAAE,IAAI;IACtB,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC,CAAC;IACzK,YAAY,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,KAAK,CAAC,CAAC;IACpD,YAAY,QAAQ,EAAE,CAAC,CAAC,CAAC;IACzB,gBAAgB,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,MAAM;IAC9C,gBAAgB,KAAK,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC;IACxE,gBAAgB,KAAK,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS;IACjE,gBAAgB,KAAK,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,SAAS;IACjE,gBAAgB;IAChB,oBAAoB,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,EAAE;IAChI,oBAAoB,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,KAAK,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,EAAE;IAC1G,oBAAoB,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,MAAM,EAAE;IACzF,oBAAoB,IAAI,CAAC,IAAI,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,MAAM,EAAE;IACvF,oBAAoB,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;IAC1C,oBAAoB,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CAAC,CAAC,SAAS;IAC3C,aAAa;IACb,YAAY,EAAE,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC;IACvC,SAAS,CAAC,OAAO,CAAC,EAAE,EAAE,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,
CAAC,EAAE,SAAS,EAAE,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAE;IAClE,QAAQ,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,GAAG,KAAK,CAAC,EAAE,IAAI,EAAE,IAAI,EAAE,CAAC;IACzF,KAAK;IACL,CAAC;AACD,AA4CA;AACA,IAAO,SAAS,cAAc,GAAG;IACjC,IAAI,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,IAAI,SAAS,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;IACxF,IAAI,KAAK,IAAI,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE;IACpD,QAAQ,KAAK,IAAI,CAAC,GAAG,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,EAAE,CAAC,EAAE;IACzE,YAAY,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;IACxB,IAAI,OAAO,CAAC,CAAC;IACb,CAAC;;IC3JD;AACA;QAK+B,6BAAK;QAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;YANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;YAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;YAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;YACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;YAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;SAClD;QAzBe,4BAAkB,GAAW,oBAAoB,CAAC;QAClD,+BAAqB,GAAW,uBAAuB,CAAC;QACxD,qBAAW,GAAW,aAAa,CAAC;QAwBtD,gBAAC;KAAA,CA3B8B,KAAK;;ICNpC;AACA,IAQA;;;AAGA;QAAA;SAwGC;QAvGQ,mCAAW,GAAlB,UAAmB,OAAwB;YACzC,IAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;YAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;gBACzB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;aAC7E;YAED,IAAI,OAAO,CAAC,aAAa,EAAE;gBACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;aACvE;YAED,IAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;YACxC,IAAI,WAAW,EAAE;gBACf,IAAM,UAAQ,GAAG;oBACf,GAAG,CAAC,KAAK,EAAE,CAAC;iBACb,CAAC;gBACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;gBAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;oBACvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;wBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;qBACpD;iBACF,CAAC,CAAC;aACJ;YAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;YAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;YAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;gBACpB,IAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;gBAClC,IAAM,aAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;gBACnC,IAAM,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;oBAC9C,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;wBAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;qBACrD;yBAAM;wBACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;qBAChC;iBACF,CAAC;gBACF,KAAsB,UAAqB,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;oBAAxC,IAAM,OAAO,SAAA;oBAChB,IAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;oBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;wBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;4BACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;yBACxC;qBACF;yBAAM;wBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;qBACrC;iBACF;gBAED,OAAO,CAAC,IAAI,GAAG,aAAW,CAAC;gBAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;gBAC7B,IAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;;oBAEpE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iBACxC;aACF;YAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;YACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAC9C,KAAqB,UAA8B,EAA9B,KAAA,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;gBAAhD,IAAM,MAAM,SAAA;gBACf,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;aACjD;YACD,GAAG,CAA
C,YAAY,GAAG,OAAO,CAAC,kBAAkB,GAAG,MAAM,GAAG,MAAM,CAAC;;YAGhE,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,GAAG,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC;YAE3D,IAAI,OAAO,CAAC,kBAAkB,EAAE;gBAC9B,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;oBACjC,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;;wBAEvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;4BACtD,IAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,UAAC,OAAO,EAAE,MAAM;gCACjD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;oCAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;iCACvB,CAAC,CAAC;gCACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;6BAC7C,CAAC,CAAC;4BACH,OAAO,CAAC;gCACN,OAAO,SAAA;gCACP,MAAM,EAAE,GAAG,CAAC,MAAM;gCAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;gCAC1B,QAAQ,UAAA;6BACT,CAAC,CAAC;yBACJ;qBACF,CAAC,CAAC;oBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;iBAC7C,CAAC,CAAC;aACJ;iBAAM;gBACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;oBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;wBAC3B,OAAA,OAAO,CAAC;4BACN,OAAO,SAAA;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;4BAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;yBAC7B,CAAC;qBAAA,CACH,CAAC;oBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;iBAC7C,CAAC,CAAC;aACJ;SACF;QACH,oBAAC;IAAD,CAAC,IAAA;IAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;QAEpD,IAAI,QAAQ,EAAE;YACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,UAAC,QAAQ;gBACxC,OAAA,QAAQ,CAAC;oBACP,WAAW,EAAE,QAAQ,CAAC,MAAM;iBAC7B,CAAC;aAAA,CACH,CAAC;SACH;IACH,CAAC;IAED;AACA,aAAgB,YAAY,CAAC,GAAmB;QAC9C,IAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;QAC1C,IAAM,WAAW,GAAG,GAAG;aACpB,qBAAqB,EAAE;aACvB,IAAI,EAAE;aACN,KAAK,CAAC,SAAS,CAAC,CAAC;QACpB,KAAmB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;YAA3B,IAAM,IAAI,oBAAA;YACb,IAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YAChC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;YACxC,IAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;YAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;SAC9C;QACD,OAAO,eAAe,CAAC;IACzB,CAAC;IAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;QAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;YAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,+BAA6B,OAAO,CAAC,GAAK,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;SAAA,CACF,CAAC;QACF,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;YAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CAAC,yBAAyB,EAAE,SAAS,CAAC,qBAAqB,EAAE,SAAS,EAAE,OAAO,CAAC,CAC9F;SAAA,CACF,CAAC;QACF,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE;YAC9B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,gBAAc,GAAG,CAAC,OAAO,gBAAa,EACtC,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;SAAA,CACF,CAAC;IACJ,CAAC;;ICjLD;AACA,IAKA,WAAY,oBAAoB;;;;QAI9B,6DAAG,CAAA;;;;QAKH,iEAAK,CAAA;;;;QAKL,qEAAO,CAAA;;;;QAKP,+DAAI,CAAA;IACN,CAAC,EApBWG,4BAAoB,KAApBA,4BAAoB,QAoB/B;;IC1BD;IACA;IA2EA;;;;;AAKA,aAAgB,iBAAiB,CAAC,UAAmB;;;;;;QAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;QACF,QACE,cAAc;YACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;aAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;IACJ,CAAC;;IChGD;IACA;IAkDA;;;;;AAKA,aAAgB,0BAA0B,CAAC,SAA6B;QACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;IACnF,CAAC;AAED,aAAgB,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;QAEd,IAAI,MAAc,CAAC;QACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;YACrC,MAAM,GAAG,aAAa,CAAC;SACxB;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SAClC;aAAM;YACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;SACjC;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;;ICzED;AACA,aAmFgB,iBAAiB,CAAC,aAA4B;QAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;QACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;YAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;YACjF,IACE,iBAAiB,CAAC,UAAU;gBAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;gBACA,MAAM,GAAG,IAAI,CAAC;gBACd,MAAM;aACP;SACF;QACD,OAAO,MAAM,
CAAC;IAChB,CAAC;;ICjGD;IACA;;IAEA,IAAM,MAAM,GAAG,IAAI,SAAS,EAAE,CAAC;IAE/B;IACA;IACA;IACA;IACA;IACA;IACA,IAAI,QAA2D,CAAC;IAChE,IAAI,OAAO,IAAI,CAAC,YAAY,KAAK,WAAW,EAAE;QAC5C,QAAQ,GAAG,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,+BAA+B,EAAE;YACzE,UAAU,EAAE,UAAC,CAAC,IAAK,OAAA,CAAC,GAAA;SACrB,CAAC,CAAC;KACJ;AAED,aAAgB,QAAQ,CAAC,GAAW;;QAClC,IAAI;YACF,IAAM,GAAG,GAAG,MAAM,CAAC,eAAe,QAAE,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,GAAG,oCAAK,GAAG,GAAa,iBAAiB,CAAC,CAAC;YACpG,YAAY,CAAC,GAAG,CAAC,CAAC;YAElB,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;YAC3C,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;SAC7B;QAAC,OAAO,GAAG,EAAE;YACZ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SAC5B;IACH,CAAC;IAED,IAAI,OAAO,GAAG,EAAE,CAAC;IACjB,IAAI;QACF,IAAM,UAAU,UAAI,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,SAAS,oCAAK,SAAS,CAAW,CAAC;QAC5E,OAAO,SACL,MAAM,CAAC,eAAe,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;aAClF,YAAa,mCAAI,EAAE,CAAC;KAC1B;IAAC,OAAO,OAAO,EAAE;;KAEjB;IAED,SAAS,YAAY,CAAC,GAAa;QACjC,IAAI,OAAO,EAAE;YACX,IAAM,YAAY,GAAG,GAAG,CAAC,sBAAsB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;YACxE,IAAI,YAAY,CAAC,MAAM,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAE,CAAC,SAAS,CAAC,CAAC;aAClD;SACF;IACH,CAAC;IAED,SAAS,SAAS,CAAC,IAAU;QAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;IACxC,CAAC;IAED;;;;IAIA,SAAS,uBAAuB,CAAC,IAAU;QACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,GAAG,IAAI,GAAG,SAAS,CAAC;IACpE,CAAC;IAED,SAAS,WAAW,CAAC,IAAU;QAC7B,IAAI,MAAM,GAAQ,EAAE,CAAC;QAErB,IAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QAEtD,IAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QAChD,IAAM,kBAAkB,GACtB,CAAC,cAAc;YACb,cAAc,KAAK,CAAC;YACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;YAC1C,cAAc,CAAC,SAAS;YAC1B,SAAS,CAAC;QAEZ,IAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;QACjF,IAAI,qBAAqB,EAAE;YACzB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;YAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;gBAChE,IAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;gBACjD,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;aAC7C;YAED,IAAI,kBAAkB,EAAE;gBACtB,MAAM,CAAC,GAAG,CAAC,GAAG,kBAAkB,CAAC;aAClC;SACF;aAAM,IAAI,cAAc,KAAK,CAAC,EAAE;YAC/B,MAAM,GAAG,EAAE,CAAC;SACb;aAAM,IAAI,kBAAkB,EAAE;YAC7B,MAAM,GAAG,kBAAkB,CAAC;SAC7B;QAED,IAAI,CAAC,kBAAkB,EAAE;YACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;gBACvC,IAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;;gBAEjC,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;oBACrC,IAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,CAAC,CAAC;oBAC5C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;wBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,WAAW,CAAC;qBACtC;yBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;wBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;qBAC1C;yBAAM;wBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;qBAChE;iBACF;aACF;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;IACA,IAAM,GAAG,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;IACrE,IAAM,UAAU,GAAG,IAAI,aAAa,EAAE,CAAC;AAEvC,aAAgB,YAAY,CAAC,GAAQ,EAAE,IAA4B;QACjE,IAAM,QAAQ,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,KAAK,MAAM,CAAC;QACnD,IAAM,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;QACxC,QACE,yDAAyD,GAAG,UAAU,CAAC,iBAAiB,CAAC,GAAG,CAAC,EAC7F;IACJ,CAAC;IAED,SAAS,eAAe,CAAC,KAAgD;QACvE,IAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAkB,UAAkB,EAAlB,KAAA,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAlB,cAAkB,EAAlB,IAAkB,EAAE;YAAjC,IAAM,GAAG,SAAA;YACZ,IAAM,IAAI,GAAG,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;YACtC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC
,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;YACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACnB;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB;QAC9C,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,SAAS,EAAE;YAClF,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;YAC5C,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,QAAQ,EAAE,CAAC;YAClC,OAAO,CAAC,IAAI,CAAC,CAAC;SACf;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YAC7B,IAAM,MAAM,GAAG,EAAE,CAAC;YAClB,KAAwB,UAAG,EAAH,WAAG,EAAH,iBAAG,EAAH,IAAG,EAAE;gBAAxB,IAAM,SAAS,YAAA;gBAClB,KAAoB,UAAiC,EAAjC,KAAA,SAAS,CAAC,SAAS,EAAE,WAAW,CAAC,EAAjC,cAAiC,EAAjC,IAAiC,EAAE;oBAAlD,IAAM,KAAK,SAAA;oBACd,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;iBACpB;aACF;YACD,OAAO,MAAM,CAAC;SACf;aAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;YAClC,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;YAC5C,KAAkB,UAAgB,EAAhB,KAAA,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAhB,cAAgB,EAAhB,IAAgB,EAAE;gBAA/B,IAAM,GAAG,SAAA;gBACZ,IAAI,GAAG,KAAK,GAAG,EAAE;oBACf,KAAmB,UAAyB,EAAzB,KAAA,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;wBAAzC,IAAM,IAAI,SAAA;wBACb,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;qBACpC;iBACF;qBAAM;oBACL,KAAoB,UAAwB,EAAxB,KAAA,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;wBAAzC,IAAM,KAAK,SAAA;wBACd,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;qBACzB;iBACF;aACF;YACD,OAAO,CAAC,IAAI,CAAC,CAAC;SACf;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,0CAAwC,GAAK,CAAC,CAAC;SAChE;IACH,CAAC;;ICpKD;AACA;QAmBE,2BACW,WAA0B,EAC1B,QAAkC;YADlC,gBAAW,GAAX,WAAW,CAAe;YAC1B,aAAQ,GAAR,QAAQ,CAA0B;SACzC;;;;;;QASG,qCAAS,GAAhB,UAAiB,QAA8B;YAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;SAC1C;;;;;;;QAQM,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;YACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACtC;QACH,wBAAC;IAAD,CAAC,IAAA;IAsBD;;;AAGA;QACE,8BAAoB,OAA4B;YAA5B,YAAO,GAAP,OAAO,CAAqB;SAAI;;;;;;QAO7C,wCAAS,GAAhB,UAAiB,QAA8B;YAC7C,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;gBACd,QAAQ,KAAKA,4BAAoB,CAAC,GAAG;gBACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;SACH;;;;;;;QAQM,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;YACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;gBAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;aACrC;SACF;QACH,2BAAC;IAAD,CAAC;;ICjGD;AACA,IAmCA;;;;AAIA,aAAgB,qBAAqB,CACnC,2BAAyD;QAEzD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;aACpF;SACF,CAAC;IACJ,CAAC;AAED,IAAO,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,IAAO,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;IAElF;;;;IAIA;QAA2C,yCAAiB;QAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;YAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;YAJC,KAAI,CAAC,gBAAgB;gBACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;YAC/F,KAAI,CAAC,eAAe;gBAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;;SAC9F;QAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;oBAC/C,sBAAO,IAAI,CAAC,WAAW;6BACpB,WAAW,CAAC,OAAO,CAAC;6BACpB,IAAI,CAAC,UAAC,QAA+B;4BACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;yBAAA,CAC/E,EAAC;;;SACL;QACH,4BAAC;IAAD,CAxBA,CAA2C,iBAAiB,GAwB3D;IAED,SAAS,oBAAoB,CAC3B,cAAqC;QAErC,IAAI,MAAqC,CAAC;QAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;QACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;QACvE,IAAI,aAAa,EAAE;YACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;YAC1E,IAAI,CAAC,uBAAuB,EAAE;gBAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;aACzD;iBAAM;gBACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aACjE;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,yBAAyB,CAAC,cAAqC;QACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;QAC3C,IAAI,MAAe,CAAC;QACpB,IAAI,iBAAiB,KAAK,SAAS
,EAAE;YACnC,MAAM,GAAG,IAAI,CAAC;SACf;aAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;YACjD,MAAM,GAAG,iBAAiB,CAAC;SAC5B;aAAM;YACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;SAC5C;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,aAAgB,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;QAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;YAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;YAC7E,IAAI,iBAAiB,EAAE;gBACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;gBACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;oBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;oBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;oBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;yBAC/B,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;oBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;oBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;0BAC1D,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;0BACrC,CAAC,CAAC,YAAY,CAAC;oBACnB,IAAI,CAAC,oBAAoB,EAAE;wBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;wBAC/E,IAAI,mBAAmB,EAAE;4BACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;kCAChE,6BAA2B,UAAY;kCACtC,cAAc,CAAC,UAAqB,CAAC;4BAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;4BACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;4BAC9B,KAAK,CAAC,OAAO,GAAGC,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC3D,KAAK,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;4BAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;4BAC5E,IAAI;gCACF,IAAI,mBAAmB,EAAE;oCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;oCACjC,IACE,yBAAyB;wCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;wCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;4CAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;yCACjD;wCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;4CAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;yCACvC;wCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;4CAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;yCAC7C;qCACF;yCAAM;wCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;wCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;4CAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;yCAC3C;wCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;wCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;4CACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;yCACvC;qCACF;oCAED,IAAI,yBAAyB,EAAE;wCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;wCAClD,IACE,aAAa,CAAC,KAAK;4CACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;4CACA,kBAAkB;gDAChB,OAAO,mBAAmB,KAAK,QAAQ;sDACnC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;sDAC9D,EAAE,CAAC;yCACV;wCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;qCACH;iCACF;6BACF;4BAAC,OAAO,YAAY,EAAE;gCACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;6BAC7J;4BACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;yBAC9B;qBACF;yBAAM,IAAI,YAAY,EAAE;wBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;4BAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;4BACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;gCACpF,kBAAkB;oCAChB,OAAO,kBAAkB,KAAK,QAAQ;0CAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;0CAC3D,EAAE,CAAC;6BACV;4BACD,IAAI;gCACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;6BACH;4BAAC,OAAO,KAAK,EAAE;gCACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;gCACF,SAAS,CAAC,OAAO,GAAGD,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;gCAC/D,SAAS,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;gCACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;6BAClC;yBACF;6BAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;4BAE9C,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;yBAC7E;wBAED,IAAI,YAAY,CAAC,aAAa,EAAE;4BAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;yBACH;
qBACF;iBACF;aACF;YACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;SACxC,CAAC,CAAC;IACL,CAAC;IAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;QAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;YACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;YACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;YAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;YACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;SAC1B,CAAC;QAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;YACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;YAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;YAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;kBAC5C,EAAE;kBACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,GAAA,CAAC,CAAC;YACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;gBAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EACjF;gBACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;oBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;oBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;iBAC5B,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;aACxB;iBAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EAAE;gBAC3F,OAAO,QAAQ,CAAC,MAAI,CAAC;qBAClB,IAAI,CAAC,UAAC,IAAI;oBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;oBACpC,OAAO,iBAAiB,CAAC;iBAC1B,CAAC;qBACD,KAAK,CAAC,YAAY,CAAC,CAAC;aACxB;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;IAC5C,CAAC;;ICrSD;AACA,aAyBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;aACH;SACF,CAAC;IACJ,CAAC;IAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;IAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;IACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;IACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;IAEnD;;;;IAIA;QAA4C,0CAAiB;;;;;;;;;;QA2B3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;YAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;YAXC,SAAS,QAAQ,CAAC,CAAM;gBACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;aAC9B;YACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;YACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;YAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;kBAC9C,gBAAgB;kBAChB,iCAAiC,CAAC;YACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;kBAC9C,gBAAgB;kBAChB,iCAAiC,CAAC;;SACvC;QAEM,4CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAKC;YAJC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC;iBAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;SAC7E;QACH,6BAAC;IAAD,CAvDA,CAA4C,iBAAiB,GAuD5D;IAED;;;;;;;;IAQA,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;QAEpB,IACE,UAAU,IAAI,SAAS;aACtB,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;YACxC,UAAU,KAAK,GAAG;YAClB,UAAU,KAAK,GAAG,EAClB;YACA,OAAO,KAAK,CAAC;SACd;QAED,IAAI,YAAoB,CAAC;QACzB,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;SACnF;aAAM;YACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;SAClD;QAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;;IAOA,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;QAEhB,IAAI,CAAC,SAAS,EAAE;YACd,SAAS,GAAG;gBACV,UAAU,EAAE,CAAC;gBACb,aAAa,EAAE,CAAC;aACjB,CAAC;SACH;QAED,IAAI,GAAG,EAAE;YACP,IAAI,SAAS,CAAC,KAAK,EAAE;gBACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;aAClC;YAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;SACv
B;;QAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;QAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;YAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;QACxF,cAAc,IAAI,gBAAgB,CAAC;QAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;QAEF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;QAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;QAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;QAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;YAC7E,OAAOC,KACC,CAAC,SAAS,CAAC,aAAa,CAAC;iBAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAA,CAAC;iBAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,GAAA,CAAC;iBAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,GAAA,CAAC,CAAC;SACrE;aAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;YAEjD,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;gBACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;YACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SAC5B;aAAM;YACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;SAClC;IACH,CAAC;;IC3ND;AACA,aAYgB,6BAA6B,CAC3C,mBAA8C;QAA9C,oCAAA,EAAA,8CAA8C;QAE9C,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;aACpF;SACF,CAAC;IACJ,CAAC;IAED;QAAmD,iDAAiB;QAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;YAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;SAGrC;QAEM,mDAAW,GAAlB,UAAmB,OAAwB;YACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;gBACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAEC,YAAkB,EAAE,CAAC,CAAC;aACtE;YACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QACH,oCAAC;IAAD,CAfA,CAAmD,iBAAiB,GAenE;;ICtCD;IACA;AAcA,aAAgB,sBAAsB;QACpC,OAAO,mBAAmB,CAAC;IAC7B,CAAC;AAED,aAAgB,uBAAuB;QACrC,IAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;QAChD,IAAM,MAAM,GAAG;YACb,GAAG,EAAE,IAAI;YACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,EAAE,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;SAChE,CAAC;QAEF,OAAO,CAAC,MAAM,CAAC,CAAC;IAClB,CAAC;;IC3BD;AACA,IAgBA,SAAS,cAAc;QACrB,IAAM,aAAa,GAAG;YACpB,GAAG,EAAE,YAAY;YACjB,KAAK,EAAE,SAAS,CAAC,aAAa;SAC/B,CAAC;QAEF,OAAO,CAAC,aAAa,CAAC,CAAC;IACzB,CAAC;IAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;QADpB,6BAAA,EAAA,kBAAkB;QAClB,+BAAA,EAAA,oBAAoB;QAEpB,OAAO,aAAa;aACjB,GAAG,CAAC,UAAC,IAAI;YACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,GAAG,EAAE,CAAC;YACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;SAC9B,CAAC;aACD,IAAI,CAAC,YAAY,CAAC,CAAC;IACxB,CAAC;AAED,IAAO,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,aAAgB,wBAAwB;QACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;QACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;QACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;QAC/E,OAAO,SAAS,CAAC;IACnB,CAAC;AAED,aAAgB,eAAe,CAAC,aAA6B;QAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,GAAG,sBAAsB,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC;QAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;cAC9C,wBAAwB,EAAE;cAC1B,aAAa,CAAC,KAAK,CAAC;QAE1B,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;aAC7D;SACF,CAAC;IACJ,CAAC;IAED;QAAqC,mCAAiB;QACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,W
AAmB;YAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;YANU,iBAAW,GAAX,WAAW,CAAe;YAC1B,cAAQ,GAAR,QAAQ,CAA0B;YACjC,eAAS,GAAT,SAAS,CAAQ;YACjB,iBAAW,GAAX,WAAW,CAAQ;;SAG9B;QAED,qCAAW,GAAX,UAAY,OAAwB;YAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;YACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,4CAAkB,GAAlB,UAAmB,OAAwB;YACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACrC;YAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;gBAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;aACvD;SACF;QACH,sBAAC;IAAD,CAxBA,CAAqC,iBAAiB,GAwBrD;;ICvFD;AACA,IAMA;;;AAGA;QAAA;YACmB,cAAS,GAAwD,EAAE,CAAC;SAqHtF;;;;QAhHQ,sBAAG,GAAV;YACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;SAC/C;;;;;;QAOM,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;YACnD,IAAI,aAAa,EAAE;gBACjB,IAAI,cAAc,IAAI,SAAS,EAAE;oBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,cAAc,GAAG,cAAc,CAAC,QAAQ,EAAE,CAAC;oBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;iBAC1C;qBAAM;oBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;iBACtC;aACF;SACF;;;;;QAMM,sBAAG,GAAV,UAAW,aAAqB;YAC9B,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;SAClE;;;;QAKM,2BAAQ,GAAf;YACE,IAAI,MAAM,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;gBAC1C,IAAI,MAAM,EAAE;oBACV,MAAM,IAAI,GAAG,CAAC;iBACf;gBACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;gBACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;oBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;oBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;wBAA/C,IAAM,qBAAqB,uBAAA;wBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;qBACpE;oBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;iBACtC;qBAAM;oBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;iBAChD;aACF;YACD,OAAO,MAAM,CAAC;SACf;;;;QAKa,cAAK,GAAnB,UAAoB,IAAY;YAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;YAE9B,IAAI,IAAI,EAAE;gBACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,YAAY,GAAuB,eAAe,CAAC;gBAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;gBACvB,IAAI,cAAc,GAAG,EAAE,CAAC;gBACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;oBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;oBACzC,QAAQ,YAAY;wBAClB,KAAK,eAAe;4BAClB,QAAQ,gBAAgB;gCACtB,KAAK,GAAG;oCACN,YAAY,GAAG,gBAAgB,CAAC;oCAChC,MAAM;gCAER,KAAK,GAAG;oCACN,aAAa,GAAG,EAAE,CAAC;oCACnB,cAAc,GAAG,EAAE,CAAC;oCACpB,MAAM;gCAER;oCACE,aAAa,IAAI,gBAAgB,CAAC;oCAClC,MAAM;6BACT;4BACD,MAAM;wBAER,KAAK,gBAAgB;4BACnB,QAAQ,gBAAgB;gCACtB,KAAK,GAAG;oCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;oCAC1C,aAAa,GAAG,EAAE,CAAC;oCACnB,cAAc,GAAG,EAAE,CAAC;oCACpB,YAAY,GAAG,eAAe,CAAC;oCAC/B,MAAM;gCAER;oCACE,cAAc,IAAI,gBAAgB,CAAC;oCACnC,MAAM;6BACT;4BACD,MAAM;wBAER;4BACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;qBACzE;iBACF;gBACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;oBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;iBAC3C;aACF;YAED,OAAO,MAAM,CAAC;SACf;QACH,eAAC;IAAD,CAAC,IAAA;IAED;;;AAGA;QAAA;SAiPC;;;;;QAtOQ,8BAAS,GAAhB,UAAiB,MAA0B;YACzC,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;aAC1B;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;aAC5B;SACF;;;;QAKM,8BAAS,GAAhB;YACE,OAAO,IAAI,CAAC,OAAO,CAAC;SACrB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAwB;YACrC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;aAClC;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAiC;YAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;gBACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBAC
L,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;aACnC;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;;QAMM,4BAAO,GAAd,UAAe,IAAwB;YACrC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;aACxB;iBAAM;gBACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;gBACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;oBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;;;oBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;iBAC9E;qBAAM;oBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;iBACxB;aACF;SACF;;;;;QAMM,+BAAU,GAAjB,UAAkB,IAAwB;YACxC,IAAI,IAAI,EAAE;gBACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACrD,IAAI,WAAW,EAAE;oBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;wBAC9B,WAAW,IAAI,GAAG,CAAC;qBACpB;oBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;wBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;qBAC1B;oBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;iBAC3B;gBACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;;;;QAKM,4BAAO,GAAd;YACE,OAAO,IAAI,CAAC,KAAK,CAAC;SACnB;;;;QAKM,6BAAQ,GAAf,UAAgB,KAAyB;YACvC,IAAI,CAAC,KAAK,EAAE;gBACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;aACzB;iBAAM;gBACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;aACrC;SACF;;;;;;QAOM,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;YAC3E,IAAI,kBAAkB,EAAE;gBACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;oBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;iBAC9B;gBACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;aAC1D;SACF;;;;;QAMM,2CAAsB,GAA7B,UAA8B,kBAA0B;YACtD,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;SACtE;;;;QAKM,6BAAQ,GAAf;YACE,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;SACzD;;;;QAKO,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;YACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;YAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;gBACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;gBACxD,IAAI,KAAK,EAAE;oBACT,QAAQ,KAAK,CAAC,IAAI;wBAChB,KAAK,QAAQ;4BACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACvC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACrC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BACrC,MAAM;wBAER,KAAK,MAAM;4BACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;4BAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;gCAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;6BACxB;4BACD,MAAM;wBAER,KAAK,OAAO;4BACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;4BACzC,MAAM;wBAER;4BACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;qBAC/D;iBACF;aACF;SACF;QAEM,6BAAQ,GAAf;YACE,IAAI,MAAM,GAAG,EAAE,CAAC;YAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;gBAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;aAChC;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;aACtB;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;aAC5B;YAED,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBAC/B,MAAM,IAAI,GAAG,CAAC;iBACf;gBACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;aACtB;YAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;gBACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;aACxC;YAED,OAAO,MAAM,CAAC;SACf;;;;;QAMM,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;YACzD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,OAAO,CAA
C,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;gBACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;aACvE;SACF;QAEa,gBAAK,GAAnB,UAAoB,IAAY;YAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;YAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;YACnC,OAAO,MAAM,CAAC;SACf;QACH,iBAAC;IAAD,CAAC,IAAA;IAMD;QACE,kBAAmC,IAAY,EAAkB,IAAkB;YAAhD,SAAI,GAAJ,IAAI,CAAQ;YAAkB,SAAI,GAAJ,IAAI,CAAc;SAAI;QAEzE,eAAM,GAApB,UAAqB,IAAY;YAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;SACrC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,aAAI,GAAlB,UAAmB,IAAY;YAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACnC;QAEa,cAAK,GAAnB,UAAoB,IAAY;YAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;SACpC;QACH,eAAC;IAAD,CAAC,IAAA;IAED;;;;AAIA,aAAgB,uBAAuB,CAAC,SAAiB;QACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;QACtD,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;aACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;aACrD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;IACJ,CAAC;IAED;;;IAGA;QAME,sBAA4B,KAAa,EAAE,KAAyB;YAAxC,UAAK,GAAL,KAAK,CAAQ;YACvC,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;YAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,GAAG,KAAK,GAAG,gBAAgB,CAAC;YACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;SACxB;;;;;QAMM,8BAAO,GAAd;YACE,OAAO,IAAI,CAAC,aAAa,CAAC;SAC3B;;;;QAKM,2BAAI,GAAX;YACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;gBAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;aAChC;iBAAM;gBACL,QAAQ,IAAI,CAAC,aAAa;oBACxB,KAAK,QAAQ;wBACX,UAAU,CAAC,IAAI,CAAC,CAAC;wBACjB,MAAM;oBAER,KAAK,gBAAgB;wBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;wBACvB,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,MAAM;wBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;wBACf,MAAM;oBAER,KAAK,OAAO;wBACV,SAAS,CAAC,IAAI,CAAC,CAAC;wBAChB,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;iBAC5E;aACF;YACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;SAC7B;QACH,mBAAC;IAAD,CAAC,IAAA;IAED;;;IAGA,SAAS,aAAa,CAAC,SAAuB;QAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;YACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;SACjD;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;IAGA,SAAS,mBAAmB,CAAC,SAAuB;QAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;IACzD,CAAC;IAED;;;IAGA,SAAS,mBAAmB,CAAC,SAAuB;QAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;IAClD,CAAC;IAED;;;;IAIA,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;QAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,EAAE;gBACT,IAAI,GAAG,CAAC,CAAC;aACV;YACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;SACjC;IACH,CAAC;IAED;;;;IAIA,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;QACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;QAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;YACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;SAClC;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;IACtE,CAAC;IAED;;;;IAIA,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;QACnF,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;YAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;gBAChC,MAAM;aACP;iBAAM;gBACL,MAAM,IAAI,gBAAgB,CAAC;gBAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;aAC1B;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;IAIA,SAAS,sBAAsB,CAAC,SAAuB;QACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,SAAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,GAAA,CAAC,CAAC;IACzF,CAAC;IAED;;;;IAIA,SAAS,kBAAkB,CAAC,SAAuB;QAAE,+BAAkC;aAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;YAAl
C,8CAAkC;;QACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CACvE,CAAC;IACJ,CAAC;IAED,SAAS,UAAU,CAAC,SAAuB;QACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;QACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;IACH,CAAC;IAED,SAAS,gBAAgB,CAAC,SAAuB;QAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;gBAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;gBACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;iBAAM;gBACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;SACF;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;gBAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;aAClC;iBAAM;gBACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;aACnC;SACF;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;SAC7B;QAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;QAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;QAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,QAAQ,CAAC,SAAuB;QACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;QACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;YACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;IACH,CAAC;IAED,SAAS,SAAS,CAAC,SAAuB;QACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;QAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;QAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;IACnC,CAAC;;IClpBD;AACA,IA4BO,IAAM,sBAAsB,GAAoB;QACrD,eAAe,EAAE,IAAI;QACrB,UAAU,EAAE,EAAE;KACf,CAAC;AAEF,aAAgB,cAAc,CAAC,cAAmB;QAAnB,+BAAA,EAAA,mBAAmB;QAChD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;aAChE;SACF,CAAC;IACJ,CAAC;IAED;QAAoC,kCAAiB;QACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;YAAf,2BAAA,EAAA,eAAe;YAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHU,gBAAU,GAAV,UAAU,CAAK;;SAGzB;QAEM,oCAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC;iBACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;SAC1D;QACH,qBAAC;IAAD,CAdA,CAAoC,iBAAiB,GAcpD;IAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;QAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,
EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;QACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QACxD,IACE,cAAc;aACb,MAAM,KAAK,GAAG;iBACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;iBAC3D,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;iBACnE,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;gBAC7C,MAAM,KAAK,GAAG,CAAC;aAChB,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa;iBAC5E,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;YACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;YAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;;YAKjC,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,KAAK,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;gBACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;gBACvB,OAAO,OAAO,CAAC,IAAI,CAAC;aACrB;YAED,OAAO,MAAM,CAAC,WAAW;iBACtB,WAAW,CAAC,OAAO,CAAC;iBACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,GAAA,CAAC;iBAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;SACpD;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;IAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;;;QAGvE,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;YACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;YAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;SACzB;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC;;aC5Fe,oBAAoB,CAAC,YAAiB;QAAjB,6BAAA,EAAA,iBAAiB;QACpD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;aACpE;SACF,CAAC;IACJ,CAAC;IAED;QAA0C,wCAAiB;QACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;YAAlB,8BAAA,EAAA,kBAAkB;YAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHU,mBAAa,GAAb,aAAa,CAAK;;SAG5B;QAEM,0CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;SAClE;QACH,2BAAC;IAAD,CAdA,CAA0C,iBAAiB,GAc1D;IAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;QAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;YACxE,IAAI,MAAM,EAAE;gBACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;qBAI3C,KAAK,CAAC,cAAM,OAAA,KAAK,GAAA,CAAC;qBAClB,IAAI,CAAC,UAAC,kBAAkB;oBACvB,IAAI,kBAAkB,EAAE;;;wBAGtB,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;wBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;qBACxD;oBACD,OAAO,QAAQ,CAAC;iBACjB,CAAC,EACJ;aACH;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;IAED;;;;;;IAMA,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;QAAnB,4BAAA,EAAA,mBAAmB;QAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;QAC5D,IAAI,WAAW,EAAE;YACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;SACtC;;;QAID,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;QAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;QAE1E,OAAO,UAAU,CAAC;IACpB,CAAC;IAED;;;;;;IAMA,SAAS,yBAAyB,CAAC,IAAY;QAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;QACzB,IAAI,IAAI,EAAE;YACR,IAAI;gBACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;aACjC;YAAC,OAAO,GAAG,EAAE;;aAEb;YACD,IACE,YAAY;gBACZ,YAAY,CAAC,KAAK;gBAClB,YAAY,CAAC,KAAK,CAAC,OAAO;gBAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;gBACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;gBACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;gBAC/D,IAAI,QAAQ,EAAE;oBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;iBACzB;aACF;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;IAMA,SAAS,sBAAsB,CAAC,GAAW;QACzC,IAAI,MAAM,CAAC;QACX,IAAM,QAA
Q,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;QAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;SACtB;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;SAClF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;;;IASA,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;QAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;QACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;QAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;QACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;QAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;QAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;YAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;gBAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;aAC7F;YACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;SAC/D,CAAC,CAAC;IACL,CAAC;IAED;;;;;;;;;IASA,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;QAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;QAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;QACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;QAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;YACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;YAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;gBACrF,OAAO,IAAI,CAAC;aACb;iBAAM;gBACL,OAAOC,KACC,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;qBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,GAAA,CAAC,CAAC;aACpE;SACF,CAAC,CAAC;IACL,CAAC;;ICpMD;AACA,aAYgB,aAAa,CAC3B,sBAAgD;QAEhD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;aACvE;SACF,CAAC;IACJ,CAAC;IAED;QAAmC,iCAAiB;QAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;YAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;YAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;SAGxD;QAED,mCAAW,GAAX,UAAY,OAAwB;YAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SACzD;QAEM,mCAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;gBAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;aAAA,CAC1C,CAAC;SACH;QACH,oBAAC;IAAD,CAlBA,CAAmC,iBAAiB,GAkBnD;;ICzCD;AACA,aAwBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAEzB,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;aACH;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;IAUA;QAA4C,0CAAiB;QAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;YAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;YAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;YAC1C,gCAA0B,GAAG,CAAC,CAAC;YAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;YAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;YAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,GAAG,KAAI,CAAC,0BAA0B,CAAC;YAChG,KAAI,CAAC,aAAa;gBAChB,OAAO,aAAa,KAAK,QAAQ,GAAG,aAAa,GAAG,KAAI,CAAC,6BAA6B,CAAC;YACzF,KAAI,CAAC,gBAAgB;gBACnB,OAAO,gBAAgB,KAAK,QAAQ;sBAChC,gBAAgB;sBAChB,KAAI,CAAC,iCAAiC,CAAC;YAC7C,KAAI,CAAC,gBAAgB;gBACnB,OAAO,gBAAgB,KAAK,QAAQ;sBAChC,gBAAgB;sBAChB,KAAI,CAAC,iCAAiC,CAAC;;SAC9C;QAEM,4CAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAIC;YAHC,OAAO,IAAI,CAAC,WAAW;iBACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;iBAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAAC,OAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;SAClE;QACH,6BAAC;IAAD,CArCA,CAA4C,iBAAiB,GAqC5D;IAED;;;;;;;IAOA,SAASC,aAAW,CAAC,MAA8B,EAAE,SAAoB;QACvE,IAAI,YAAY,CAAC;QACjB,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;SACnF;aAAM;YACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;SAClD;QACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;IAMA,SAASC,iBAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;QAEhB,IAAI,CAAC,SAAS,EAAE;YACd,SAAS,GAAG;gBACV,UAAU,EAAE,C
AAC;gBACb,aAAa,EAAE,CAAC;aACjB,CAAC;SACH;QAED,IAAI,GAAG,EAAE;YACP,IAAI,SAAS,CAAC,KAAK,EAAE;gBACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;aAClC;YAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;SACvB;;QAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;QAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;QACxF,cAAc,IAAI,gBAAgB,CAAC;QAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;QAEF,OAAO,SAAS,CAAC;IACnB,CAAC;IAED,SAAeF,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;wBAErB,SAAS,GAAGE,iBAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;8BAElD,GAAG;4BACH,GAAG,CAAC,IAAI;4BACRD,aAAW,CAAC,MAAM,EAAE,SAAS,CAAC;6BAC7B,GAAG,CAAC,IAAI,KAAK,WAAW;gCACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;gCAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;gCAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;gCACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;wBAItB,qBAAME,KAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;wBAA1C,SAA0C,CAAC;wBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;wBAEvD,sBAAOH,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;wBAGrE,IAAI,GAAG,EAAE;;4BAEP,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;yBACxC;wBACD,sBAAO,iBAAiB,EAAC;;;;;KAE5B;;IC1LD;AACA,IAKA,WAAY,qBAAqB;QAC/B,kCAAS,CAAA;QACT,kCAAS,CAAA;QACT,mCAAU,CAAA;QACV,oCAAW,CAAA;QACX,wCAAe,CAAA;IACjB,CAAC,EANWI,6BAAqB,KAArBA,6BAAqB,QAMhC;;ICZD;AACA,IAYA,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,aAAgB,WAAW,CAAC,cAA8B;QACxD,OAAO;YACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;gBACrE,MAAM,0BAA0B,CAAC;aAClC;SACF,CAAC;IACJ,CAAC;IAED;QAAiC,+BAAiB;QAChD,qBAAY,UAAyB,EAAE,OAAiC;YAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,MAAM,0BAA0B,CAAC;SAClC;QAEM,iCAAW,GAAlB,UAAmB,QAAyB;YAC1C,MAAM,0BAA0B,CAAC;SAClC;QACH,kBAAC;IAAD,CATA,CAAiC,iBAAiB,GASjD;;IChCD;AACA,IAYA,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,aAAgB,uBAAuB,CAAC,SAAkB;QACxD,OAAO,SAAS,CAAC;IACnB,CAAC;AAED,aAAgB,WAAW,CAAC,cAA8B;QACxD,OAAO;YACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;gBACrE,MAAM,0BAA0B,CAAC;aAClC;SACF,CAAC;IACJ,CAAC;IAED;QAAiC,+BAAiB;QAChD,qBAAY,UAAyB,EAAE,OAAiC;YAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,MAAM,0BAA0B,CAAC;SAClC;QAEM,iCAAW,GAAlB,UAAmB,QAAyB;YAC1C,MAAM,0BAA0B,CAAC;SAClC;QACH,kBAAC;IAAD,CATA,CAAiC,iBAAiB,GASjD;;ICpCD;AACA,IAaA,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;IACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,aAAgB,qBAAqB,CACnC,UAAwC;QAAxC,2BAAA,EAAA,gCAAwC;QAExC,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;aACnE;SACF,CAAC;IACJ,CAAC;IAED;;;;;;IAMA;QAA2C,yCAAiB;QAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;YAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;SAC9B;QAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;oBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;4BACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;yBAC7C,CAAC,EAAC;;;SACJ;QAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;4BAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;gCACvD,sBAAO,YAAY,EAAC;6BACrB;4BAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;kCAEE,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;4BAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;iCACE,SAAS,EAAT,wBAAS;4BACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;4BAAtB,SAAsB,CAAC;4BACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;4BAArD,GAAG,GAAG,SAA+C;4B
AC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;gCAIxD,sBAAO,YAAY,EAAC;;;;SACrB;QAEa,2CAAqB,GAAnC,UAAoC,WAAmB;YACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;YAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;gBACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;aACrE;iBAAM;gBACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;aACnC;SACF;QAEa,+CAAyB,GAAvC,UAAwC,WAAmB;YACzD,IAAI;gBACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;gBAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;gBAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;gBAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;aAC9C;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,SAAS,CAAC;aAClB;SACF;QACH,4BAAC;IAAD,CA7DA,CAA2C,iBAAiB,GA6D3D;;ICxGD;AACA,IASA,IAAM,4BAA4B,GAAG,QAAQ,CAAC;IAE9C;;;AAGA,IAAO,IAAM,6BAA6B,GAAG;QAC3C,gCAAgC;QAChC,qCAAqC;QACrC,sCAAsC;QACtC,gCAAgC;KACjC,CAAC;IAEF;;;;AAIA;QAGE,wCACE,oBAAqC,EACrC,MAAmE;YAAnE,uBAAA,EAAA,gDAAmE;YAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;YACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;SACtB;QAEY,iDAAQ,GAArB;;;;;gCACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;4BAAnE,WAAW,GAAG,SAAqD;4BACzE,IAAI,WAAW,KAAK,IAAI,EAAE;gCAClB,MAAM,GAAkB;oCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;oCAC9B,SAAS,EAAE,4BAA4B;oCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;iCAC1C,CAAC;gCACF,sBAAO,MAAM,EAAC;6BACf;iCAAM;gCACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;6BAC/C;;;;SACF;QAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;gCACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;4BAArC,aAAa,GAAG,SAAqB;4BAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrBC,SAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;4BACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;SACrC;QACH,qCAAC;IAAD,CAAC;;IC3DD;AACA,IA+JA;;;;AAIA;;;;;;;QAgCE,uBACE,WAAwD,EACxD,OAA8B;YAE9B,IAAI,CAAC,OAAO,EAAE;gBACZ,OAAO,GAAG,EAAE,CAAC;aACd;YAED,IAAI,OAAO,CAAC,OAAO,EAAE;gBACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;aAChC;YAED,IAAI,wBAA8D,CAAC;YACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;gBAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;gBAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;oBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;iBACvC;gBACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;aACnF;iBAAM;gBACL,wBAAwB,GAAG,WAAW,CAAC;aACxC;YAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;gBACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;aAC/E;YAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;YACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAIC,aAAiB,EAAE,CAAC;YACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;YAElF,IAAI,sBAA8C,CAAC;YACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;gBACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;aACzD;iBAAM;gBACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;gBACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;oBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;oBACpF,IAAI,yBAAyB,EAAE;wBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;qBACpD;iBACF;aACF;YACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;SACvD;;;;QAKD,mCAAW,GAAX,UAAY,OAAgD;YAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;gBAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;YAED,IAAI,WAA4B,CAAC;YACjC,IAAI;gBACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;oBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;oBACpC,WAAW,GAAG,OAAO,CAAC;iBACvB;qBAAM;oBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;oBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;iBAC5C;aACF;YAAC,OAAO,KAAK,EAAE;gBACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aAC9B;YAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;YACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GA
AG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;oBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;iBACH;aACF;YACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;SAC9C;;;;;;;QAQD,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;YAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;gBACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;gBACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;aACxC;YAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;YAEtC,IAAI,MAA6B,CAAC;YAClC,IAAI;gBACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;gBAC1E,IAAI,CAAC,OAAO,EAAE;oBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;iBACH;gBAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;gBAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;gBAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gBACzD,IAAI,aAAa,CAAC,IAAI,EAAE;oBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;iBAC3C;gBACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;oBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;wBAAnD,IAAM,YAAY,SAAA;wBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;wBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;4BAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;yBAC3D;wBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;qBACH;iBACF;gBACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;wBAAvD,IAAM,cAAc,SAAA;wBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;4BACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;4BACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;gCAChD,IAAI,cAAc,CAAC,gBAAgB,KAAKF,6BAAqB,CAAC,KAAK,EAAE;oCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;wCACpC,mBAAmB,GAAG,EAAE,CAAC;qCAC1B;yCAAM;wCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;4CACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;4CACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;yCACvE;qCACF;iCACF;qCAAM,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;oCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;oCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;iCACjF;6BACF;4BACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;gCAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;oCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;4CACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;4CACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;yCAC7E;qCACF;iCACF;qCAAM;oCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;iCAC/D;6BACF;4BACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;gCAC5C,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;gCAC/D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;4BACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;yBACH;qBACF;iBACF;gBACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;gBAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;gBACzE,IAAI,WAAW,EAAE;oBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;iBACtD;gBAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;oBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;wBAAzD,IAAM,eAAe,SAAA;wBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,E
ACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;wBACF,IAAI,WAAW,IAAI,SAAS,EAAE;4BAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;4BACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;iCACxE,sBAAsB,CAAC;4BAC1B,IAAI,sBAAsB,EAAE;gCAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oCAAvC,IAAM,GAAG,SAAA;oCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;iCACzE;6BACF;iCAAM;gCACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;oCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;6BACH;yBACF;qBACF;iBACF;gBAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;gBAC3E,IAAI,OAAO,EAAE;oBACX,IAAI,OAAO,CAAC,aAAa,EAAE;wBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;4BACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;yBACpF;qBACF;oBAED,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;qBAC/C;oBAED,IAAI,OAAO,CAAC,OAAO,EAAE;wBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;qBACvC;oBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;wBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;qBACzD;oBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;wBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;qBAC7D;iBACF;gBAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;gBAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;gBAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;oBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;iBACnE;gBAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;oBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;iBAAA,CAC1D,CAAC;aACH;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;aAChC;YAED,IAAM,EAAE,GAAG,QAAQ,CAAC;YACpB,IAAI,EAAE,EAAE;gBACN,MAAM;;qBAEH,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,GAAA,CAAC;qBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;aAC5B;YAED,OAAO,MAAM,CAAC;SACf;QACH,oBAAC;IAAD,CAAC,IAAA;aAEe,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;QAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;YACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;YAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;YAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;YACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;YACtC,IAAI;gBACF,IAAI,WAAW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;oBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;oBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;oBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;oBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;wBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BG,kBAAwB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;yBACH;6BAAM,IAAI,CAAC,QAAQ,EAAE;4BACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;gCAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;6BACpC,CAAC,CAAC;yBACJ;qBACF;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;qBACrD;iBACF;aACF;YAAC,OAAO,KAAK,EAAE;gBACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;aACH;SACF;aAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;YAC1B,KAAgC,UAAgC,EAAhC
,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAAE;gBAA7D,IAAM,iBAAiB,SAAA;gBAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;gBACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;oBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;oBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;iBACH;aACF;SACF;IACH,CAAC;IAED,SAAS,sBAAsB,CAAC,QAAa;QAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;IAC/C,CAAC;IAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;QAEjC,IAAI,MAAc,CAAC;QACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC7B,MAAM,GAAG,KAAK,CAAC;SAChB;aAAM;YACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;YAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;gBAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;aACxB;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;QAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;QAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;YACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;SAClF;QAED,IAAI,WAAW,EAAE;YACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;gBACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;aAC7B;iBAAM;gBACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;aAC5C;SACF;QAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;QACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;QACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;YAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;SAC5F;QAED,IAAM,eAAe,yBAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;QACF,IAAI,eAAe,CAAC,eAAe,EAAE;YACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;SAC5D;QAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;QAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;YACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;YACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;SACzC;QAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;QAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;QACzE,IAAI,aAAa,EAAE;YACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,AAAa,CAAC,CAAC,CAAC;SAC5C;QAED,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;SACpD;QAED,OAAO,SAAS,CAAC;IACnB,CAAC;AAID,IAkBA,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;QAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;IACJ,CAAC;AAED,aAAgB,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;QAEtB,IAAI,KAAU,CAAC;QACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;YACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;SACjC;QACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;YAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;oBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;iBACtC;qBAAM;oBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;oBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;wBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;qBACnF;oBAED,IAAI,eAAe,GAAG,KAAK,CAAC;oBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;wBACvC,eAAe;4BACb,eAAe,CAAC,QAAQ;iCACvB,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;qBAClE;oBACD,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;iBAC7F;;gBAGD,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;gBACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;aACnE;SACF;aAAM;YACL,IAAI,eAAe,CAAC,QAAQ,EAAE;gBAC5B,KAAK,GAAG,EAAE,CAAC;aACZ;YAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;gBACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,C
AAC,CAAC;gBAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;gBAEF,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;gBACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;gBACxE,IAAI,aAAa,KAAK,SAAS,EAAE;oBAC/B,IAAI,CAAC,KAAK,EAAE;wBACV,KAAK,GAAG,EAAE,CAAC;qBACZ;oBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;iBACrC;aACF;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;QAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;QAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;YAEnD,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;gBACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;aACpC;iBAAM;gBACL,MAAM;aACP;SACF;QACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;YAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;YAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;SAC7B;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;AAED,aAAgB,eAAe,CAC7B,SAAgC,EAChC,YAA2C;QAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;QAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;QAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;YACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;gBACtC,KAAK,EAAE,SAAS;aACjB,CAAC;SAAA,CAAC;QAEL,IAAI,UAAU,EAAE;YACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;YACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;gBACzB,OAAO,oBAAoB,uBACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;aACJ;YAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;YAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,GAAA,CAChD,CAAC;YACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;;;gBAGjD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,SAAS,CAAC,UAAU,GAAG,EAAE,CAAC;gBACnF,IAAM,aAAa,GAAG,eAAI,UAAU,CAAyB,CAAC;gBAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;oBAA3C,IAAM,GAAG,SAAA;oBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;wBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;qBAChD;iBACF;gBAED,IAAI,aAAa,EAAE;oBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;wBAAzC,IAAM,GAAG,SAAA;wBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;qBACzC;iBACF;gBACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;gBACpC,OAAO,aAAa,CAAC;aACtB;YAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;gBACzD,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;aACJ;SACF;QAED,IACE,UAAU;YACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;YACnCC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;YAEA,OAAO,oBAAoB,uBACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;SACJ;QAED,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;IACL,CAAC;;ICn1BD;AACA,aAWgB,SAAS,CAAC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QACjD,OAAO;YACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;gBACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;aACnD;SACF,CAAC;IACJ,CAAC;IAED;QAA+B,6BAAiB;QAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;YAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;YAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;YADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;SACtB;QAEM,+BAAW,GAAlB,UAAmB,OAAwB;YAA3C,iBAEC;YADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;SAC9F;QACH,gBAAC;IAAD,CAfA,CAA+B,iBAAiB,GAe/C;IAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;QAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;QAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;QAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;QACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAA
c,CAAC,CAAC;QAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;IACnC,CAAC;;IC9CD;AACA,IAOA,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;IAClD,IAAMC,8BAA4B,GAAG,QAAQ,CAAC;IAE9C;;;AAGA;;;;;;;;QAWE,0BAAY,KAAa,EAAE,mBAA0D;YAA1D,oCAAA,EAAA,oDAA0D;YATrF,wBAAmB,GAAWA,8BAA4B,CAAC;YAUzD,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;YACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;SAChD;;;;;;;QAQD,sCAAW,GAAX,UAAY,WAA4B;YACtC,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;YACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,uBAAC;IAAD,CAAC;;IC/CD;AACA,IAOA,IAAMC,iBAAe,GAAG,SAAS,CAAC,eAAe,CAAC;IAClD,IAAMD,8BAA4B,GAAG,OAAO,CAAC;AAE7C;;;;;;;;;QAaE,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;YAA1D,oCAAA,EAAA,oDAA0D;YAb5D,wBAAmB,GAAWA,8BAA4B,CAAC;YAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;SAChD;;;;;;;QAQD,oDAAW,GAAX,UAAY,WAA4B;YACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;YACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAIE,YAAmB,CAAC,WAAW,CAAG,CAAC;YAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;gBAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAACD,iBAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;YAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,qCAAC;IAAD,CAAC;;ICrDD;AACA,IAqBA;;;AAGA;;;;;QAcE,2BAAY,OAAgC;YAC1C,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;gBAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;aACH;YACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;;;;;;;QAQD,uCAAW,GAAX,UAAY,WAA4B;YACtC,IAAI,CAAC,WAAW,EAAE;gBAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;aACH;YAED,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;oBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;iBACzC;gBACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;oBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;iBAChE;aACF;YAED,IAAI,IAAI,CAAC,OAAO,EAAE;gBAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;oBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;iBAC/E;gBACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;oBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;oBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;wBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;qBACxB;oBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;iBAClD;aACF;YAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;SACrC;QACH,wBAAC;IAAD,CAAC;;ICxFD;AACA;QAIsC,oCAAiB;;;;;;;QAOrD,0BAAY,QAAgB;YAA5B,iBAUC;YATC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;gBAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;aACrF;YACD,IAAM,OAAO,GAA4B;gBACvC,QAAQ,EAAE;oBACR,aAAa,EAAE,QAAQ;iBACxB;aACF,CAAC;YACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;SAChB;QACH,uBAAC;IAAD,CAlBA,CAAsC,iBAAiB;;ICLvD;AACA;QAIuC,qCAAiB;;;;;;;QAOtD,2BAAY,SAAiB;YAA7B,iBAUC;YATC,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;gBAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;aACtF;YACD,IAAM,OAAO,GAA4B;gBACvC,QAAQ,EAAE;oBACR,aAAa,EAAE,SAAS;iBACzB;aACF,CAAC;YACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;SAChB;QACH,
wBAAC;IAAD,CAlBA,CAAuC,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js b/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js new file mode 100644 index 0000000000..340f073da7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.browser.min.js @@ -0,0 +1,19 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e=e||self).msRest={})}(this,(function(e){"use strict";function t(e){return e.toLowerCase()}function r(e){return!(!e||"object"!=typeof e)&&("function"==typeof e.rawHeaders&&"function"==typeof e.clone&&"function"==typeof e.get&&"function"==typeof e.set&&"function"==typeof e.contains&&"function"==typeof e.remove&&"function"==typeof e.headersArray&&"function"==typeof e.headerValues&&"function"==typeof e.headerNames&&"function"==typeof e.toJson)}var n,o=function(){function e(e){if(this._headersMap={},e)for(var t in e)this.set(t,e[t])}return e.prototype.set=function(e,r){this._headersMap[t(e)]={name:e,value:r.toString()}},e.prototype.get=function(e){var r=this._headersMap[t(e)];return r?r.value:void 0},e.prototype.contains=function(e){return!!this._headersMap[t(e)]},e.prototype.remove=function(e){var r=this.contains(e);return delete this._headersMap[t(e)],r},e.prototype.rawHeaders=function(){var e={};for(var t in this._headersMap){var r=this._headersMap[t];e[r.name.toLowerCase()]=r.value}return e},e.prototype.headersArray=function(){var e=[];for(var t in this._headersMap)e.push(this._headersMap[t]);return e},e.prototype.headerNames=function(){for(var e=[],t=this.headersArray(),r=0;r1&&void 0!==arguments[1]?arguments[1]:0,r=(f[e[t+0]]+f[e[t+1]]+f[e[t+2]]+f[e[t+3]]+"-"+f[e[t+4]]+f[e[t+5]]+"-"+f[e[t+6]]+f[e[t+7]]+"-"+f[e[t+8]]+f[e[t+9]]+"-"+f[e[t+10]]+f[e[t+11]]+f[e[t+12]]+f[e[t+13]]+f[e[t+14]]+f[e[t+15]]).toLowerCase();if(!c(r))throw TypeError("Stringified UUID is invalid");return r}(n)}var d={msRestVersion:"2.6.2",HTTP:"http:",HTTPS:"https:",HTTP_PROXY:"HTTP_PROXY",HTTPS_PROXY:"HTTPS_PROXY",NO_PROXY:"NO_PROXY",ALL_PROXY:"ALL_PROXY",HttpConstants:{HttpVerbs:{PUT:"PUT",GET:"GET",DELETE:"DELETE",POST:"POST",MERGE:"MERGE",HEAD:"HEAD",PATCH:"PATCH"},StatusCodes:{TooManyRequests:429}},HeaderConstants:{AUTHORIZATION:"authorization",AUTHORIZATION_SCHEME:"Bearer",RETRY_AFTER:"Retry-After",USER_AGENT:"User-Agent"}},y="undefined"!=typeof process&&!!process.version&&!!process.versions&&!!process.versions.node;function m(e){var t={};return t.body=e.bodyAsText,t.headers=e.headers,t.status=e.status,t}function v(e){var t=e.clone();return t.headers&&t.headers.remove("authorization"),t}function g(e){return new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$","ig").test(e)}function b(){return h()}function w(e,t){return new Promise((function(r){return setTimeout((function(){return r(t)}),e)}))}var E=/^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;function R(e){return E.test(e)}function T(e,t,r){return e&&t?e.split(t).join(r||""):e}var _=function(){function e(e,t){void 0===e&&(e={}),this.modelMappers=e,this.isXML=t}return 
e.prototype.validateConstraints=function(e,t,r){var n=function(e,n){throw new Error('"'+r+'" with value "'+t+'" should satisfy the constraint "'+e+'": '+n+".")};if(e.constraints&&null!=t){var o=e.constraints,i=o.ExclusiveMaximum,a=o.ExclusiveMinimum,s=o.InclusiveMaximum,u=o.InclusiveMinimum,l=o.MaxItems,c=o.MaxLength,f=o.MinItems,p=o.MinLength,h=o.MultipleOf,d=o.Pattern,y=o.UniqueItems;if(null!=i&&t>=i&&n("ExclusiveMaximum",i),null!=a&&t<=a&&n("ExclusiveMinimum",a),null!=s&&t>s&&n("InclusiveMaximum",s),null!=u&&tl&&n("MaxItems",l),null!=c&&t.length>c&&n("MaxLength",c),null!=f&&t.length=0&&e[r-1]===t;)--r;return e.substr(0,r)}(i(e),"=").replace(/\+/g,"-").replace(/\//g,"_")}(t)}return t}(r,t):null!==o.match(/^Sequence$/gi)?n=function(e,t,r,n){if(!Array.isArray(r))throw new Error(n+" must be of type Array.");var o=t.type.element;if(!o||"object"!=typeof o)throw new Error('element" metadata for an Array must be defined in the mapper and it must of type "object" in '+n+".");for(var i=[],a=0;a0&&o[o.length-1])||6!==i[0]&&2!==i[0])){a=0;continue}if(3===i[0]&&(!o||i[1]>o[0]&&i[1]'+te.serializeToString(r)}function ne(e){for(var t=[],r=0,n=Object.keys(e);r=200&&r.status<300);a.headersMapper&&(e.parsedHeaders=t.serializer.deserialize(a.headersMapper,e.headers.rawHeaders(),"operationRes.parsedHeaders"))}}else{var u=t.responses.default;if(u){var l=X(t)?"Unexpected status code: "+n:e.bodyAsText,c=new z(l);c.statusCode=n,c.request=v(e.request),c.response=m(e);var f=e.parsedBody;try{if(f){var p=u.bodyMapper;if(p&&"CloudError"===p.serializedName)f.error&&(f=f.error),f.code&&(c.code=f.code),f.message&&(c.message=f.message);else{var h=f;f.error&&(h=f.error),c.code=h.code,h.message&&(c.message=h.message)}if(p){var d=f;t.isXML&&p.type.name===q.Sequence&&(d="object"==typeof f?f[p.xmlElementName]:[]),c.body=t.serializer.deserialize(p,d,"error.body")}}}catch(t){c.message='Error "'+t.message+'" occurred in deserializing the responseBody - "'+e.bodyAsText+'" for the default response.'}return Promise.reject(c)}}}}return Promise.resolve(e)}))}function fe(e,t,r,n){return{create:function(o,i){return new pe(o,i,e,t,r,n)}}}var pe=function(e){function t(t,r,n,o,i,a){var s=e.call(this,t,r)||this;function u(e){return"number"==typeof e}return s.retryCount=u(n)?n:3,s.retryInterval=u(o)?o:3e4,s.minRetryInterval=u(i)?i:3e3,s.maxRetryInterval=u(a)?a:9e4,s}return U(t,e),t.prototype.sendRequest=function(e){var t=this;return this._nextPolicy.sendRequest(e.clone()).then((function(r){return he(t,e,r)})).catch((function(r){return he(t,e,r.response,void 0,r)}))},t}(oe);function he(e,t,r,n,o){n=function(e,t,r){t||(t={retryCount:0,retryInterval:0}),r&&(t.error&&(r.innerError=t.error),t.error=r),t.retryCount++;var n=Math.pow(2,t.retryCount)-1;return n*=.8*e.retryInterval+Math.floor(Math.random()*(1.2*e.retryInterval-.8*e.retryInterval)),t.retryInterval=Math.min(e.minRetryInterval+n,e.maxRetryInterval),t}(e,n,o);var i=t.abortSignal&&t.abortSignal.aborted;if(!i&&function(e,t,r){if(null==t||t<500&&408!==t||501===t||505===t)return!1;if(!r)throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null.");return(r&&r.retryCount)0},e.prototype.set=function(e,t){if(e)if(null!=t){var r=Array.isArray(t)?t:t.toString();this._rawQuery[e]=r}else delete this._rawQuery[e]},e.prototype.get=function(e){return e?this._rawQuery[e]:void 0},e.prototype.toString=function(){var e="";for(var t in this._rawQuery){e&&(e+="&");var r=this._rawQuery[t];if(Array.isArray(r)){for(var n=[],o=0,i=r;o0)}(e)){var o=void 0;(null==t?void 
0:t.baseUri)&&Ge.includes(null==t?void 0:t.baseUri)&&(o=t.baseUri+"/.default"),r=new Je(e,o)}else r=e;if(r&&!r.signRequest)throw new Error("credentials argument needs to implement signRequest method");if(this._withCredentials=t.withCredentials||!1,this._httpClient=t.httpClient||new B,this._requestPolicyOptions=new ie(t.httpPipelineLogger),Array.isArray(t.requestPolicyFactories))n=t.requestPolicyFactories;else if(n=function(e,t){var r=[];t.generateClientRequestIdHeader&&r.push(de(t.clientRequestIdHeaderName));e&&("function"==typeof e.create?r.push(e):r.push(He(e)));var n=Ke(t.userAgentHeaderName,ve),o=Ke(t.userAgent,ge);n&&o&&r.push(be({key:n,value:o}));var i=I(I({},qe),t.redirectOptions);i.handleRedirects&&r.push(Ce(i.maxRetries));r.push(function(e){return void 0===e&&(e=30),{create:function(t,r){return new Ue(t,r,e)}}}(t.rpRegistrationRetryTimeout)),t.noRetryPolicy||(r.push(fe()),r.push(Le()),r.push(Ye()));r.push(ae(t.deserializationContentTypes)),t.proxySettings?r.push(Ve()):void 0;t.agentSettings&&r.push(Be(t.agentSettings));return r}(r,t),t.requestPolicyFactories){var i=t.requestPolicyFactories(n);i&&(n=i)}this._requestPolicyFactories=n}return t.prototype.sendRequest=function(e){if(null==e||"object"!=typeof e)throw new Error("options cannot be null or undefined and it must be of type object.");var t,n;try{"object"==typeof(n=e)&&"string"==typeof n.url&&"string"==typeof n.method&&"object"==typeof n.headers&&r(n.headers)&&"function"==typeof n.validateRequestProperties&&"function"==typeof n.prepare&&"function"==typeof n.clone?(e.validateRequestProperties(),t=e):t=(t=new C).prepare(e)}catch(e){return Promise.reject(e)}var o=this._httpClient;if(this._requestPolicyFactories&&this._requestPolicyFactories.length>0)for(var i=this._requestPolicyFactories.length-1;i>=0;--i)o=this._requestPolicyFactories[i].create(o,this._requestPolicyOptions);return o.sendRequest(t)},t.prototype.sendOperationRequest=function(t,r,n){"function"==typeof t.options&&(n=t.options,t.options=void 0);var o,i=new C;try{var a=r.baseUrl||this.baseUri;if(!a)throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.");i.method=r.httpMethod,i.operationSpec=r;var s=Re.parse(a);if(r.path&&s.appendPath(r.path),r.urlParameters&&r.urlParameters.length>0)for(var u=0,l=r.urlParameters;u0)for(var p=0,h=r.queryParameters;p0){t.formData={};for(var p=0,h=n.formDataParameters;p0){if(o.isConstant)a=o.defaultValue;else{var s=et(r,n);s.propertyFound||(s=et(t,n));var u=!1;s.propertyFound||(u=o.required||"options"===n[0]&&2===n.length),a=u?o.defaultValue:s.propertyValue}var l=Y(n,o);i.serialize(o,a,l)}}else for(var c in o.required&&(a={}),n){var f=o.type.modelProperties[c],p=n[c],h=e(t,r,p,f,i),d=Y(p,f);i.serialize(f,h,d),void 0!==h&&(a||(a={}),a[c]=h)}return a}(e,t,r.parameterPath,r.mapper,n)}function et(e,t){for(var r={propertyFound:!1},n=0;n> Request: "+JSON.stringify(t.request,void 0,2)),e.logger(">> Response status code: "+t.status);var r=t.bodyAsText;return e.logger(">> Body: "+r),Promise.resolve(t)}(t,e)}))},t}(oe);var nt=d.HeaderConstants,ot=function(){function e(e,t){if(void 0===t&&(t="Bearer"),this.authorizationScheme="Bearer",!e)throw new Error("token cannot be null or undefined.");this.token=e,this.authorizationScheme=t}return e.prototype.signRequest=function(e){return e.headers||(e.headers=new o),e.headers.set(nt.AUTHORIZATION,this.authorizationScheme+" "+this.token),Promise.resolve(e)},e}(),it=d.HeaderConstants,at=function(){function 
e(e,t,r){if(void 0===r&&(r="Basic"),this.authorizationScheme="Basic",null==e||"string"!=typeof e.valueOf())throw new Error("userName cannot be null or undefined and must be of type string.");if(null==t||"string"!=typeof t.valueOf())throw new Error("password cannot be null or undefined and must be of type string.");this.userName=e,this.password=t,this.authorizationScheme=r}return e.prototype.signRequest=function(e){var t=this.userName+":"+this.password,r=this.authorizationScheme+" "+btoa(t);return e.headers||(e.headers=new o),e.headers.set(it.AUTHORIZATION,r),Promise.resolve(e)},e}(),st=function(){function e(e){if(!e||e&&!e.inHeader&&!e.inQuery)throw new Error('options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.');this.inHeader=e.inHeader,this.inQuery=e.inQuery}return e.prototype.signRequest=function(e){if(!e)return Promise.reject(new Error('webResource cannot be null or undefined and must be of type "object".'));if(this.inHeader)for(var t in e.headers||(e.headers=new o),this.inHeader)e.headers.set(t,this.inHeader[t]);if(this.inQuery){if(!e.url)return Promise.reject(new Error("url cannot be null in the request object."));for(var r in e.url.indexOf("?")<0&&(e.url+="?"),this.inQuery)e.url.endsWith("?")||(e.url+="&"),e.url+=r+"="+this.inQuery[r]}return Promise.resolve(e)},e}(),ut=function(e){function t(t){if(!t||t&&"string"!=typeof t)throw new Error("topicKey cannot be null or undefined and must be of type string.");var r={inHeader:{"aeg-sas-key":t}};return e.call(this,r)||this}return U(t,e),t}(st),lt=function(e){function t(t){if(!t||t&&"string"!=typeof t)throw new Error("domainKey cannot be null or undefined and must be of type string.");var r={inHeader:{"aeg-sas-key":t}};return e.call(this,r)||this}return U(t,e),t}(st);e.ApiKeyCredentials=st,e.AzureIdentityCredentialAdapter=Je,e.BaseRequestPolicy=oe,e.BasicAuthenticationCredentials=at,e.Constants=d,e.DefaultHttpClient=B,e.DomainCredentials=lt,e.HttpHeaders=o,e.MapperType=q,e.RequestPolicyOptions=ie,e.RestError=z,e.Serializer=_,e.ServiceClient=We,e.TokenCredentials=ot,e.TopicCredentials=ut,e.URLBuilder=Re,e.URLQuery=Ee,e.WebResource=C,e.agentPolicy=Be,e.applyMixins=function(e,t){t.forEach((function(t){Object.getOwnPropertyNames(t.prototype).forEach((function(r){e.prototype[r]=t.prototype[r]}))}))},e.delay=w,e.deserializationPolicy=ae,e.deserializeResponseBody=ce,e.encodeUri=function(e){return encodeURIComponent(e).replace(/!/g,"%21").replace(/"/g,"%27").replace(/\(/g,"%28").replace(/\)/g,"%29").replace(/\*/g,"%2A")},e.executePromisesSequentially=function(e,t){var r=Promise.resolve(t);return e.forEach((function(e){r=r.then(e)})),r},e.exponentialRetryPolicy=fe,e.flattenResponse=tt,e.generateClientRequestIdPolicy=de,e.generateUuid=b,e.getDefaultProxySettings=Qe,e.getDefaultUserAgentValue=ge,e.isDuration=R,e.isNode=y,e.isValidUuid=g,e.logPolicy=function(e){return void 0===e&&(e=console.log),{create:function(t,r){return new rt(t,r,e)}}},e.promiseToCallback=function(e){if("function"!=typeof e.then)throw new Error("The provided input is not a Promise.");return function(t){e.then((function(e){t(void 0,e)}),(function(e){t(e)}))}},e.promiseToServiceCallback=function(e){if("function"!=typeof e.then)throw new Error("The provided input is not a Promise.");return function(t){e.then((function(e){process.nextTick(t,void 0,e.parsedBody,e.request,e)}),(function(e){process.nextTick(t,e)}))}},e.proxyPolicy=Ve,e.redirectPolicy=Ce,e.serializeObject=function e(t){if(null!=t){if(t instanceof 
Uint8Array)return t=i(t);if(t instanceof Date)return t.toISOString();if(Array.isArray(t)){for(var r=[],n=0;n 1 && arguments[1] !== undefined ? arguments[1] : 0;\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n var uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!validate(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nexport default stringify;","import rng from './rng.js';\nimport stringify from './stringify.js';\n\nfunction v4(options, buf, offset) {\n options = options || {};\n var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (var i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return stringify(rnds);\n}\n\nexport default v4;","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.6.2\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
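A few illustrative calls for the small helpers above (prepareXMLRootList, isDuration, replaceAll, isPrimitiveType); the inputs are arbitrary example values, not anything taken from the library itself.

import { isDuration, isPrimitiveType, prepareXMLRootList, replaceAll } from "./util/utils";

prepareXMLRootList({ id: 1 }, "item"); // { item: [{ id: 1 }] } - a single element is wrapped in an array
isDuration("P1Y2M3DT4H5M6S");          // true  - valid ISO 8601 duration
isDuration("one year");                // false
replaceAll("a-b-c", "-", "_");         // "a_b_c"
isPrimitiveType(42);                   // true
isPrimitiveType({ nested: true });     // false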
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
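The serialize/validateConstraints path above can be exercised with a minimal, hypothetical String mapper; the mapper and object names below are invented for the example.

import { Mapper, Serializer } from "./serializer";

// Hypothetical mapper: a required string of at most 10 characters.
const nameMapper: Mapper = {
  serializedName: "name",
  required: true,
  constraints: { MaxLength: 10 },
  type: { name: "String" },
};

const serializer = new Serializer();
serializer.serialize(nameMapper, "widget", "body.name"); // "widget"
// Passing undefined throws because required is true, and a 16-character string
// fails the MaxLength check inside validateConstraints.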
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
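The deserialize branches above can be illustrated with a few inline mappers and arbitrary raw values (none of these values come from the library):

import { Serializer } from "./serializer";

const serializer = new Serializer();

serializer.deserialize({ type: { name: "Number" } }, "42.5", "value");       // 42.5
serializer.deserialize({ type: { name: "Boolean" } }, "true", "flag");       // true
serializer.deserialize({ type: { name: "UnixTime" } }, 1577836800, "when");  // Date for 2020-01-01T00:00:00Z
serializer.deserialize({ type: { name: "ByteArray" } }, "aGVsbG8=", "blob"); // Uint8Array for "hello"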
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
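serializeSequenceType and serializeDictionaryType above require "element" and "value" sub-mappers; a pair of hypothetical mappers (invented for this sketch) shows the shape they expect.

import { DictionaryMapper, Serializer, SequenceMapper } from "./serializer";

const tagListMapper: SequenceMapper = {
  serializedName: "tags",
  type: { name: "Sequence", element: { type: { name: "String" } } },
};

const metadataMapper: DictionaryMapper = {
  serializedName: "metadata",
  type: { name: "Dictionary", value: { type: { name: "String" } } },
};

const serializer = new Serializer();
serializer.serialize(tagListMapper, ["a", "b"], "tags");           // ["a", "b"]
serializer.serialize(metadataMapper, { env: "prod" }, "metadata"); // { env: "prod" }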
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
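A hypothetical composite mapper (invented for this example) shows how serializeCompositeType and deserializeCompositeType above map client property names to serialized names and back:

import { CompositeMapper, Serializer } from "./serializer";

const PetMapper: CompositeMapper = {
  serializedName: "Pet",
  type: {
    name: "Composite",
    className: "Pet",
    modelProperties: {
      name: { serializedName: "name", type: { name: "String" } },
      ageInYears: { serializedName: "age", type: { name: "Number" } },
    },
  },
};

const serializer = new Serializer({ Pet: PetMapper });
serializer.serialize(PetMapper, { name: "Rex", ageInYears: 3 }, "pet"); // { name: "Rex", age: 3 }
serializer.deserialize(PetMapper, { name: "Rex", age: 3 }, "pet");      // { name: "Rex", ageInYears: 3 }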
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
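getPolymorphicMapper above resolves the concrete mapper through modelMappers.discriminators, keyed by "<uberParent>.<discriminator value>"; a small invented hierarchy illustrates that lookup (all type names here are hypothetical).

import { CompositeMapper, Serializer } from "./serializer";

const Fish: CompositeMapper = {
  serializedName: "Fish",
  type: {
    name: "Composite",
    className: "Fish",
    uberParent: "Fish",
    polymorphicDiscriminator: { serializedName: "fishtype", clientName: "fishtype" },
    modelProperties: { fishtype: { serializedName: "fishtype", type: { name: "String" } } },
  },
};

const Shark: CompositeMapper = {
  serializedName: "shark",
  type: {
    name: "Composite",
    className: "Shark",
    uberParent: "Fish",
    polymorphicDiscriminator: { serializedName: "fishtype", clientName: "fishtype" },
    modelProperties: {
      fishtype: { serializedName: "fishtype", type: { name: "String" } },
      age: { serializedName: "age", type: { name: "Number" } },
    },
  },
};

const serializer = new Serializer({ Fish, Shark, discriminators: { Fish, "Fish.shark": Shark } });
serializer.deserialize(Fish, { fishtype: "shark", age: 4 }, "fish");
// => { fishtype: "shark", age: 4 }, deserialized with the Shark mapper selected via the discriminator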
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
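serializeObject and the MapperType string-enum defined above can be exercised directly; the inputs are arbitrary example values.

import { MapperType, serializeObject } from "./serializer";

serializeObject(new Date("2020-01-01T00:00:00Z"));     // "2020-01-01T00:00:00.000Z"
serializeObject({ data: new Uint8Array([104, 105]) }); // { data: "aGk=" } - byte arrays become base64
MapperType.Sequence;                                   // "Sequence"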
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
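A minimal construction sketch for the WebResource class declared above; the URL and header name are placeholders, and the import path assumes the module's own file name.

import { isWebResourceLike, WebResource } from "./webResource";

const request = new WebResource("https://example.org/items", "GET"); // hypothetical URL
request.headers.set("x-example-header", "value");                    // hypothetical header
isWebResourceLike(request); // true
const copy = request.clone(); // headers are cloned; the body reference is reused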
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
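A hypothetical prepare() call wiring together the pathTemplate, pathParameters and queryParameters handling above; the template, parameter names and values are made up.

import { WebResource } from "./webResource";

const request = new WebResource().prepare({
  method: "GET",
  baseUrl: "https://example.org",
  pathTemplate: "/widgets/{widgetName}",
  pathParameters: { widgetName: "blue widget" },    // URL-encoded unless skipUrlEncoding is set
  queryParameters: { "api-version": "2020-01-01" },
});
// request.url === "https://example.org/widgets/blue%20widget?api-version=2020-01-01"
// accept-language, x-ms-client-request-id and Content-Type are then filled in with the defaults above.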
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
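A second hypothetical prepare() call showing the body-handling branch above: a plain object body is JSON.stringify'd unless disableJsonStringifyOnBody is set, and Content-Type falls back to "application/json; charset=utf-8".

import { WebResource } from "./webResource";

const put = new WebResource().prepare({
  method: "PUT",
  url: "https://example.org/widgets/1", // hypothetical URL
  headers: { "If-Match": "*" },
  body: { name: "widget" },
});
// put.body === '{"name":"widget"}'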
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/*! *****************************************************************************\r\nCopyright (c) Microsoft Corporation.\r\n\r\nPermission to use, copy, modify, and/or distribute this software for any\r\npurpose with or without fee is hereby granted.\r\n\r\nTHE SOFTWARE IS PROVIDED \"AS IS\" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH\r\nREGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY\r\nAND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,\r\nINDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM\r\nLOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR\r\nOTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR\r\nPERFORMANCE OF THIS SOFTWARE.\r\n***************************************************************************** */\r\n/* global Reflect, Promise */\r\n\r\nvar extendStatics = function(d, b) {\r\n extendStatics = Object.setPrototypeOf ||\r\n ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||\r\n function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };\r\n return extendStatics(d, b);\r\n};\r\n\r\nexport function __extends(d, b) {\r\n extendStatics(d, b);\r\n function __() { this.constructor = d; }\r\n d.prototype = b === null ? 
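An illustrative RequestOptionsBase value for the interface above; the header name and timeout are placeholders.

import { RequestOptionsBase } from "./webResource";

const options: RequestOptionsBase = {
  customHeaders: { "x-example-correlation-id": "1234" },
  timeout: 30000, // milliseconds before the request is terminated
  onDownloadProgress: (progress) => console.log(progress.loadedBytes),
};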
Object.create(b) : (__.prototype = b.prototype, new __());\r\n}\r\n\r\nexport var __assign = function() {\r\n __assign = Object.assign || function __assign(t) {\r\n for (var s, i = 1, n = arguments.length; i < n; i++) {\r\n s = arguments[i];\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];\r\n }\r\n return t;\r\n }\r\n return __assign.apply(this, arguments);\r\n}\r\n\r\nexport function __rest(s, e) {\r\n var t = {};\r\n for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)\r\n t[p] = s[p];\r\n if (s != null && typeof Object.getOwnPropertySymbols === \"function\")\r\n for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {\r\n if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))\r\n t[p[i]] = s[p[i]];\r\n }\r\n return t;\r\n}\r\n\r\nexport function __decorate(decorators, target, key, desc) {\r\n var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;\r\n if (typeof Reflect === \"object\" && typeof Reflect.decorate === \"function\") r = Reflect.decorate(decorators, target, key, desc);\r\n else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;\r\n return c > 3 && r && Object.defineProperty(target, key, r), r;\r\n}\r\n\r\nexport function __param(paramIndex, decorator) {\r\n return function (target, key) { decorator(target, key, paramIndex); }\r\n}\r\n\r\nexport function __metadata(metadataKey, metadataValue) {\r\n if (typeof Reflect === \"object\" && typeof Reflect.metadata === \"function\") return Reflect.metadata(metadataKey, metadataValue);\r\n}\r\n\r\nexport function __awaiter(thisArg, _arguments, P, generator) {\r\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\r\n return new (P || (P = Promise))(function (resolve, reject) {\r\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\r\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\r\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\r\n step((generator = generator.apply(thisArg, _arguments || [])).next());\r\n });\r\n}\r\n\r\nexport function __generator(thisArg, body) {\r\n var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;\r\n return g = { next: verb(0), \"throw\": verb(1), \"return\": verb(2) }, typeof Symbol === \"function\" && (g[Symbol.iterator] = function() { return this; }), g;\r\n function verb(n) { return function (v) { return step([n, v]); }; }\r\n function step(op) {\r\n if (f) throw new TypeError(\"Generator is already executing.\");\r\n while (_) try {\r\n if (f = 1, y && (t = op[0] & 2 ? y[\"return\"] : op[0] ? 
y[\"throw\"] || ((t = y[\"return\"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;\r\n if (y = 0, t) op = [op[0] & 2, t.value];\r\n switch (op[0]) {\r\n case 0: case 1: t = op; break;\r\n case 4: _.label++; return { value: op[1], done: false };\r\n case 5: _.label++; y = op[1]; op = [0]; continue;\r\n case 7: op = _.ops.pop(); _.trys.pop(); continue;\r\n default:\r\n if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }\r\n if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }\r\n if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }\r\n if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }\r\n if (t[2]) _.ops.pop();\r\n _.trys.pop(); continue;\r\n }\r\n op = body.call(thisArg, _);\r\n } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }\r\n if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };\r\n }\r\n}\r\n\r\nexport function __createBinding(o, m, k, k2) {\r\n if (k2 === undefined) k2 = k;\r\n o[k2] = m[k];\r\n}\r\n\r\nexport function __exportStar(m, exports) {\r\n for (var p in m) if (p !== \"default\" && !exports.hasOwnProperty(p)) exports[p] = m[p];\r\n}\r\n\r\nexport function __values(o) {\r\n var s = typeof Symbol === \"function\" && Symbol.iterator, m = s && o[s], i = 0;\r\n if (m) return m.call(o);\r\n if (o && typeof o.length === \"number\") return {\r\n next: function () {\r\n if (o && i >= o.length) o = void 0;\r\n return { value: o && o[i++], done: !o };\r\n }\r\n };\r\n throw new TypeError(s ? \"Object is not iterable.\" : \"Symbol.iterator is not defined.\");\r\n}\r\n\r\nexport function __read(o, n) {\r\n var m = typeof Symbol === \"function\" && o[Symbol.iterator];\r\n if (!m) return o;\r\n var i = m.call(o), r, ar = [], e;\r\n try {\r\n while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);\r\n }\r\n catch (error) { e = { error: error }; }\r\n finally {\r\n try {\r\n if (r && !r.done && (m = i[\"return\"])) m.call(i);\r\n }\r\n finally { if (e) throw e.error; }\r\n }\r\n return ar;\r\n}\r\n\r\nexport function __spread() {\r\n for (var ar = [], i = 0; i < arguments.length; i++)\r\n ar = ar.concat(__read(arguments[i]));\r\n return ar;\r\n}\r\n\r\nexport function __spreadArrays() {\r\n for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;\r\n for (var r = Array(s), k = 0, i = 0; i < il; i++)\r\n for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)\r\n r[k] = a[j];\r\n return r;\r\n};\r\n\r\nexport function __await(v) {\r\n return this instanceof __await ? (this.v = v, this) : new __await(v);\r\n}\r\n\r\nexport function __asyncGenerator(thisArg, _arguments, generator) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var g = generator.apply(thisArg, _arguments || []), i, q = [];\r\n return i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i;\r\n function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }\r\n function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }\r\n function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }\r\n function fulfill(value) { resume(\"next\", value); }\r\n function reject(value) { resume(\"throw\", value); }\r\n function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }\r\n}\r\n\r\nexport function __asyncDelegator(o) {\r\n var i, p;\r\n return i = {}, verb(\"next\"), verb(\"throw\", function (e) { throw e; }), verb(\"return\"), i[Symbol.iterator] = function () { return this; }, i;\r\n function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === \"return\" } : f ? f(v) : v; } : f; }\r\n}\r\n\r\nexport function __asyncValues(o) {\r\n if (!Symbol.asyncIterator) throw new TypeError(\"Symbol.asyncIterator is not defined.\");\r\n var m = o[Symbol.asyncIterator], i;\r\n return m ? m.call(o) : (o = typeof __values === \"function\" ? __values(o) : o[Symbol.iterator](), i = {}, verb(\"next\"), verb(\"throw\"), verb(\"return\"), i[Symbol.asyncIterator] = function () { return this; }, i);\r\n function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }\r\n function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }\r\n}\r\n\r\nexport function __makeTemplateObject(cooked, raw) {\r\n if (Object.defineProperty) { Object.defineProperty(cooked, \"raw\", { value: raw }); } else { cooked.raw = raw; }\r\n return cooked;\r\n};\r\n\r\nexport function __importStar(mod) {\r\n if (mod && mod.__esModule) return mod;\r\n var result = {};\r\n if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];\r\n result.default = mod;\r\n return result;\r\n}\r\n\r\nexport function __importDefault(mod) {\r\n return (mod && mod.__esModule) ? mod : { default: mod };\r\n}\r\n\r\nexport function __classPrivateFieldGet(receiver, privateMap) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to get private field on non-instance\");\r\n }\r\n return privateMap.get(receiver);\r\n}\r\n\r\nexport function __classPrivateFieldSet(receiver, privateMap, value) {\r\n if (!privateMap.has(receiver)) {\r\n throw new TypeError(\"attempted to set private field on non-instance\");\r\n }\r\n privateMap.set(receiver, value);\r\n return value;\r\n}\r\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpClient } from \"./httpClient\";\nimport { HttpHeaders } from \"./httpHeaders\";\nimport { WebResourceLike, TransferProgressEvent } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { RestError } from \"./restError\";\n\n/**\n * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests.\n */\nexport class XhrHttpClient implements HttpClient {\n public sendRequest(request: WebResourceLike): Promise {\n const xhr = new XMLHttpRequest();\n\n if (request.agentSettings) {\n throw new Error(\"HTTP agent settings not supported in browser environment\");\n }\n\n if (request.proxySettings) {\n throw new Error(\"HTTP proxy is not supported in browser environment\");\n }\n\n const abortSignal = request.abortSignal;\n if (abortSignal) {\n const listener = () => {\n xhr.abort();\n };\n abortSignal.addEventListener(\"abort\", listener);\n xhr.addEventListener(\"readystatechange\", () => {\n if (xhr.readyState === XMLHttpRequest.DONE) {\n abortSignal.removeEventListener(\"abort\", listener);\n }\n });\n }\n\n addProgressListener(xhr.upload, request.onUploadProgress);\n addProgressListener(xhr, request.onDownloadProgress);\n\n if (request.formData) {\n const formData = request.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n }\n\n request.body = requestForm;\n request.formData = undefined;\n const contentType = request.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n // browser will automatically apply a suitable content-type header\n request.headers.remove(\"Content-Type\");\n }\n }\n\n xhr.open(request.method, request.url);\n xhr.timeout = request.timeout;\n xhr.withCredentials = request.withCredentials;\n for (const header of request.headers.headersArray()) {\n xhr.setRequestHeader(header.name, header.value);\n }\n xhr.responseType = request.streamResponseBody ? \"blob\" : \"text\";\n\n // tslint:disable-next-line:no-null-keyword\n xhr.send(request.body === undefined ? 
null : request.body);\n\n if (request.streamResponseBody) {\n return new Promise((resolve, reject) => {\n xhr.addEventListener(\"readystatechange\", () => {\n // Resolve as soon as headers are loaded\n if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) {\n const blobBody = new Promise((resolve, reject) => {\n xhr.addEventListener(\"load\", () => {\n resolve(xhr.response);\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n blobBody,\n });\n }\n });\n rejectOnTerminalEvent(request, xhr, reject);\n });\n } else {\n return new Promise(function (resolve, reject) {\n xhr.addEventListener(\"load\", () =>\n resolve({\n request,\n status: xhr.status,\n headers: parseHeaders(xhr),\n bodyAsText: xhr.responseText,\n })\n );\n rejectOnTerminalEvent(request, xhr, reject);\n });\n }\n }\n}\n\nfunction addProgressListener(\n xhr: XMLHttpRequestEventTarget,\n listener?: (progress: TransferProgressEvent) => void\n) {\n if (listener) {\n xhr.addEventListener(\"progress\", (rawEvent) =>\n listener({\n loadedBytes: rawEvent.loaded,\n })\n );\n }\n}\n\n// exported locally for testing\nexport function parseHeaders(xhr: XMLHttpRequest) {\n const responseHeaders = new HttpHeaders();\n const headerLines = xhr\n .getAllResponseHeaders()\n .trim()\n .split(/[\\r\\n]+/);\n for (const line of headerLines) {\n const index = line.indexOf(\":\");\n const headerName = line.slice(0, index);\n const headerValue = line.slice(index + 2);\n responseHeaders.set(headerName, headerValue);\n }\n return responseHeaders;\n}\n\nfunction rejectOnTerminalEvent(\n request: WebResourceLike,\n xhr: XMLHttpRequest,\n reject: (err: any) => void\n) {\n xhr.addEventListener(\"error\", () =>\n reject(\n new RestError(\n `Failed to send request to ${request.url}`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n xhr.addEventListener(\"abort\", () =>\n reject(\n new RestError(\"The request was aborted\", RestError.REQUEST_ABORTED_ERROR, undefined, request)\n )\n );\n xhr.addEventListener(\"timeout\", () =>\n reject(\n new RestError(\n `timeout of ${xhr.timeout}ms exceeded`,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n request\n )\n )\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nconst parser = new DOMParser();\n\n// Policy to make our code Trusted Types compliant.\n// https://github.com/w3c/webappsec-trusted-types\n// We are calling DOMParser.parseFromString() to parse XML payload from Azure services.\n// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing\n// according to the spec. There are no HTML/XSS security concerns on the usage of\n// parseFromString() here.\nlet ttPolicy: Pick | undefined;\nif (typeof self.trustedTypes !== \"undefined\") {\n ttPolicy = self.trustedTypes.createPolicy(\"@azure/ms-rest-js#xml.browser\", {\n createHTML: (s) => s,\n });\n}\n\nexport function parseXML(str: string): Promise {\n try {\n const dom = parser.parseFromString((ttPolicy?.createHTML(str) ?? str) as string, \"application/xml\");\n throwIfError(dom);\n\n const obj = domToObject(dom.childNodes[0]);\n return Promise.resolve(obj);\n } catch (err) {\n return Promise.reject(err);\n }\n}\n\nlet errorNS = \"\";\ntry {\n const invalidXML = (ttPolicy?.createHTML(\"INVALID\") ?? \"INVALID\") as string;\n errorNS =\n parser.parseFromString(invalidXML, \"text/xml\").getElementsByTagName(\"parsererror\")[0]\n .namespaceURI! ?? \"\";\n} catch (ignored) {\n // Most browsers will return a document containing , but IE will throw.\n}\n\nfunction throwIfError(dom: Document) {\n if (errorNS) {\n const parserErrors = dom.getElementsByTagNameNS(errorNS, \"parsererror\");\n if (parserErrors.length) {\n throw new Error(parserErrors.item(0)!.innerHTML);\n }\n }\n}\n\nfunction isElement(node: Node): node is Element {\n return !!(node as Element).attributes;\n}\n\n/**\n * Get the Element-typed version of the provided Node if the provided node is an element with\n * attributes. 
If it isn't, then undefined is returned.\n */\nfunction asElementWithAttributes(node: Node): Element | undefined {\n return isElement(node) && node.hasAttributes() ? node : undefined;\n}\n\nfunction domToObject(node: Node): any {\n let result: any = {};\n\n const childNodeCount: number = node.childNodes.length;\n\n const firstChildNode: Node = node.childNodes[0];\n const onlyChildTextValue: string | undefined =\n (firstChildNode &&\n childNodeCount === 1 &&\n firstChildNode.nodeType === Node.TEXT_NODE &&\n firstChildNode.nodeValue) ||\n undefined;\n\n const elementWithAttributes: Element | undefined = asElementWithAttributes(node);\n if (elementWithAttributes) {\n result[\"$\"] = {};\n\n for (let i = 0; i < elementWithAttributes.attributes.length; i++) {\n const attr = elementWithAttributes.attributes[i];\n result[\"$\"][attr.nodeName] = attr.nodeValue;\n }\n\n if (onlyChildTextValue) {\n result[\"_\"] = onlyChildTextValue;\n }\n } else if (childNodeCount === 0) {\n result = \"\";\n } else if (onlyChildTextValue) {\n result = onlyChildTextValue;\n }\n\n if (!onlyChildTextValue) {\n for (let i = 0; i < childNodeCount; i++) {\n const child = node.childNodes[i];\n // Ignore leading/trailing whitespace nodes\n if (child.nodeType !== Node.TEXT_NODE) {\n const childObject: any = domToObject(child);\n if (!result[child.nodeName]) {\n result[child.nodeName] = childObject;\n } else if (Array.isArray(result[child.nodeName])) {\n result[child.nodeName].push(childObject);\n } else {\n result[child.nodeName] = [result[child.nodeName], childObject];\n }\n }\n }\n }\n\n return result;\n}\n\n// tslint:disable-next-line:no-null-keyword\nconst doc = document.implementation.createDocument(null, null, null);\nconst serializer = new XMLSerializer();\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const rootName = (opts && opts.rootName) || \"root\";\n const dom = buildNode(obj, rootName)[0];\n return (\n '' + serializer.serializeToString(dom)\n );\n}\n\nfunction buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {\n const result = [];\n for (const key of Object.keys(attrs)) {\n const attr = doc.createAttribute(key);\n attr.value = attrs[key].toString();\n result.push(attr);\n }\n return result;\n}\n\nfunction buildNode(obj: any, elementName: string): Node[] {\n if (typeof obj === \"string\" || typeof obj === \"number\" || typeof obj === \"boolean\") {\n const elem = doc.createElement(elementName);\n elem.textContent = obj.toString();\n return [elem];\n } else if (Array.isArray(obj)) {\n const result = [];\n for (const arrayElem of obj) {\n for (const child of buildNode(arrayElem, elementName)) {\n result.push(child);\n }\n }\n return result;\n } else if (typeof obj === \"object\") {\n const elem = doc.createElement(elementName);\n for (const key of Object.keys(obj)) {\n if (key === \"$\") {\n for (const attr of buildAttributes(obj[key])) {\n elem.attributes.setNamedItem(attr);\n }\n } else {\n for (const child of buildNode(obj[key], key)) {\n elem.appendChild(child);\n }\n }\n }\n return [elem];\n } else {\n throw new Error(`Illegal value passed to buildObject: ${obj}`);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise<HttpOperationResponse>;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise<HttpOperationResponse>;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new deserialization RequestPolicyFactory that will deserialize HTTP response bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise<HttpOperationResponse> {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return 
result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/*\n * NOTE: When moving this file, please update \"browser\" section in package.json\n * and \"plugins\" section in webpack.testconfig.ts.\n */\n\nimport { TelemetryInfo } from \"./userAgentPolicy\";\n\ninterface NavigatorEx extends Navigator {\n // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2\n readonly oscpu: string;\n}\n\nexport function getDefaultUserAgentKey(): string {\n return \"x-ms-command-name\";\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const navigator = self.navigator as NavigatorEx;\n const osInfo = {\n key: \"OS\",\n value: (navigator.oscpu || navigator.platform).replace(\" \", \"\"),\n };\n\n return [osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? 
`${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. 
The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. 
If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. 
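The URL helpers embedded earlier in this hunk assemble the query string by joining `name=value` pairs with `&` and letting a single parameter carry several values. A minimal standalone sketch of that assembly, using illustrative names rather than the vendored `URLBuilder`/`URLQuery` API, and assuming values are already encoded as they are in `URLQuery.toString()`:

```ts
// Standalone sketch (not the vendored library's API) of how a URL and its
// query string are assembled from parts, including repeated parameters.
type Query = Record<string, string | string[]>;

function buildUrl(scheme: string, host: string, path: string, query: Query): string {
  const pairs: string[] = [];
  for (const [name, value] of Object.entries(query)) {
    for (const v of Array.isArray(value) ? value : [value]) {
      pairs.push(`${name}=${v}`); // values assumed pre-encoded
    }
  }
  const queryString = pairs.length > 0 ? `?${pairs.join("&")}` : "";
  const normalizedPath = path.startsWith("/") ? path : `/${path}`;
  return `${scheme}://${host}${normalizedPath}${queryString}`;
}

// Prints "https://example.com/items?tag=a&tag=b&page=2"
console.log(buildUrl("https", "example.com", "items", { tag: ["a", "b"], page: "2" }));
```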
Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
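`handleRedirect` above only follows a redirect for specific status/method combinations, and it rewrites a 302/303 POST into a GET without a body, per RFC 7231. A self-contained sketch of just that decision logic, with simplified stand-in types rather than the library's own:

```ts
// Sketch of the redirect rules implemented by handleRedirect above:
// which status/method pairs are followed, and when POST becomes GET.
interface RedirectCheck {
  follow: boolean;
  convertToGet: boolean;
}

function checkRedirect(status: number, method: string, locationHeader?: string): RedirectCheck {
  const follow =
    !!locationHeader &&
    (status === 300 ||
      (status === 301 && ["GET", "HEAD"].includes(method)) ||
      (status === 302 && ["GET", "POST", "HEAD"].includes(method)) ||
      (status === 303 && method === "POST") ||
      status === 307);
  // RFC 7231: a 302/303 response to a POST is retried as a GET without a body.
  const convertToGet = follow && (status === 302 || status === 303) && method === "POST";
  return { follow, convertToGet };
}

console.log(checkRedirect(303, "POST", "https://example.com/next")); // { follow: true, convertToGet: true }
console.log(checkRedirect(301, "POST", "https://example.com/next")); // { follow: false, convertToGet: false }
```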
See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. 
If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
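The RP registration policy above reacts to a 409 whose body carries the code `MissingSubscriptionRegistration` by pulling the provider name out of the error message, registering it, and then retrying the original request. A standalone sketch of the detection step, assuming the ARM error-body shape used by `checkRPNotRegisteredError`; the function name is illustrative:

```ts
// Detect a "provider not registered" 409 body and extract the provider name,
// mirroring the checkRPNotRegisteredError logic shown above.
function providerToRegister(body: string): string | undefined {
  try {
    const parsed = JSON.parse(body);
    const error = parsed?.error;
    if (error?.code === "MissingSubscriptionRegistration" && typeof error.message === "string") {
      // e.g. "The subscription is not registered to use namespace 'Microsoft.Compute'."
      const match = error.message.match(/.*'(.*)'/i);
      return match ? match.pop() : undefined;
    }
  } catch {
    // body was not JSON -> nothing to register
  }
  return undefined;
}

const body = JSON.stringify({
  error: {
    code: "MissingSubscriptionRegistration",
    message: "The subscription is not registered to use namespace 'Microsoft.Compute'.",
  },
});
console.log(providerToRegister(body)); // "Microsoft.Compute"
```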
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
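`updateRetryData` above grows the wait time exponentially with the retry count, applies roughly +/-20% jitter around the base interval, and clamps the result to the configured maximum. The same computation as a standalone function, using the policy's default constants (30 s base, 3 s minimum, 90 s maximum); the function name is illustrative:

```ts
// Backoff computation mirroring updateRetryData above: exponential growth,
// jitter around the base interval, and a hard cap at maxRetryInterval.
function nextRetryInterval(
  retryCount: number,          // 1-based attempt number
  retryInterval = 30 * 1000,   // base interval, ms (DEFAULT_CLIENT_RETRY_INTERVAL)
  minRetryInterval = 3 * 1000, // ms (DEFAULT_CLIENT_MIN_RETRY_INTERVAL)
  maxRetryInterval = 90 * 1000 // ms (DEFAULT_CLIENT_MAX_RETRY_INTERVAL)
): number {
  let incrementDelta = Math.pow(2, retryCount) - 1;
  const boundedRandDelta =
    retryInterval * 0.8 + Math.floor(Math.random() * (retryInterval * 0.4));
  incrementDelta *= boundedRandDelta;
  return Math.min(minRetryInterval + incrementDelta, maxRetryInterval);
}

for (let attempt = 1; attempt <= 3; ++attempt) {
  console.log(`attempt ${attempt}: wait ~${nextRetryInterval(attempt)} ms`);
}
```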
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst agentNotSupportedInBrowser = new Error(\"AgentPolicy is not supported in browser environment\");\n\nexport function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw agentNotSupportedInBrowser;\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw agentNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw agentNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ProxySettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nconst proxyNotSupportedInBrowser = new Error(\"ProxyPolicy is not supported in browser environment\");\n\nexport function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined {\n return undefined;\n}\n\nexport function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory {\n return {\n create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => {\n throw proxyNotSupportedInBrowser;\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) {\n super(nextPolicy, options);\n throw proxyNotSupportedInBrowser;\n }\n\n public sendRequest(_request: WebResourceLike): Promise {\n throw proxyNotSupportedInBrowser;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
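`ThrottlingRetryPolicy` above honours the `Retry-After` header on a 429 response; the header may be either a number of seconds or an HTTP date, and both are converted into a delay in milliseconds before the request is retried. A standalone sketch of that conversion (the function name is illustrative, not the library's):

```ts
// Convert a Retry-After header value into a delay in milliseconds,
// mirroring parseRetryAfterHeader/parseDateRetryAfterHeader above.
function retryAfterToMs(headerValue: string, now: number = Date.now()): number | undefined {
  const seconds = Number(headerValue);
  if (!Number.isNaN(seconds)) {
    return seconds * 1000;
  }
  const date = Date.parse(headerValue); // NaN for unparseable dates
  const diff = date - now;
  return Number.isNaN(diff) ? undefined : diff;
}

console.log(retryAfterToMs("120"));                              // 120000
console.log(retryAfterToMs("Wed, 21 Oct 2015 07:28:00 GMT", 0)); // ms between epoch and that date
```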
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
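`AzureIdentityCredentialAdapter` above bridges a token-returning `TokenCredential` to the older `signRequest`-style credentials by fetching a token for a scope and writing it into the `Authorization` header. A minimal sketch of the same pattern with simplified stand-in interfaces (nothing here is the library's exported API):

```ts
// Adapter pattern sketch: wrap a token-returning credential into a function
// that signs a request by setting a Bearer Authorization header.
interface SimpleTokenCredential {
  getToken(scopes: string | string[]): Promise<{ token: string } | null>;
}

interface SimpleRequest {
  headers: Map<string, string>;
}

async function signWithBearerToken(
  credential: SimpleTokenCredential,
  request: SimpleRequest,
  scopes: string | string[] = "https://management.azure.com/.default"
): Promise<SimpleRequest> {
  const accessToken = await credential.getToken(scopes);
  if (!accessToken) {
    throw new Error("Could not find token for scope");
  }
  request.headers.set("Authorization", `Bearer ${accessToken.token}`);
  return request;
}

// Usage with a fake credential:
const fakeCredential: SimpleTokenCredential = {
  getToken: async () => ({ token: "eyJ..." }),
};
signWithBearerToken(fakeCredential, { headers: new Map() }).then((req) =>
  console.log(req.headers.get("Authorization"))
);
```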
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. 
We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.node.js b/node_modules/@azure/ms-rest-js/dist/msRest.node.js new file mode 100644 index 0000000000..761be6a362 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.node.js @@ -0,0 +1,5430 @@ +/** @license ms-rest-js + * Copyright (c) Microsoft Corporation. All rights reserved. + * Licensed under the MIT License. See License.txt and ThirdPartyNotices.txt in the project root for license information. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var uuid = require('uuid'); +var tslib = require('tslib'); +var tough = require('tough-cookie'); +var http = require('http'); +var https = require('https'); +var node_fetch = _interopDefault(require('node-fetch')); +var FormData = _interopDefault(require('form-data')); +var stream = require('stream'); +var tunnel = require('tunnel'); +var xml2js = require('xml2js'); +var os = require('os'); + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; +} +/** + * A collection of HTTP header key/value pairs. 
+ */ +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.2", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. 
+ * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); +} +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +function generateUuid() { + return uuid.v4(); +} +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; +} +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; +} +function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. 
+ * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); +} +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; +}()); +function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. 
+ return decodeString(str); +} +function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} +/** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; +} +function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. 
+ for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, responseBody[key], 
objectName); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +// TODO: why is this here? +function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; +} +var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. 
Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; +}()); + +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. 
+ */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. 
+ * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. 
+ */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. +Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +var RestError = /** @class */ (function (_super) { + tslib.__extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { + } + FetchHttpClient.prototype.sendRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var abortController, abortListener, formData, requestForm_1, appendFormValue, _i, _a, formKey, formValue, j, contentType, body, loadedBytes_1, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, _b, _c, onDownloadProgress_1, responseBody, loadedBytes_2, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + return tslib.__generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResource) cannot be null or undefined and must be of type object."); + } + abortController = new AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + formKey = _a[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + 
httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + loadedBytes_1 = 0; + uploadReportStream = new stream.Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_1 += chunk.length; + httpRequest.onUploadProgress({ loadedBytes: loadedBytes_1 }); + callback(undefined, chunk); + }, + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _d.sent(); + requestInit = tslib.__assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + _d.label = 2; + case 2: + _d.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _d.sent(); + headers = parseHeaders(response.headers); + _b = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? response.body + : undefined + }; + if (!!httpRequest.streamResponseBody) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _d.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _d.label = 6; + case 6: + operationResponse = (_b.bodyAsText = _c, + _b.redirected = response.redirected, + _b.url = response.url, + _b); + onDownloadProgress_1 = httpRequest.onDownloadProgress; + if (onDownloadProgress_1) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + loadedBytes_2 = 0; + downloadReportStream = new stream.Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_2 += chunk.length; + onDownloadProgress_1({ loadedBytes: loadedBytes_2 }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress_1({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _d.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _d.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + 
uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (_e) { }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; + } + }); + }); + }; + return FetchHttpClient; +}()); +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise(function (resolve) { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +function parseHeaders(headers) { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { + httpHeaders.set(key, value); + }); + return httpHeaders; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * A class that handles the query portion of a URLBuilder. + */ +var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. + */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. 
+ */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; +}()); +/** + * A class that handles creating, modifying, and parsing URLs. + */ +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. 
+ */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" + this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. 
+ */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; +}()); +var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; +}()); +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; +}()); +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. 
+ */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. + */ +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); +} +function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } 
+ else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function createProxyAgent(requestUrl, proxySettings, headers) { + var tunnelOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost(), + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = "" + proxySettings.username; + } + var requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + var isRequestHttps = requestScheme.toLowerCase() === "https"; + var proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + var isProxyHttps = proxyScheme.toLowerCase() === "https"; + var proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
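+// Illustrative sketch, not part of the upstream bundle: createProxyAgent above selects the tunnel
+// method from the request scheme and the proxy scheme. For an HTTPS request routed through a
+// plain-HTTP proxy (hypothetical values shown), it would build the agent via tunnel.httpsOverHttp:
+//
+//   var proxyAgent = createProxyAgent(
+//       "https://example.com/resource",
+//       { host: "http://127.0.0.1", port: 3128 },
+//       new HttpHeaders()
+//   );
+//   // proxyAgent.isHttps === true; proxyAgent.agent is the tunneling agent that
+//   // NodeFetchHttpClient.prepareRequest assigns to requestInit.agent below.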
+var NodeFetchHttpClient = /** @class */ (function (_super) { + tslib.__extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + return _this; + } + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return tslib.__awaiter(this, void 0, void 0, function () { + return tslib.__generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; + }); + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var requestInit, cookieString, _a, httpAgent, httpsAgent, tunnel, options, agent; + var _this = this; + return tslib.__generator(this, function (_b) { + switch (_b.label) { + case 0: + requestInit = {}; + if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + })]; + case 1: + cookieString = _b.sent(); + httpRequest.headers.set("Cookie", cookieString); + _b.label = 2; + case 2: + if (httpRequest.agentSettings) { + _a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } + else if (httpAgent) { + requestInit.agent = httpAgent; + } + } + else if (httpRequest.proxySettings) { + tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + requestInit.agent = tunnel.agent; + } + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } + else { + options = { keepAlive: true }; + agent = httpRequest.url.startsWith("https") + ? new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + return [2 /*return*/, requestInit]; + } + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (operationResponse) { + return tslib.__awaiter(this, void 0, void 0, function () { + var setCookieHeader_1; + var _this = this; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!this.cookieJar) return [3 /*break*/, 2]; + setCookieHeader_1 = operationResponse.headers.get("Set-Cookie"); + if (!(setCookieHeader_1 != undefined)) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + })]; + case 1: + _a.sent(); + _a.label = 2; + case 2: return [2 /*return*/]; + } + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. 
+ */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Tests an object to determine whether it implements TokenCredential. + * + * @param credential - The assumed TokenCredential to be tested. + */ +function isTokenCredential(credential) { + // Check for an object with a 'getToken' function and possibly with + // a 'signRequest' function. We do this check to make sure that + // a ServiceClientCredentials implementor (like TokenClientCredentials + // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if + // it doesn't actually implement TokenCredential also. + const castCredential = credential; + return (castCredential && + typeof castCredential.getToken === "function" && + (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function stringifyXML(obj, opts) { + var builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} +function parseXML(str) { + var xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise(function (resolve, reject) { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, function (err, res) { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. 
If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; +}()); +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== exports.HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} +var defaultJsonContentTypes = ["application/json", "text/json"]; +var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. 
+ */ +var DeserializationPolicy = /** @class */ (function (_super) { + tslib.__extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); +function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? 
"Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = stripRequest(parsedResponse.request); + error.response = stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = stripRequest(parsedResponse.request); + restError.response = stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +var ExponentialRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. 
+ * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + tslib.__extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + var runtimeInfo = { + key: "Node", + value: process.version, + }; + var osInfo = { + key: "OS", + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")", + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
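The platform telemetry fragments above are merged with the runtime fragment produced by getRuntimeInfo and getUserAgentString just below; a rough sketch of the resulting default header value follows (the Node and OS values are illustrative and depend on the host environment):

// Illustrative only; the real output substitutes the bundled Constants.msRestVersion
// and the actual process.version / os values of the machine running the client.
getDefaultUserAgentValue();
// => "ms-rest-js/<msRestVersion> Node/v16.20.0 OS/(x64-Linux-5.15.0)"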
+function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? "" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); +} +var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +var UserAgentPolicy = /** @class */ (function (_super) { + tslib.__extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
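A short sketch of how the userAgentPolicy factory above is typically wired in (the prefix string is made up; requestPolicyFactories is the ServiceClientOptions hook consumed by the ServiceClient constructor later in this bundle):

// Prepend an application identifier to the default ms-rest-js user agent.
var uaFactory = userAgentPolicy({ value: "my-app/1.0 " + getDefaultUserAgentValue() });
// new ServiceClient(credentials, { requestPolicyFactories: function (defaults) { return [uaFactory].concat(defaults); } });
// UserAgentPolicy.addUserAgentHeader only sets the header when the request does not already carry one.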
+var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +var RedirectPolicy = /** @class */ (function (_super) { + tslib.__extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); +function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); +} +function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} + +function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +var RPRegistrationPolicy = /** @class */ (function (_super) { + tslib.__extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = 
checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; +} +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. 
+ * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +var SigningPolicy = /** @class */ (function (_super) { + tslib.__extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. 
+ * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry$1(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry$1(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData$1(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry$1(policy, request, operationResponse, err, retryData) { + return tslib.__awaiter(this, void 0, void 0, function () { + var error_1; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData$1(policy, retryData, err); + if (!(err && + err.code && + shouldRetry$1(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry$1(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +(function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. All rights reserved. +function agentPolicy(agentSettings) { + return { + create: function (nextPolicy, options) { + return new AgentPolicy(nextPolicy, options, agentSettings); + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + tslib.__extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options, agentSettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.agentSettings = agentSettings; + return _this; + } + AgentPolicy.prototype.sendRequest = function (request) { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + }; + return AgentPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. 
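A brief sketch of the agent policy above (the keep-alive agent is illustrative): agentSettings carries separate http/https agents, and NodeFetchHttpClient.prepareRequest earlier in this bundle selects the https one for https:// URLs.

var https = require("https");
// Reuse one keep-alive agent for every request issued through the pipeline.
var agentFactory = agentPolicy({ https: new https.Agent({ keepAlive: true }) });
// AgentPolicy.sendRequest copies these settings onto any WebResource that does not
// already define its own agentSettings before handing it to the next policy.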
+/** + * @internal + */ +var noProxyList = loadNoProxy(); +var byPassedList = new Map(); +/** + * @internal + */ +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { + if (noProxyList.length === 0) { + return false; + } + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); + } + return []; +} +/** + * @internal + */ +function extractAuthFromUrl(url) { + var atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth, + }; +} +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username: username, + password: password, + }; +} +function proxyPolicy(proxySettings) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + tslib.__extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + }; + return ProxyPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var StatusCodes = Constants.HttpConstants.StatusCodes; +var DEFAULT_RETRY_COUNT = 3; +function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +var ThrottlingRetryPolicy = /** @class */ (function (_super) { + tslib.__extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return tslib.__awaiter(this, void 0, void 0, function () { + var _this = this; + return tslib.__generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return tslib.__awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return 
ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; +}(BaseRequestPolicy)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ +var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return tslib.__awaiter(this, void 0, void 0, function () { + var accessToken, result; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return tslib.__awaiter(this, void 0, void 0, function () { + var tokenResponse; + return tslib.__generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? 
void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new NodeFetchHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) 
{ return cb(err); }); + } + return result; + }; + return ServiceClient; +}()); +function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } +} +function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = tslib.__assign(tslib.__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. + var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? _response.parsedBody : []; + var arrayResponse = tslib.__spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(tslib.__assign(tslib.__assign({}, parsedHeaders), _response.parsedBody)); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. +function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} +var LogPolicy = /** @class */ (function (_super) { + tslib.__extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; +}(BaseRequestPolicy)); +function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); +} + +// Copyright (c) Microsoft Corporation. All rights reserved. 
+var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME$1 = "Bearer"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$1; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var HeaderConstants$1 = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME$2 = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME$2; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants$1.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +/** + * Authenticates to a service using an API key. + */ +var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. 
+ */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; +}()); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var TopicCredentials = /** @class */ (function (_super) { + tslib.__extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; +}(ApiKeyCredentials)); + +// Copyright (c) Microsoft Corporation. All rights reserved. +var DomainCredentials = /** @class */ (function (_super) { + tslib.__extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. 
+ * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; +}(ApiKeyCredentials)); + +exports.ApiKeyCredentials = ApiKeyCredentials; +exports.AzureIdentityCredentialAdapter = AzureIdentityCredentialAdapter; +exports.BaseRequestPolicy = BaseRequestPolicy; +exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; +exports.Constants = Constants; +exports.DefaultHttpClient = NodeFetchHttpClient; +exports.DomainCredentials = DomainCredentials; +exports.HttpHeaders = HttpHeaders; +exports.MapperType = MapperType; +exports.RequestPolicyOptions = RequestPolicyOptions; +exports.RestError = RestError; +exports.Serializer = Serializer; +exports.ServiceClient = ServiceClient; +exports.TokenCredentials = TokenCredentials; +exports.TopicCredentials = TopicCredentials; +exports.URLBuilder = URLBuilder; +exports.URLQuery = URLQuery; +exports.WebResource = WebResource; +exports.agentPolicy = agentPolicy; +exports.applyMixins = applyMixins; +exports.delay = delay; +exports.deserializationPolicy = deserializationPolicy; +exports.deserializeResponseBody = deserializeResponseBody; +exports.encodeUri = encodeUri; +exports.executePromisesSequentially = executePromisesSequentially; +exports.exponentialRetryPolicy = exponentialRetryPolicy; +exports.flattenResponse = flattenResponse; +exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; +exports.generateUuid = generateUuid; +exports.getDefaultProxySettings = getDefaultProxySettings; +exports.getDefaultUserAgentValue = getDefaultUserAgentValue; +exports.isDuration = isDuration; +exports.isNode = isNode; +exports.isValidUuid = isValidUuid; +exports.logPolicy = logPolicy; +exports.promiseToCallback = promiseToCallback; +exports.promiseToServiceCallback = promiseToServiceCallback; +exports.proxyPolicy = proxyPolicy; +exports.redirectPolicy = redirectPolicy; +exports.serializeObject = serializeObject; +exports.signingPolicy = signingPolicy; +exports.stripRequest = stripRequest; +exports.stripResponse = stripResponse; +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +exports.throttlingRetryPolicy = throttlingRetryPolicy; +exports.userAgentPolicy = userAgentPolicy; +//# sourceMappingURL=msRest.node.js.map diff --git a/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map b/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map new file mode 100644 index 0000000000..e50879e37b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dist/msRest.node.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.node.js","sources":["../lib/httpHeaders.ts","../lib/util/base64.ts","../lib/util/constants.ts","../lib/util/utils.ts","../lib/serializer.ts","../lib/webResource.ts","../node_modules/event-target-shim/src/event.mjs","../node_modules/event-target-shim/src/event-target.mjs","../node_modules/abort-controller/src/abort-signal.ts","../node_modules/abort-controller/src/abort-controller.ts","../lib/restError.ts","../lib/fetchHttpClient.ts","../lib/url.ts","../lib/proxyAgent.ts","../lib/nodeFetchHttpClient.ts","../lib/httpPipelineLogLevel.ts","../node_modules/@azure/core-auth/src/tokenCredential.ts","../lib/operationParameter.ts","../lib/operationSpec.ts","../lib/util/xml.ts","../lib/policies/requestPolicy.ts","../lib/policies/deserializationPolicy.ts","../lib/policies/exponentialRetryPolicy.ts","../lib/policies/generateClientRequestIdPolicy.ts","../lib/policies/msRestUserAgentPolicy.ts","../lib/policies/userAgentPolicy.ts","../lib/policies/redirectPolicy.ts","../lib/policies/rpRegistrationPolicy.ts","../lib/policies/signingPolicy.ts","../lib/policies/systemErrorRetryPolicy.ts","../lib/queryCollectionFormat.ts","../lib/policies/agentPolicy.ts","../lib/policies/proxyPolicy.ts","../lib/policies/throttlingRetryPolicy.ts","../lib/credentials/azureIdentityTokenCredentialAdapter.ts","../lib/serviceClient.ts","../lib/policies/logPolicy.ts","../lib/credentials/tokenCredentials.ts","../lib/credentials/basicAuthenticationCredentials.ts","../lib/credentials/apiKeyCredentials.ts","../lib/credentials/topicCredentials.ts","../lib/credentials/domainCredentials.ts"],"sourcesContent":["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * A collection of HttpHeaders that can be sent with a HTTP request.\n */\nfunction getHeaderKey(headerName: string) {\n return headerName.toLowerCase();\n}\n\n/**\n * An individual header within a HttpHeaders collection.\n */\nexport interface HttpHeader {\n /**\n * The name of the header.\n */\n name: string;\n\n /**\n * The value of the header.\n */\n value: string;\n}\n\n/**\n * A HttpHeaders collection represented as a simple JSON object.\n */\nexport type RawHttpHeaders = { [headerName: string]: string };\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport interface HttpHeadersLike {\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n set(headerName: string, headerValue: string | number): void;\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n get(headerName: string): string | undefined;\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n contains(headerName: string): boolean;\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n remove(headerName: string): boolean;\n /**\n * Get the headers that are contained this collection as an object.\n */\n rawHeaders(): RawHttpHeaders;\n /**\n * Get the headers that are contained in this collection as an array.\n */\n headersArray(): HttpHeader[];\n /**\n * Get the header names that are contained in this collection.\n */\n headerNames(): string[];\n /**\n * Get the header values that are contained in this collection.\n */\n headerValues(): string[];\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n clone(): HttpHeadersLike;\n /**\n * Get the JSON object representation of this HTTP header collection.\n * The result is the same as `rawHeaders()`.\n */\n toJson(): RawHttpHeaders;\n}\n\nexport function isHttpHeadersLike(object?: any): object is HttpHeadersLike {\n if (!object || typeof object !== \"object\") {\n return false;\n }\n\n if (\n typeof object.rawHeaders === \"function\" &&\n typeof object.clone === \"function\" &&\n typeof object.get === \"function\" &&\n typeof object.set === \"function\" &&\n typeof object.contains === \"function\" &&\n typeof object.remove === \"function\" &&\n typeof object.headersArray === \"function\" &&\n typeof object.headerValues === \"function\" &&\n typeof object.headerNames === \"function\" &&\n typeof object.toJson === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * A collection of HTTP header key/value pairs.\n */\nexport class HttpHeaders {\n private readonly _headersMap: { [headerKey: string]: HttpHeader };\n\n constructor(rawHeaders?: RawHttpHeaders) {\n this._headersMap = {};\n if (rawHeaders) {\n for (const headerName in rawHeaders) {\n this.set(headerName, rawHeaders[headerName]);\n }\n }\n }\n\n /**\n * Set a header in this collection with the provided name and value. The name is\n * case-insensitive.\n * @param headerName The name of the header to set. This value is case-insensitive.\n * @param headerValue The value of the header to set.\n */\n public set(headerName: string, headerValue: string | number): void {\n this._headersMap[getHeaderKey(headerName)] = {\n name: headerName,\n value: headerValue.toString(),\n };\n }\n\n /**\n * Get the header value for the provided header name, or undefined if no header exists in this\n * collection with the provided name.\n * @param headerName The name of the header.\n */\n public get(headerName: string): string | undefined {\n const header: HttpHeader = this._headersMap[getHeaderKey(headerName)];\n return !header ? undefined : header.value;\n }\n\n /**\n * Get whether or not this header collection contains a header entry for the provided header name.\n */\n public contains(headerName: string): boolean {\n return !!this._headersMap[getHeaderKey(headerName)];\n }\n\n /**\n * Remove the header with the provided headerName. 
Return whether or not the header existed and\n * was removed.\n * @param headerName The name of the header to remove.\n */\n public remove(headerName: string): boolean {\n const result: boolean = this.contains(headerName);\n delete this._headersMap[getHeaderKey(headerName)];\n return result;\n }\n\n /**\n * Get the headers that are contained this collection as an object.\n */\n public rawHeaders(): RawHttpHeaders {\n const result: RawHttpHeaders = {};\n for (const headerKey in this._headersMap) {\n const header: HttpHeader = this._headersMap[headerKey];\n result[header.name.toLowerCase()] = header.value;\n }\n return result;\n }\n\n /**\n * Get the headers that are contained in this collection as an array.\n */\n public headersArray(): HttpHeader[] {\n const headers: HttpHeader[] = [];\n for (const headerKey in this._headersMap) {\n headers.push(this._headersMap[headerKey]);\n }\n return headers;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerNames(): string[] {\n const headerNames: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerNames.push(headers[i].name);\n }\n return headerNames;\n }\n\n /**\n * Get the header names that are contained in this collection.\n */\n public headerValues(): string[] {\n const headerValues: string[] = [];\n const headers: HttpHeader[] = this.headersArray();\n for (let i = 0; i < headers.length; ++i) {\n headerValues.push(headers[i].value);\n }\n return headerValues;\n }\n\n /**\n * Get the JSON object representation of this HTTP header collection.\n */\n public toJson(): RawHttpHeaders {\n return this.rawHeaders();\n }\n\n /**\n * Get the string representation of this HTTP header collection.\n */\n public toString(): string {\n return JSON.stringify(this.toJson());\n }\n\n /**\n * Create a deep clone/copy of this HttpHeaders collection.\n */\n public clone(): HttpHeaders {\n return new HttpHeaders(this.rawHeaders());\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * Encodes a string in base64 format.\n * @param value the string to encode\n */\nexport function encodeString(value: string): string {\n return Buffer.from(value).toString(\"base64\");\n}\n\n/**\n * Encodes a byte array in base64 format.\n * @param value the Uint8Aray to encode\n */\nexport function encodeByteArray(value: Uint8Array): string {\n // Buffer.from accepts | -- the TypeScript definition is off here\n // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length\n const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer);\n return bufferValue.toString(\"base64\");\n}\n\n/**\n * Decodes a base64 string into a byte array.\n * @param value the base64 string to decode\n */\nexport function decodeString(value: string): Uint8Array {\n return Buffer.from(value, \"base64\");\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nexport const Constants = {\n /**\n * The ms-rest version\n * @const\n * @type {string}\n */\n msRestVersion: \"2.6.2\",\n\n /**\n * Specifies HTTP.\n *\n * @const\n * @type {string}\n */\n HTTP: \"http:\",\n\n /**\n * Specifies HTTPS.\n *\n * @const\n * @type {string}\n */\n HTTPS: \"https:\",\n\n /**\n * Specifies HTTP Proxy.\n *\n * @const\n * @type {string}\n */\n HTTP_PROXY: \"HTTP_PROXY\",\n\n /**\n * Specifies HTTPS Proxy.\n *\n * @const\n * @type {string}\n */\n HTTPS_PROXY: \"HTTPS_PROXY\",\n\n /**\n * Specifies NO Proxy.\n */\n NO_PROXY: \"NO_PROXY\",\n\n /**\n * Specifies ALL Proxy.\n */\n ALL_PROXY: \"ALL_PROXY\",\n\n HttpConstants: {\n /**\n * Http Verbs\n *\n * @const\n * @enum {string}\n */\n HttpVerbs: {\n PUT: \"PUT\",\n GET: \"GET\",\n DELETE: \"DELETE\",\n POST: \"POST\",\n MERGE: \"MERGE\",\n HEAD: \"HEAD\",\n PATCH: \"PATCH\",\n },\n\n StatusCodes: {\n TooManyRequests: 429,\n },\n },\n\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n *\n * @const\n * @type {string}\n */\n AUTHORIZATION: \"authorization\",\n\n AUTHORIZATION_SCHEME: \"Bearer\",\n\n /**\n * The Retry-After response-header field can be used with a 503 (Service\n * Unavailable) or 349 (Too Many Requests) responses to indicate how long\n * the service is expected to be unavailable to the requesting client.\n *\n * @const\n * @type {string}\n */\n RETRY_AFTER: \"Retry-After\",\n\n /**\n * The UserAgent header.\n *\n * @const\n * @type {string}\n */\n USER_AGENT: \"User-Agent\",\n },\n};\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { v4 as uuidv4 } from \"uuid\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { RestError } from \"../restError\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"./constants\";\n\n/**\n * A constant that indicates whether the environment is node.js or browser based.\n */\nexport const isNode =\n typeof process !== \"undefined\" &&\n !!process.version &&\n !!process.versions &&\n !!process.versions.node;\n\n/**\n * Checks if a parsed URL is HTTPS\n *\n * @param {object} urlToCheck The url to check\n * @return {boolean} True if the URL is HTTPS; false otherwise.\n */\nexport function urlIsHTTPS(urlToCheck: { protocol: string }): boolean {\n return urlToCheck.protocol.toLowerCase() === Constants.HTTPS;\n}\n\n/**\n * Encodes an URI.\n *\n * @param {string} uri The URI to be encoded.\n * @return {string} The encoded URI.\n */\nexport function encodeUri(uri: string): string {\n return encodeURIComponent(uri)\n .replace(/!/g, \"%21\")\n .replace(/\"/g, \"%27\")\n .replace(/\\(/g, \"%28\")\n .replace(/\\)/g, \"%29\")\n .replace(/\\*/g, \"%2A\");\n}\n\n/**\n * Returns a stripped version of the Http Response which only contains body,\n * headers and the status.\n *\n * @param {HttpOperationResponse} response The Http Response\n *\n * @return {object} The stripped version of Http Response.\n */\nexport function stripResponse(response: HttpOperationResponse): any {\n const strippedResponse: any = {};\n strippedResponse.body = response.bodyAsText;\n strippedResponse.headers = response.headers;\n strippedResponse.status = response.status;\n return strippedResponse;\n}\n\n/**\n * Returns a stripped version of the Http Request that does not contain the\n * Authorization 
header.\n *\n * @param {WebResource} request The Http Request object\n *\n * @return {WebResource} The stripped version of Http Request.\n */\nexport function stripRequest(request: WebResourceLike): WebResourceLike {\n const strippedRequest = request.clone();\n if (strippedRequest.headers) {\n strippedRequest.headers.remove(\"authorization\");\n }\n return strippedRequest;\n}\n\n/**\n * Validates the given uuid as a string\n *\n * @param {string} uuid The uuid as a string that needs to be validated\n *\n * @return {boolean} True if the uuid is valid; false otherwise.\n */\nexport function isValidUuid(uuid: string): boolean {\n const validUuidRegex = new RegExp(\n \"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$\",\n \"ig\"\n );\n return validUuidRegex.test(uuid);\n}\n\n/**\n * Provides an array of values of an object. For example\n * for a given object { \"a\": \"foo\", \"b\": \"bar\" }, the method returns [\"foo\", \"bar\"].\n *\n * @param {object} obj An object whose properties need to be enumerated so that it\"s values can be provided as an array\n *\n * @return {any[]} An array of values of the given object.\n */\nexport function objectValues(obj: { [key: string]: any }): any[] {\n const result: any[] = [];\n if (obj && obj instanceof Object) {\n for (const key in obj) {\n if (obj.hasOwnProperty(key)) {\n result.push((obj)[key]);\n }\n }\n } else {\n throw new Error(\n `The provided object ${JSON.stringify(\n obj,\n undefined,\n 2\n )} is not a valid object that can be ` + `enumerated to provide its values as an array.`\n );\n }\n return result;\n}\n\n/**\n * Generated UUID\n *\n * @return {string} RFC4122 v4 UUID.\n */\nexport function generateUuid(): string {\n return uuidv4();\n}\n\n/**\n * Executes an array of promises sequentially. Inspiration of this method is here:\n * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. 
An awesome blog on promises!\n *\n * @param {Array} promiseFactories An array of promise factories(A function that return a promise)\n *\n * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain.\n * If not provided then the promise chain starts with undefined.\n *\n * @return A chain of resolved or rejected promises\n */\nexport function executePromisesSequentially(promiseFactories: Array, kickstart: any) {\n let result = Promise.resolve(kickstart);\n promiseFactories.forEach((promiseFactory) => {\n result = result.then(promiseFactory);\n });\n return result;\n}\n\n/**\n * Merges source object into the target object\n * @param {object} source The object that needs to be merged\n *\n * @param {object} target The object to be merged into\n *\n * @returns {object} Returns the merged target object.\n */\nexport function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) {\n Object.keys(source).forEach((key) => {\n target[key] = source[key];\n });\n return target;\n}\n\n/**\n * A wrapper for setTimeout that resolves a promise after t milliseconds.\n * @param {number} t The number of milliseconds to be delayed.\n * @param {T} value The value to be resolved with after a timeout of t milliseconds.\n * @returns {Promise} Resolved promise\n */\nexport function delay(t: number, value?: T): Promise {\n return new Promise((resolve) => setTimeout(() => resolve(value), t));\n}\n\n/**\n * Service callback that is returned for REST requests initiated by the service client.\n */\nexport interface ServiceCallback {\n /**\n * A method that will be invoked as a callback to a service function.\n * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null.\n * @param {TResult} [result] The deserialized response body if an error did not occur.\n * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur.\n * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur.\n */\n (\n err: Error | RestError | null,\n result?: TResult,\n request?: WebResourceLike,\n response?: HttpOperationResponse\n ): void;\n}\n\n/**\n * Converts a Promise to a callback.\n * @param {Promise} promise The Promise to be converted to a callback\n * @returns {Function} A function that takes the callback (cb: Function): void\n * @deprecated generated code should instead depend on responseToBody\n */\nexport function promiseToCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: Function): void => {\n promise.then(\n (data: any) => {\n cb(undefined, data);\n },\n (err: Error) => {\n cb(err);\n }\n );\n };\n}\n\n/**\n * Converts a Promise to a service callback.\n * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback\n * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void\n */\nexport function promiseToServiceCallback(promise: Promise): Function {\n if (typeof promise.then !== \"function\") {\n throw new Error(\"The provided input is not a Promise.\");\n }\n return (cb: ServiceCallback): void => {\n promise.then(\n (data: HttpOperationResponse) => {\n process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);\n },\n (err: Error) => {\n process.nextTick(cb, err);\n }\n );\n };\n}\n\nexport function prepareXMLRootList(obj: 
any, elementName: string) {\n if (!Array.isArray(obj)) {\n obj = [obj];\n }\n return { [elementName]: obj };\n}\n\n/**\n * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor\n * @param {object} targetCtor The target object on which the properties need to be applied.\n * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.\n */\nexport function applyMixins(targetCtor: any, sourceCtors: any[]): void {\n sourceCtors.forEach((sourceCtors) => {\n Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {\n targetCtor.prototype[name] = sourceCtors.prototype[name];\n });\n });\n}\n\nconst validateISODuration = /^(-|\\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;\n\n/**\n * Indicates whether the given string is in ISO 8601 format.\n * @param {string} value The value to be validated for ISO 8601 duration format.\n * @return {boolean} `true` if valid, `false` otherwise.\n */\nexport function isDuration(value: string): boolean {\n return validateISODuration.test(value);\n}\n\n/**\n * Replace all of the instances of searchValue in value with the provided replaceValue.\n * @param {string | undefined} value The value to search and replace in.\n * @param {string} searchValue The value to search for in the value argument.\n * @param {string} replaceValue The value to replace searchValue with in the value argument.\n * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue.\n */\nexport function replaceAll(\n value: string | undefined,\n searchValue: string,\n replaceValue: string\n): string | undefined {\n return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || \"\");\n}\n\n/**\n * Determines whether the given enity is a basic/primitive type\n * (string, number, boolean, null, undefined).\n * @param value Any entity\n * @return boolean - true is it is primitive type, false otherwise.\n */\nexport function isPrimitiveType(value: any): boolean {\n return (typeof value !== \"object\" && typeof value !== \"function\") || value === null;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport * as base64 from \"./util/base64\";\nimport * as utils from \"./util/utils\";\n\nexport class Serializer {\n constructor(\n public readonly modelMappers: { [key: string]: any } = {},\n public readonly isXML?: boolean\n ) {}\n\n validateConstraints(mapper: Mapper, value: any, objectName: string): void {\n const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => {\n throw new Error(\n `\"${objectName}\" with value \"${value}\" should satisfy the constraint \"${constraintName}\": ${constraintValue}.`\n );\n };\n if (mapper.constraints && value != undefined) {\n const {\n ExclusiveMaximum,\n ExclusiveMinimum,\n InclusiveMaximum,\n InclusiveMinimum,\n MaxItems,\n MaxLength,\n MinItems,\n MinLength,\n MultipleOf,\n Pattern,\n UniqueItems,\n } = mapper.constraints;\n if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) {\n failValidation(\"ExclusiveMaximum\", ExclusiveMaximum);\n }\n if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) {\n failValidation(\"ExclusiveMinimum\", ExclusiveMinimum);\n }\n if (InclusiveMaximum != undefined && value > InclusiveMaximum) {\n failValidation(\"InclusiveMaximum\", InclusiveMaximum);\n }\n if (InclusiveMinimum != undefined && value < InclusiveMinimum) {\n failValidation(\"InclusiveMinimum\", InclusiveMinimum);\n }\n if (MaxItems != undefined && value.length > MaxItems) {\n failValidation(\"MaxItems\", MaxItems);\n }\n if (MaxLength != undefined && value.length > MaxLength) {\n failValidation(\"MaxLength\", MaxLength);\n }\n if (MinItems != undefined && value.length < MinItems) {\n failValidation(\"MinItems\", MinItems);\n }\n if (MinLength != undefined && value.length < MinLength) {\n failValidation(\"MinLength\", MinLength);\n }\n if (MultipleOf != undefined && value % MultipleOf !== 0) {\n failValidation(\"MultipleOf\", MultipleOf);\n }\n if (Pattern) {\n const pattern: RegExp = typeof Pattern === \"string\" ? 
new RegExp(Pattern) : Pattern;\n if (typeof value !== \"string\" || value.match(pattern) === null) {\n failValidation(\"Pattern\", Pattern);\n }\n }\n if (\n UniqueItems &&\n value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i)\n ) {\n failValidation(\"UniqueItems\", UniqueItems);\n }\n }\n }\n\n /**\n * Serialize the given object based on its metadata defined in the mapper\n *\n * @param {Mapper} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized\n *\n * @param {string} objectName Name of the serialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object\n */\n serialize(mapper: Mapper, object: any, objectName?: string): any {\n let payload: any = {};\n const mapperType = mapper.type.name as string;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = [];\n }\n\n if (mapper.isConstant) {\n object = mapper.defaultValue;\n }\n\n // This table of allowed values should help explain\n // the mapper.required and mapper.nullable properties.\n // X means \"neither undefined or null are allowed\".\n // || required\n // || true | false\n // nullable || ==========================\n // true || null | undefined/null\n // false || X | undefined\n // undefined || X | undefined/null\n\n const { required, nullable } = mapper;\n\n if (required && nullable && object === undefined) {\n throw new Error(`${objectName} cannot be undefined.`);\n }\n if (required && !nullable && object == undefined) {\n throw new Error(`${objectName} cannot be null or undefined.`);\n }\n if (!required && nullable === false && object === null) {\n throw new Error(`${objectName} cannot be null.`);\n }\n\n if (object == undefined) {\n payload = object;\n } else {\n // Validate Constraints if any\n this.validateConstraints(mapper, object, objectName);\n if (mapperType.match(/^any$/gi) !== null) {\n payload = object;\n } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) {\n payload = serializeBasicTypes(mapperType, objectName, object);\n } else if (mapperType.match(/^Enum$/gi) !== null) {\n const enumMapper: EnumMapper = mapper as EnumMapper;\n payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object);\n } else if (\n mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null\n ) {\n payload = serializeDateTypes(mapperType, object, objectName);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = serializeByteArrayType(objectName, object);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = serializeBase64UrlType(objectName, object);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName);\n } else if (mapperType.match(/^Composite$/gi) !== null) {\n payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName);\n }\n }\n return payload;\n }\n\n /**\n * Deserialize the given object based on its metadata defined in the mapper\n *\n * @param {object} mapper The mapper which defines the metadata of the serializable object\n *\n * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized\n *\n * @param {string} objectName Name of the deserialized object\n *\n * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object\n */\n deserialize(mapper: Mapper, responseBody: any, objectName: string): any {\n if (responseBody == undefined) {\n if (this.isXML && mapper.type.name === \"Sequence\" && !mapper.xmlIsWrapped) {\n // Edge case for empty XML non-wrapped lists. xml2js can't distinguish\n // between the list being empty versus being missing,\n // so let's do the more user-friendly thing and return an empty list.\n responseBody = [];\n }\n // specifically check for undefined as default value can be a falsey value `0, \"\", false, null`\n if (mapper.defaultValue !== undefined) {\n responseBody = mapper.defaultValue;\n }\n return responseBody;\n }\n\n let payload: any;\n const mapperType = mapper.type.name;\n if (!objectName) {\n objectName = mapper.serializedName!;\n }\n\n if (mapperType.match(/^Composite$/gi) !== null) {\n payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName);\n } else {\n if (this.isXML) {\n /**\n * If the mapper specifies this as a non-composite type value but the responseBody contains\n * both header (\"$\") and body (\"_\") properties, then just reduce the responseBody value to\n * the body (\"_\") property.\n */\n if (responseBody[\"$\"] != undefined && responseBody[\"_\"] != undefined) {\n responseBody = responseBody[\"_\"];\n }\n }\n\n if (mapperType.match(/^Number$/gi) !== null) {\n payload = parseFloat(responseBody);\n if (isNaN(payload)) {\n payload = responseBody;\n }\n } else if (mapperType.match(/^Boolean$/gi) !== null) {\n if (responseBody === \"true\") {\n payload = true;\n } else if (responseBody === \"false\") {\n payload = false;\n } else {\n payload = responseBody;\n }\n } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) {\n payload = responseBody;\n } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) {\n payload = new Date(responseBody);\n } else if (mapperType.match(/^UnixTime$/gi) !== null) {\n payload = unixTimeToDate(responseBody);\n } else if (mapperType.match(/^ByteArray$/gi) !== null) {\n payload = base64.decodeString(responseBody);\n } else if (mapperType.match(/^Base64Url$/gi) !== null) {\n payload = base64UrlToByteArray(responseBody);\n } else if (mapperType.match(/^Sequence$/gi) !== null) {\n payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName);\n } else if (mapperType.match(/^Dictionary$/gi) !== null) {\n payload = deserializeDictionaryType(\n this,\n mapper as DictionaryMapper,\n responseBody,\n objectName\n );\n }\n }\n\n if (mapper.isConstant) {\n payload = mapper.defaultValue;\n }\n\n return payload;\n }\n}\n\nfunction trimEnd(str: string, ch: string) {\n let len = str.length;\n while (len - 1 >= 0 && str[len - 1] === ch) {\n --len;\n }\n return str.substr(0, len);\n}\n\nfunction bufferToBase64Url(buffer: any): string | undefined {\n if (!buffer) {\n return undefined;\n }\n if (!(buffer instanceof Uint8Array)) {\n throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`);\n }\n // Uint8Array to Base64.\n const str = base64.encodeByteArray(buffer);\n // Base64 to Base64Url.\n return trimEnd(str, \"=\").replace(/\\+/g, \"-\").replace(/\\//g, \"_\");\n}\n\nfunction base64UrlToByteArray(str: string): Uint8Array | undefined {\n if (!str) {\n return undefined;\n 
}\n if (str && typeof str.valueOf() !== \"string\") {\n throw new Error(\"Please provide an input of type string for converting to Uint8Array\");\n }\n // Base64Url to Base64.\n str = str.replace(/\\-/g, \"+\").replace(/\\_/g, \"/\");\n // Base64 to Uint8Array.\n return base64.decodeString(str);\n}\n\nfunction splitSerializeName(prop: string | undefined): string[] {\n const classes: string[] = [];\n let partialclass = \"\";\n if (prop) {\n const subwords = prop.split(\".\");\n\n for (const item of subwords) {\n if (item.charAt(item.length - 1) === \"\\\\\") {\n partialclass += item.substr(0, item.length - 1) + \".\";\n } else {\n partialclass += item;\n classes.push(partialclass);\n partialclass = \"\";\n }\n }\n }\n\n return classes;\n}\n\nfunction dateToUnixTime(d: string | Date): number | undefined {\n if (!d) {\n return undefined;\n }\n\n if (typeof d.valueOf() === \"string\") {\n d = new Date(d as string);\n }\n return Math.floor((d as Date).getTime() / 1000);\n}\n\nfunction unixTimeToDate(n: number): Date | undefined {\n if (!n) {\n return undefined;\n }\n return new Date(n * 1000);\n}\n\nfunction serializeBasicTypes(typeName: string, objectName: string, value: any): any {\n if (value !== null && value !== undefined) {\n if (typeName.match(/^Number$/gi) !== null) {\n if (typeof value !== \"number\") {\n throw new Error(`${objectName} with value ${value} must be of type number.`);\n }\n } else if (typeName.match(/^String$/gi) !== null) {\n if (typeof value.valueOf() !== \"string\") {\n throw new Error(`${objectName} with value \"${value}\" must be of type string.`);\n }\n } else if (typeName.match(/^Uuid$/gi) !== null) {\n if (!(typeof value.valueOf() === \"string\" && utils.isValidUuid(value))) {\n throw new Error(\n `${objectName} with value \"${value}\" must be of type string and a valid uuid.`\n );\n }\n } else if (typeName.match(/^Boolean$/gi) !== null) {\n if (typeof value !== \"boolean\") {\n throw new Error(`${objectName} with value ${value} must be of type boolean.`);\n }\n } else if (typeName.match(/^Stream$/gi) !== null) {\n const objectType = typeof value;\n if (\n objectType !== \"string\" &&\n objectType !== \"function\" &&\n !(value instanceof ArrayBuffer) &&\n !ArrayBuffer.isView(value) &&\n !(typeof Blob === \"function\" && value instanceof Blob)\n ) {\n throw new Error(\n `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`\n );\n }\n }\n }\n return value;\n}\n\nfunction serializeEnumType(objectName: string, allowedValues: Array, value: any): any {\n if (!allowedValues) {\n throw new Error(\n `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`\n );\n }\n const isPresent = allowedValues.some((item) => {\n if (typeof item.valueOf() === \"string\") {\n return item.toLowerCase() === value.toLowerCase();\n }\n return item === value;\n });\n if (!isPresent) {\n throw new Error(\n `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify(\n allowedValues\n )}.`\n );\n }\n return value;\n}\n\nfunction serializeByteArrayType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = base64.encodeByteArray(value);\n }\n return value;\n}\n\nfunction serializeBase64UrlType(objectName: string, value: any): any {\n if (value != undefined) {\n if (!(value instanceof Uint8Array)) {\n throw new Error(`${objectName} must be of type Uint8Array.`);\n }\n value = bufferToBase64Url(value);\n }\n return value;\n}\n\nfunction serializeDateTypes(typeName: string, value: any, objectName: string) {\n if (value != undefined) {\n if (typeName.match(/^Date$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value =\n value instanceof Date\n ? value.toISOString().substring(0, 10)\n : new Date(value).toISOString().substring(0, 10);\n } else if (typeName.match(/^DateTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`);\n }\n value = value instanceof Date ? value.toISOString() : new Date(value).toISOString();\n } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`);\n }\n value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString();\n } else if (typeName.match(/^UnixTime$/gi) !== null) {\n if (\n !(\n value instanceof Date ||\n (typeof value.valueOf() === \"string\" && !isNaN(Date.parse(value)))\n )\n ) {\n throw new Error(\n `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` +\n `for it to be serialized in UnixTime/Epoch format.`\n );\n }\n value = dateToUnixTime(value);\n } else if (typeName.match(/^TimeSpan$/gi) !== null) {\n if (!utils.isDuration(value)) {\n throw new Error(\n `${objectName} must be a string in ISO 8601 format. 
Instead was \"${value}\".`\n );\n }\n value = value;\n }\n }\n return value;\n}\n\nfunction serializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n object: any,\n objectName: string\n) {\n if (!Array.isArray(object)) {\n throw new Error(`${objectName} must be of type Array.`);\n }\n const elementType = mapper.type.element;\n if (!elementType || typeof elementType !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempArray = [];\n for (let i = 0; i < object.length; i++) {\n tempArray[i] = serializer.serialize(elementType, object[i], objectName);\n }\n return tempArray;\n}\n\nfunction serializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n object: any,\n objectName: string\n) {\n if (typeof object !== \"object\") {\n throw new Error(`${objectName} must be of type object.`);\n }\n const valueType = mapper.type.value;\n if (!valueType || typeof valueType !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}.`\n );\n }\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(object)) {\n tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + \".\" + key);\n }\n return tempDictionary;\n}\n\n/**\n * Resolves a composite mapper's modelProperties.\n * @param serializer the serializer containing the entire set of mappers\n * @param mapper the composite mapper to resolve\n */\nfunction resolveModelProperties(\n serializer: Serializer,\n mapper: CompositeMapper,\n objectName: string\n): { [propertyName: string]: Mapper } {\n let modelProps = mapper.type.modelProperties;\n if (!modelProps) {\n const className = mapper.type.className;\n if (!className) {\n throw new Error(\n `Class name for model \"${objectName}\" is not provided in the mapper \"${JSON.stringify(\n mapper,\n undefined,\n 2\n )}\".`\n );\n }\n\n const modelMapper = serializer.modelMappers[className];\n if (!modelMapper) {\n throw new Error(`mapper() cannot be null or undefined for model \"${className}\".`);\n }\n modelProps = modelMapper.type.modelProperties;\n if (!modelProps) {\n throw new Error(\n `modelProperties cannot be null or undefined in the ` +\n `mapper \"${JSON.stringify(\n modelMapper\n )}\" of type \"${className}\" for object \"${objectName}\".`\n );\n }\n }\n\n return modelProps;\n}\n\nfunction serializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n objectName: string\n) {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, object, \"clientName\");\n }\n\n if (object != undefined) {\n const payload: any = {};\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n if (propertyMapper.readOnly) {\n continue;\n }\n\n let propName: string | undefined;\n let parentObject: any = payload;\n if (serializer.isXML) {\n if (propertyMapper.xmlIsWrapped) {\n propName = propertyMapper.xmlName;\n } else {\n propName = propertyMapper.xmlElementName || propertyMapper.xmlName;\n }\n } else {\n const paths = splitSerializeName(propertyMapper.serializedName!);\n propName = paths.pop();\n\n for (const pathName of paths) {\n const childObject = parentObject[pathName];\n if (childObject == undefined && 
object[key] != undefined) {\n parentObject[pathName] = {};\n }\n parentObject = parentObject[pathName];\n }\n }\n\n if (parentObject != undefined) {\n const propertyObjectName =\n propertyMapper.serializedName !== \"\"\n ? objectName + \".\" + propertyMapper.serializedName\n : objectName;\n\n let toSerialize = object[key];\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (\n polymorphicDiscriminator &&\n polymorphicDiscriminator.clientName === key &&\n toSerialize == undefined\n ) {\n toSerialize = mapper.serializedName;\n }\n\n const serializedValue = serializer.serialize(\n propertyMapper,\n toSerialize,\n propertyObjectName\n );\n if (serializedValue !== undefined && propName != undefined) {\n if (propertyMapper.xmlIsAttribute) {\n // $ is the key attributes are kept under in xml2js.\n // This keeps things simple while preventing name collision\n // with names in user documents.\n parentObject.$ = parentObject.$ || {};\n parentObject.$[propName] = serializedValue;\n } else if (propertyMapper.xmlIsWrapped) {\n parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue };\n } else {\n parentObject[propName] = serializedValue;\n }\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const propNames = Object.keys(modelProps);\n for (const clientPropName in object) {\n const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName);\n if (isAdditionalProperty) {\n payload[clientPropName] = serializer.serialize(\n additionalPropertiesMapper,\n object[clientPropName],\n objectName + '[\"' + clientPropName + '\"]'\n );\n }\n }\n }\n\n return payload;\n }\n return object;\n}\n\nfunction isSpecialXmlProperty(propertyName: string): boolean {\n return [\"$\", \"_\"].includes(propertyName);\n}\n\nfunction deserializeCompositeType(\n serializer: Serializer,\n mapper: CompositeMapper,\n responseBody: any,\n objectName: string\n): any {\n if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) {\n mapper = getPolymorphicMapper(serializer, mapper, responseBody, \"serializedName\");\n }\n\n const modelProps = resolveModelProperties(serializer, mapper, objectName);\n let instance: { [key: string]: any } = {};\n const handledPropertyNames: string[] = [];\n\n for (const key of Object.keys(modelProps)) {\n const propertyMapper = modelProps[key];\n const paths = splitSerializeName(modelProps[key].serializedName!);\n handledPropertyNames.push(paths[0]);\n const { serializedName, xmlName, xmlElementName } = propertyMapper;\n let propertyObjectName = objectName;\n if (serializedName !== \"\" && serializedName !== undefined) {\n propertyObjectName = objectName + \".\" + serializedName;\n }\n\n const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix;\n if (headerCollectionPrefix) {\n const dictionary: any = {};\n for (const headerKey of Object.keys(responseBody)) {\n if (headerKey.startsWith(headerCollectionPrefix)) {\n dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(\n (propertyMapper as DictionaryMapper).type.value,\n responseBody[headerKey],\n propertyObjectName\n );\n }\n\n handledPropertyNames.push(headerKey);\n }\n instance[key] = dictionary;\n } else if (serializer.isXML) {\n if (propertyMapper.xmlIsAttribute && responseBody.$) {\n instance[key] = serializer.deserialize(\n propertyMapper,\n responseBody.$[xmlName!],\n propertyObjectName\n );\n } else {\n const propertyName = 
xmlElementName || xmlName || serializedName;\n let unwrappedProperty = responseBody[propertyName!];\n if (propertyMapper.xmlIsWrapped) {\n unwrappedProperty = responseBody[xmlName!];\n unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!];\n\n const isEmptyWrappedList = unwrappedProperty === undefined;\n if (isEmptyWrappedList) {\n unwrappedProperty = [];\n }\n }\n instance[key] = serializer.deserialize(\n propertyMapper,\n unwrappedProperty,\n propertyObjectName\n );\n }\n } else {\n // deserialize the property if it is present in the provided responseBody instance\n let propertyInstance;\n let res = responseBody;\n // traversing the object step by step.\n for (const item of paths) {\n if (!res) break;\n res = res[item];\n }\n propertyInstance = res;\n const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;\n // checking that the model property name (key)(ex: \"fishtype\") and the\n // clientName of the polymorphicDiscriminator {metadata} (ex: \"fishtype\")\n // instead of the serializedName of the polymorphicDiscriminator (ex: \"fish.type\")\n // is a better approach. The generator is not consistent with escaping '\\.' in the\n // serializedName of the property (ex: \"fish\\.type\") that is marked as polymorphic discriminator\n // and the serializedName of the metadata polymorphicDiscriminator (ex: \"fish.type\"). However,\n // the clientName transformation of the polymorphicDiscriminator (ex: \"fishtype\") and\n // the transformation of model property name (ex: \"fishtype\") is done consistently.\n // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.\n if (\n polymorphicDiscriminator &&\n key === polymorphicDiscriminator.clientName &&\n propertyInstance == undefined\n ) {\n propertyInstance = mapper.serializedName;\n }\n\n let serializedValue;\n // paging\n if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === \"\") {\n propertyInstance = responseBody[key];\n const arrayInstance = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n // Copy over any properties that have already been added into the instance, where they do\n // not exist on the newly de-serialized array\n for (const [key, value] of Object.entries(instance)) {\n if (!arrayInstance.hasOwnProperty(key)) {\n arrayInstance[key] = value;\n }\n }\n instance = arrayInstance;\n } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) {\n serializedValue = serializer.deserialize(\n propertyMapper,\n propertyInstance,\n propertyObjectName\n );\n instance[key] = serializedValue;\n }\n }\n }\n\n const additionalPropertiesMapper = mapper.type.additionalProperties;\n if (additionalPropertiesMapper) {\n const isAdditionalProperty = (responsePropName: string) => {\n for (const clientPropName in modelProps) {\n const paths = splitSerializeName(modelProps[clientPropName].serializedName);\n if (paths[0] === responsePropName) {\n return false;\n }\n }\n return true;\n };\n\n for (const responsePropName in responseBody) {\n if (isAdditionalProperty(responsePropName)) {\n instance[responsePropName] = serializer.deserialize(\n additionalPropertiesMapper,\n responseBody[responsePropName],\n objectName + '[\"' + responsePropName + '\"]'\n );\n }\n }\n } else if (responseBody) {\n for (const key of Object.keys(responseBody)) {\n if (\n instance[key] === undefined &&\n !handledPropertyNames.includes(key) &&\n !isSpecialXmlProperty(key)\n ) {\n instance[key] = responseBody[key];\n }\n }\n 
}\n\n return instance;\n}\n\nfunction deserializeDictionaryType(\n serializer: Serializer,\n mapper: DictionaryMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const value = mapper.type.value;\n if (!value || typeof value !== \"object\") {\n throw new Error(\n `\"value\" metadata for a Dictionary must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n const tempDictionary: { [key: string]: any } = {};\n for (const key of Object.keys(responseBody)) {\n tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName);\n }\n return tempDictionary;\n }\n return responseBody;\n}\n\nfunction deserializeSequenceType(\n serializer: Serializer,\n mapper: SequenceMapper,\n responseBody: any,\n objectName: string\n): any {\n /*jshint validthis: true */\n const element = mapper.type.element;\n if (!element || typeof element !== \"object\") {\n throw new Error(\n `element\" metadata for an Array must be defined in the ` +\n `mapper and it must of type \"object\" in ${objectName}`\n );\n }\n if (responseBody) {\n if (!Array.isArray(responseBody)) {\n // xml2js will interpret a single element array as just the element, so force it to be an array\n responseBody = [responseBody];\n }\n\n const tempArray = [];\n for (let i = 0; i < responseBody.length; i++) {\n tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`);\n }\n return tempArray;\n }\n return responseBody;\n}\n\nfunction getPolymorphicMapper(\n serializer: Serializer,\n mapper: CompositeMapper,\n object: any,\n polymorphicPropertyName: \"clientName\" | \"serializedName\"\n): CompositeMapper {\n const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper);\n if (polymorphicDiscriminator) {\n const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName];\n if (discriminatorName != undefined) {\n const discriminatorValue = object[discriminatorName];\n if (discriminatorValue != undefined) {\n const typeName = mapper.type.uberParent || mapper.type.className;\n const indexDiscriminator =\n discriminatorValue === typeName\n ? 
discriminatorValue\n : typeName + \".\" + discriminatorValue;\n const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator];\n if (polymorphicMapper) {\n mapper = polymorphicMapper;\n }\n }\n }\n }\n return mapper;\n}\n\nfunction getPolymorphicDiscriminatorRecursively(\n serializer: Serializer,\n mapper: CompositeMapper\n): PolymorphicDiscriminator | undefined {\n return (\n mapper.type.polymorphicDiscriminator ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) ||\n getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)\n );\n}\n\nfunction getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) {\n return (\n typeName &&\n serializer.modelMappers[typeName] &&\n serializer.modelMappers[typeName].type.polymorphicDiscriminator\n );\n}\n\nexport interface MapperConstraints {\n InclusiveMaximum?: number;\n ExclusiveMaximum?: number;\n InclusiveMinimum?: number;\n ExclusiveMinimum?: number;\n MaxLength?: number;\n MinLength?: number;\n Pattern?: RegExp;\n MaxItems?: number;\n MinItems?: number;\n UniqueItems?: true;\n MultipleOf?: number;\n}\n\nexport type MapperType =\n | SimpleMapperType\n | CompositeMapperType\n | SequenceMapperType\n | DictionaryMapperType\n | EnumMapperType;\n\nexport interface SimpleMapperType {\n name:\n | \"Base64Url\"\n | \"Boolean\"\n | \"ByteArray\"\n | \"Date\"\n | \"DateTime\"\n | \"DateTimeRfc1123\"\n | \"Object\"\n | \"Stream\"\n | \"String\"\n | \"TimeSpan\"\n | \"UnixTime\"\n | \"Uuid\"\n | \"Number\"\n | \"any\";\n}\n\nexport interface CompositeMapperType {\n name: \"Composite\";\n\n // Only one of the two below properties should be present.\n // Use className to reference another type definition,\n // and use modelProperties/additionalProperties when the reference to the other type has been resolved.\n className?: string;\n\n modelProperties?: { [propertyName: string]: Mapper };\n additionalProperties?: Mapper;\n\n uberParent?: string;\n polymorphicDiscriminator?: PolymorphicDiscriminator;\n}\n\nexport interface SequenceMapperType {\n name: \"Sequence\";\n element: Mapper;\n}\n\nexport interface DictionaryMapperType {\n name: \"Dictionary\";\n value: Mapper;\n}\n\nexport interface EnumMapperType {\n name: \"Enum\";\n allowedValues: any[];\n}\n\nexport interface BaseMapper {\n xmlName?: string;\n xmlIsAttribute?: boolean;\n xmlElementName?: string;\n xmlIsWrapped?: boolean;\n readOnly?: boolean;\n isConstant?: boolean;\n required?: boolean;\n nullable?: boolean;\n serializedName?: string;\n type: MapperType;\n defaultValue?: any;\n constraints?: MapperConstraints;\n}\n\nexport type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper;\n\nexport interface PolymorphicDiscriminator {\n serializedName: string;\n clientName: string;\n [key: string]: string;\n}\n\nexport interface CompositeMapper extends BaseMapper {\n type: CompositeMapperType;\n}\n\nexport interface SequenceMapper extends BaseMapper {\n type: SequenceMapperType;\n}\n\nexport interface DictionaryMapper extends BaseMapper {\n type: DictionaryMapperType;\n headerCollectionPrefix?: string;\n}\n\nexport interface EnumMapper extends BaseMapper {\n type: EnumMapperType;\n}\n\nexport interface UrlParameterValue {\n value: string;\n skipUrlEncoding: boolean;\n}\n\n// TODO: why is this here?\nexport function serializeObject(toSerialize: any): any {\n if (toSerialize == undefined) return undefined;\n if (toSerialize instanceof Uint8Array) {\n toSerialize = 
base64.encodeByteArray(toSerialize);\n return toSerialize;\n } else if (toSerialize instanceof Date) {\n return toSerialize.toISOString();\n } else if (Array.isArray(toSerialize)) {\n const array = [];\n for (let i = 0; i < toSerialize.length; i++) {\n array.push(serializeObject(toSerialize[i]));\n }\n return array;\n } else if (typeof toSerialize === \"object\") {\n const dictionary: { [key: string]: any } = {};\n for (const property in toSerialize) {\n dictionary[property] = serializeObject(toSerialize[property]);\n }\n return dictionary;\n }\n return toSerialize;\n}\n\n/**\n * Utility function to create a K:V from a list of strings\n */\nfunction strEnum(o: Array): { [K in T]: K } {\n const result: any = {};\n for (const key of o) {\n result[key] = key;\n }\n return result;\n}\n\nexport const MapperType = strEnum([\n \"Base64Url\",\n \"Boolean\",\n \"ByteArray\",\n \"Composite\",\n \"Date\",\n \"DateTime\",\n \"DateTimeRfc1123\",\n \"Dictionary\",\n \"Enum\",\n \"Number\",\n \"Object\",\n \"Sequence\",\n \"String\",\n \"Stream\",\n \"TimeSpan\",\n \"UnixTime\",\n]);\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from \"./httpHeaders\";\nimport { OperationSpec } from \"./operationSpec\";\nimport { Mapper, Serializer } from \"./serializer\";\nimport { generateUuid } from \"./util/utils\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { AgentSettings, ProxySettings } from \"./serviceClient\";\n\nexport type HttpMethods =\n | \"GET\"\n | \"PUT\"\n | \"POST\"\n | \"DELETE\"\n | \"PATCH\"\n | \"HEAD\"\n | \"OPTIONS\"\n | \"TRACE\";\nexport type HttpRequestBody =\n | Blob\n | string\n | ArrayBuffer\n | ArrayBufferView\n | (() => NodeJS.ReadableStream);\n\n/**\n * Fired in response to upload or download progress.\n */\nexport type TransferProgressEvent = {\n /**\n * The number of bytes loaded so far.\n */\n loadedBytes: number;\n};\n\n/**\n * Allows the request to be aborted upon firing of the \"abort\" event.\n * Compatible with the browser built-in AbortSignal and common polyfills.\n */\nexport interface AbortSignalLike {\n readonly aborted: boolean;\n dispatchEvent: (event: Event) => boolean;\n onabort: ((this: AbortSignalLike, ev: Event) => any) | null;\n addEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n removeEventListener: (\n type: \"abort\",\n listener: (this: AbortSignalLike, ev: Event) => any,\n options?: any\n ) => void;\n}\n\n/**\n * An abstraction over a REST call.\n */\nexport interface WebResourceLike {\n /**\n * The URL being accessed by the request.\n */\n url: string;\n /**\n * The HTTP method to use when making the request.\n */\n method: HttpMethods;\n /**\n * The HTTP body contents of the request.\n */\n body?: any;\n /**\n * The HTTP headers to use when making the request.\n */\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n /**\n * A query string represented as an object.\n */\n query?: { [key: string]: any };\n /**\n * Used to parse the response.\n */\n operationSpec?: OperationSpec;\n /**\n * If credentials (cookies) should be sent along during an XHR.\n */\n withCredentials: boolean;\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n * If the request is terminated, an `AbortError` is thrown.\n */\n timeout: number;\n /**\n * Proxy configuration.\n */\n proxySettings?: ProxySettings;\n /**\n * HTTP(S) agent configuration.\n */\n agentSettings?: AgentSettings;\n /**\n * If the connection should be reused.\n */\n keepAlive?: boolean;\n /**\n * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n /**\n * Used to abort the request later.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void;\n\n /**\n * Sets options on the request.\n */\n prepare(options: RequestPrepareOptions): WebResourceLike;\n /**\n * Clone this request object.\n */\n clone(): WebResourceLike;\n}\n\nexport function isWebResourceLike(object: any): object is WebResourceLike {\n if (typeof object !== \"object\") {\n return false;\n }\n if (\n typeof object.url === \"string\" &&\n typeof object.method === \"string\" &&\n typeof object.headers === \"object\" &&\n isHttpHeadersLike(object.headers) &&\n typeof object.validateRequestProperties === \"function\" &&\n typeof object.prepare === \"function\" &&\n typeof object.clone === \"function\"\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Creates a new WebResource object.\n *\n * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary\n * properties to initiate a request.\n *\n * @constructor\n */\nexport class WebResource {\n url: string;\n method: HttpMethods;\n body?: any;\n headers: HttpHeadersLike;\n /**\n * Whether or not the body of the HttpOperationResponse should be treated as a stream.\n */\n streamResponseBody?: boolean;\n /**\n * Whether or not the HttpOperationResponse should be deserialized. 
If this is undefined, then the\n * HttpOperationResponse should be deserialized.\n */\n shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean);\n /**\n * A function that returns the proper OperationResponse for the given OperationSpec and\n * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will\n * be used.\n */\n operationResponseGetter?: (\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse;\n formData?: any;\n query?: { [key: string]: any };\n operationSpec?: OperationSpec;\n withCredentials: boolean;\n timeout: number;\n proxySettings?: ProxySettings;\n keepAlive?: boolean;\n agentSettings?: AgentSettings;\n redirectLimit?: number;\n\n abortSignal?: AbortSignalLike;\n\n /** Callback which fires upon upload progress. */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /** Callback which fires upon download progress. */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n constructor(\n url?: string,\n method?: HttpMethods,\n body?: any,\n query?: { [key: string]: any },\n headers?: { [key: string]: any } | HttpHeadersLike,\n streamResponseBody?: boolean,\n withCredentials?: boolean,\n abortSignal?: AbortSignalLike,\n timeout?: number,\n onUploadProgress?: (progress: TransferProgressEvent) => void,\n onDownloadProgress?: (progress: TransferProgressEvent) => void,\n proxySettings?: ProxySettings,\n keepAlive?: boolean,\n agentSettings?: AgentSettings,\n redirectLimit?: number\n ) {\n this.streamResponseBody = streamResponseBody;\n this.url = url || \"\";\n this.method = method || \"GET\";\n this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers);\n this.body = body;\n this.query = query;\n this.formData = undefined;\n this.withCredentials = withCredentials || false;\n this.abortSignal = abortSignal;\n this.timeout = timeout || 0;\n this.onUploadProgress = onUploadProgress;\n this.onDownloadProgress = onDownloadProgress;\n this.proxySettings = proxySettings;\n this.keepAlive = keepAlive;\n this.agentSettings = agentSettings;\n this.redirectLimit = redirectLimit;\n }\n\n /**\n * Validates that the required properties such as method, url, headers[\"Content-Type\"],\n * headers[\"accept-language\"] are defined. It will throw an error if one of the above\n * mentioned properties are not defined.\n */\n validateRequestProperties(): void {\n if (!this.method) {\n throw new Error(\"WebResource.method is required.\");\n }\n if (!this.url) {\n throw new Error(\"WebResource.url is required.\");\n }\n }\n\n /**\n * Prepares the request.\n * @param {RequestPrepareOptions} options Options to provide for preparing the request.\n * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline.\n */\n prepare(options: RequestPrepareOptions): WebResource {\n if (!options) {\n throw new Error(\"options object is required\");\n }\n\n if (options.method == undefined || typeof options.method.valueOf() !== \"string\") {\n throw new Error(\"options.method must be a string.\");\n }\n\n if (options.url && options.pathTemplate) {\n throw new Error(\n \"options.url and options.pathTemplate are mutually exclusive. 
Please provide exactly one of them.\"\n );\n }\n\n if (\n (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== \"string\") &&\n (options.url == undefined || typeof options.url.valueOf() !== \"string\")\n ) {\n throw new Error(\"Please provide exactly one of options.pathTemplate or options.url.\");\n }\n\n // set the url if it is provided.\n if (options.url) {\n if (typeof options.url !== \"string\") {\n throw new Error('options.url must be of type \"string\".');\n }\n this.url = options.url;\n }\n\n // set the method\n if (options.method) {\n const validMethods = [\"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\", \"PATCH\", \"TRACE\"];\n if (validMethods.indexOf(options.method.toUpperCase()) === -1) {\n throw new Error(\n 'The provided method \"' +\n options.method +\n '\" is invalid. Supported HTTP methods are: ' +\n JSON.stringify(validMethods)\n );\n }\n }\n this.method = options.method.toUpperCase() as HttpMethods;\n\n // construct the url if path template is provided\n if (options.pathTemplate) {\n const { pathTemplate, pathParameters } = options;\n if (typeof pathTemplate !== \"string\") {\n throw new Error('options.pathTemplate must be of type \"string\".');\n }\n if (!options.baseUrl) {\n options.baseUrl = \"https://management.azure.com\";\n }\n const baseUrl = options.baseUrl;\n let url =\n baseUrl +\n (baseUrl.endsWith(\"/\") ? \"\" : \"/\") +\n (pathTemplate.startsWith(\"/\") ? pathTemplate.slice(1) : pathTemplate);\n const segments = url.match(/({\\w*\\s*\\w*})/gi);\n if (segments && segments.length) {\n if (!pathParameters) {\n throw new Error(\n `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`\n );\n }\n segments.forEach(function (item) {\n const pathParamName = item.slice(1, -1);\n const pathParam = (pathParameters as { [key: string]: any })[pathParamName];\n if (\n pathParam === null ||\n pathParam === undefined ||\n !(typeof pathParam === \"string\" || typeof pathParam === \"object\")\n ) {\n throw new Error(\n `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` +\n ` however, it is not present in ${pathParameters} - ${JSON.stringify(\n pathParameters,\n undefined,\n 2\n )}.` +\n `The value of the path parameter can either be a \"string\" of the form { ${pathParamName}: \"some sample value\" } or ` +\n `it can be an \"object\" of the form { \"${pathParamName}\": { value: \"some sample value\", skipUrlEncoding: true } }.`\n );\n }\n\n if (typeof pathParam.valueOf() === \"string\") {\n url = url.replace(item, encodeURIComponent(pathParam));\n }\n\n if (typeof pathParam.valueOf() === \"object\") {\n if (!pathParam.value) {\n throw new Error(\n `options.pathParameters[${pathParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (pathParam.skipUrlEncoding) {\n url = url.replace(item, pathParam.value);\n } else {\n url = url.replace(item, encodeURIComponent(pathParam.value));\n }\n }\n });\n }\n this.url = url;\n }\n\n // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option.\n if (options.queryParameters) {\n const queryParameters = options.queryParameters;\n if (typeof queryParameters !== \"object\") {\n throw new Error(\n `options.queryParameters must be of type object. It should be a JSON object ` +\n `of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. 
` +\n `The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }.`\n );\n }\n // append question mark if it is not present in the url\n if (this.url && this.url.indexOf(\"?\") === -1) {\n this.url += \"?\";\n }\n // construct queryString\n const queryParams = [];\n // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest().\n this.query = {};\n for (const queryParamName in queryParameters) {\n const queryParam: any = queryParameters[queryParamName];\n if (queryParam) {\n if (typeof queryParam === \"string\") {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam));\n this.query[queryParamName] = encodeURIComponent(queryParam);\n } else if (typeof queryParam === \"object\") {\n if (!queryParam.value) {\n throw new Error(\n `options.queryParameters[${queryParamName}] is of type \"object\" but it does not contain a \"value\" property.`\n );\n }\n if (queryParam.skipUrlEncoding) {\n queryParams.push(queryParamName + \"=\" + queryParam.value);\n this.query[queryParamName] = queryParam.value;\n } else {\n queryParams.push(queryParamName + \"=\" + encodeURIComponent(queryParam.value));\n this.query[queryParamName] = encodeURIComponent(queryParam.value);\n }\n }\n }\n } // end-of-for\n // append the queryString\n this.url += queryParams.join(\"&\");\n }\n\n // add headers to the request if they are provided\n if (options.headers) {\n const headers = options.headers;\n for (const headerName of Object.keys(options.headers)) {\n this.headers.set(headerName, headers[headerName]);\n }\n }\n // ensure accept-language is set correctly\n if (!this.headers.get(\"accept-language\")) {\n this.headers.set(\"accept-language\", \"en-US\");\n }\n // ensure the request-id is set correctly\n if (!this.headers.get(\"x-ms-client-request-id\") && !options.disableClientRequestId) {\n this.headers.set(\"x-ms-client-request-id\", generateUuid());\n }\n\n // default\n if (!this.headers.get(\"Content-Type\")) {\n this.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n }\n\n // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty\n this.body = options.body;\n if (options.body != undefined) {\n // body as a stream special case. 
set the body as-is and check for some special request headers specific to sending a stream.\n if (options.bodyIsStream) {\n if (!this.headers.get(\"Transfer-Encoding\")) {\n this.headers.set(\"Transfer-Encoding\", \"chunked\");\n }\n if (this.headers.get(\"Content-Type\") !== \"application/octet-stream\") {\n this.headers.set(\"Content-Type\", \"application/octet-stream\");\n }\n } else {\n if (options.serializationMapper) {\n this.body = new Serializer(options.mappers).serialize(\n options.serializationMapper,\n options.body,\n \"requestBody\"\n );\n }\n if (!options.disableJsonStringifyOnBody) {\n this.body = JSON.stringify(options.body);\n }\n }\n }\n\n this.abortSignal = options.abortSignal;\n this.onDownloadProgress = options.onDownloadProgress;\n this.onUploadProgress = options.onUploadProgress;\n this.redirectLimit = options.redirectLimit;\n this.streamResponseBody = options.streamResponseBody;\n\n return this;\n }\n\n /**\n * Clone this WebResource HTTP request object.\n * @returns {WebResource} The clone of this WebResource HTTP request object.\n */\n clone(): WebResource {\n const result = new WebResource(\n this.url,\n this.method,\n this.body,\n this.query,\n this.headers && this.headers.clone(),\n this.streamResponseBody,\n this.withCredentials,\n this.abortSignal,\n this.timeout,\n this.onUploadProgress,\n this.onDownloadProgress,\n this.proxySettings,\n this.keepAlive,\n this.agentSettings,\n this.redirectLimit\n );\n\n if (this.formData) {\n result.formData = this.formData;\n }\n\n if (this.operationSpec) {\n result.operationSpec = this.operationSpec;\n }\n\n if (this.shouldDeserialize) {\n result.shouldDeserialize = this.shouldDeserialize;\n }\n\n if (this.operationResponseGetter) {\n result.operationResponseGetter = this.operationResponseGetter;\n }\n\n return result;\n }\n}\n\nexport interface RequestPrepareOptions {\n /**\n * The HTTP request method. Valid values are \"GET\", \"PUT\", \"HEAD\", \"DELETE\", \"OPTIONS\", \"POST\",\n * or \"PATCH\".\n */\n method: HttpMethods;\n /**\n * The request url. It may or may not have query parameters in it. Either provide the \"url\" or\n * provide the \"pathTemplate\" in the options object. Both the options are mutually exclusive.\n */\n url?: string;\n /**\n * A dictionary of query parameters to be appended to the url, where\n * the \"key\" is the \"query-parameter-name\" and the \"value\" is the \"query-parameter-value\".\n * The \"query-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"query-parameter-value\".\n * Example:\n * - query-parameter-value in \"object\" format: { \"query-parameter-name\": { value: \"query-parameter-value\", skipUrlEncoding: true } }\n * - query-parameter-value in \"string\" format: { \"query-parameter-name\": \"query-parameter-value\"}.\n * Note: \"If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url.\n */\n queryParameters?: { [key: string]: any | ParameterValue };\n /**\n * The path template of the request url. Either provide the \"url\" or provide the \"pathTemplate\" in\n * the options object. 
Both the options are mutually exclusive.\n * Example: \"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}\"\n */\n pathTemplate?: string;\n /**\n * The base url of the request. Default value is: \"https://management.azure.com\". This is\n * applicable only with pathTemplate. If you are providing options.url then it is expected that\n * you provide the complete url.\n */\n baseUrl?: string;\n /**\n * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate.\n * Here the key is the \"path-parameter-name\" and the value is the \"path-parameter-value\".\n * The \"path-parameter-value\" can be of type \"string\" or it can be of type \"object\".\n * The \"object\" format should be used when you want to skip url encoding. While using the object format,\n * the object must have a property named value which provides the \"path-parameter-value\".\n * Example:\n * - path-parameter-value in \"object\" format: { \"path-parameter-name\": { value: \"path-parameter-value\", skipUrlEncoding: true } }\n * - path-parameter-value in \"string\" format: { \"path-parameter-name\": \"path-parameter-value\" }.\n */\n pathParameters?: { [key: string]: any | ParameterValue };\n formData?: { [key: string]: any };\n /**\n * A dictionary of request headers that need to be applied to the request.\n * Here the key is the \"header-name\" and the value is the \"header-value\". The header-value MUST be of type string.\n * - ContentType must be provided with the key name as \"Content-Type\". Default value \"application/json; charset=utf-8\".\n * - \"Transfer-Encoding\" is set to \"chunked\" by default if \"options.bodyIsStream\" is set to true.\n * - \"Content-Type\" is set to \"application/octet-stream\" by default if \"options.bodyIsStream\" is set to true.\n * - \"accept-language\" by default is set to \"en-US\"\n * - \"x-ms-client-request-id\" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true\n */\n headers?: { [key: string]: any };\n /**\n * When set to true, instructs the client to not set \"x-ms-client-request-id\" header to a new Guid().\n */\n disableClientRequestId?: boolean;\n /**\n * The request body. It can be of any type. This value will be serialized if it is not a stream.\n */\n body?: any;\n /**\n * Provides information on how to serialize the request body.\n */\n serializationMapper?: Mapper;\n /**\n * A dictionary of mappers that may be used while [de]serialization.\n */\n mappers?: { [x: string]: any };\n /**\n * Provides information on how to deserialize the response body.\n */\n deserializationMapper?: object;\n /**\n * Indicates whether this method should JSON.stringify() the request body. Default value: false.\n */\n disableJsonStringifyOnBody?: boolean;\n /**\n * Indicates whether the request body is a stream (useful for file upload scenarios).\n */\n bodyIsStream?: boolean;\n abortSignal?: AbortSignalLike;\n /**\n * Limit the number of redirects followed for this request. 
If set to 0, redirects will not be followed.\n * If left undefined the default redirect behaviour of the underlying node_fetch will apply.\n */\n redirectLimit?: number;\n\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n streamResponseBody?: boolean;\n}\n\n/**\n * The Parameter value provided for path or query parameters in RequestPrepareOptions\n */\nexport interface ParameterValue {\n value: any;\n skipUrlEncoding: boolean;\n [key: string]: any;\n}\n\n/**\n * Describes the base structure of the options object that will be used in every operation.\n */\nexport interface RequestOptionsBase {\n /**\n * @property {object} [customHeaders] User defined custom request headers that\n * will be applied before the request is sent.\n */\n customHeaders?: { [key: string]: string };\n\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n\n /**\n * Callback which fires upon upload progress.\n */\n onUploadProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback which fires upon download progress.\n */\n onDownloadProgress?: (progress: TransferProgressEvent) => void;\n\n [key: string]: any;\n}\n","/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event 
bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns 
{Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * 
@returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? 
CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n","import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? \"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\n\nexport class RestError extends Error {\n static readonly REQUEST_SEND_ERROR: string = \"REQUEST_SEND_ERROR\";\n static readonly REQUEST_ABORTED_ERROR: string = \"REQUEST_ABORTED_ERROR\";\n static readonly PARSE_ERROR: string = \"PARSE_ERROR\";\n\n code?: string;\n statusCode?: number;\n request?: WebResourceLike;\n response?: HttpOperationResponse;\n body?: any;\n constructor(\n message: string,\n code?: string,\n statusCode?: number,\n request?: WebResourceLike,\n response?: HttpOperationResponse,\n body?: any\n ) {\n super(message);\n this.code = code;\n this.statusCode = statusCode;\n this.request = request;\n this.response = response;\n this.body = body;\n\n Object.setPrototypeOf(this, RestError.prototype);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport AbortController from \"abort-controller\";\nimport FormData from \"form-data\";\n\nimport { HttpClient } from \"./httpClient\";\nimport { WebResourceLike } from \"./webResource\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { HttpHeaders, HttpHeadersLike } from \"./httpHeaders\";\nimport { RestError } from \"./restError\";\nimport { Readable, Transform } from \"stream\";\n\ninterface FetchError extends Error {\n code?: string;\n errno?: string;\n type?: string;\n}\n\nexport type CommonRequestInfo = string; // we only call fetch() on string urls.\n\nexport type CommonRequestInit = Omit & {\n body?: any;\n headers?: any;\n signal?: any;\n};\n\nexport type CommonResponse = Omit & {\n body: any;\n trailer: any;\n formData: any;\n};\n\nexport abstract class FetchHttpClient implements HttpClient {\n async sendRequest(httpRequest: WebResourceLike): Promise {\n if (!httpRequest && typeof httpRequest !== \"object\") {\n throw new Error(\n \"'httpRequest' (WebResource) cannot be null or undefined and must be of type object.\"\n );\n }\n\n const abortController = new AbortController();\n let abortListener: ((event: any) => void) | undefined;\n if (httpRequest.abortSignal) {\n if (httpRequest.abortSignal.aborted) {\n throw new RestError(\n \"The request was aborted\",\n RestError.REQUEST_ABORTED_ERROR,\n undefined,\n httpRequest\n );\n }\n\n abortListener = (event: Event) => {\n if (event.type === \"abort\") {\n abortController.abort();\n }\n };\n httpRequest.abortSignal.addEventListener(\"abort\", abortListener);\n }\n\n if (httpRequest.timeout) {\n setTimeout(() => {\n abortController.abort();\n }, httpRequest.timeout);\n }\n\n if (httpRequest.formData) {\n const formData: any = httpRequest.formData;\n const requestForm = new FormData();\n const appendFormValue = (key: string, value: any) => {\n // value function probably returns a stream so we can provide a fresh stream on each retry\n if (typeof value === \"function\") {\n value = value();\n }\n if (value && value.hasOwnProperty(\"value\") && value.hasOwnProperty(\"options\")) {\n requestForm.append(key, value.value, value.options);\n } else {\n requestForm.append(key, value);\n }\n };\n for (const formKey of Object.keys(formData)) {\n const formValue = formData[formKey];\n if (Array.isArray(formValue)) {\n for (let j = 0; j < formValue.length; j++) {\n appendFormValue(formKey, formValue[j]);\n }\n } else {\n appendFormValue(formKey, formValue);\n }\n 
}\n\n httpRequest.body = requestForm;\n httpRequest.formData = undefined;\n const contentType = httpRequest.headers.get(\"Content-Type\");\n if (contentType && contentType.indexOf(\"multipart/form-data\") !== -1) {\n if (typeof requestForm.getBoundary === \"function\") {\n httpRequest.headers.set(\n \"Content-Type\",\n `multipart/form-data; boundary=${requestForm.getBoundary()}`\n );\n } else {\n // browser will automatically apply a suitable content-type header\n httpRequest.headers.remove(\"Content-Type\");\n }\n }\n }\n\n let body = httpRequest.body\n ? typeof httpRequest.body === \"function\"\n ? httpRequest.body()\n : httpRequest.body\n : undefined;\n if (httpRequest.onUploadProgress && httpRequest.body) {\n let loadedBytes = 0;\n const uploadReportStream = new Transform({\n transform: (chunk: string | Buffer, _encoding, callback) => {\n loadedBytes += chunk.length;\n httpRequest.onUploadProgress!({ loadedBytes });\n callback(undefined, chunk);\n },\n });\n\n if (isReadableStream(body)) {\n body.pipe(uploadReportStream);\n } else {\n uploadReportStream.end(body);\n }\n\n body = uploadReportStream;\n }\n\n const platformSpecificRequestInit: Partial = await this.prepareRequest(\n httpRequest\n );\n\n const requestInit: RequestInit = {\n body: body,\n headers: httpRequest.headers.rawHeaders(),\n method: httpRequest.method,\n signal: abortController.signal,\n redirect: \"manual\",\n ...platformSpecificRequestInit,\n };\n\n let operationResponse: HttpOperationResponse | undefined;\n try {\n const response: CommonResponse = await this.fetch(httpRequest.url, requestInit);\n\n const headers = parseHeaders(response.headers);\n operationResponse = {\n headers: headers,\n request: httpRequest,\n status: response.status,\n readableStreamBody: httpRequest.streamResponseBody\n ? ((response.body as unknown) as NodeJS.ReadableStream)\n : undefined,\n bodyAsText: !httpRequest.streamResponseBody ? await response.text() : undefined,\n redirected: response.redirected,\n url: response.url,\n };\n\n const onDownloadProgress = httpRequest.onDownloadProgress;\n if (onDownloadProgress) {\n const responseBody: ReadableStream | undefined = response.body || undefined;\n\n if (isReadableStream(responseBody)) {\n let loadedBytes = 0;\n const downloadReportStream = new Transform({\n transform: (chunk: string | Buffer, _encoding, callback) => {\n loadedBytes += chunk.length;\n onDownloadProgress({ loadedBytes });\n callback(undefined, chunk);\n },\n });\n responseBody.pipe(downloadReportStream);\n operationResponse.readableStreamBody = downloadReportStream;\n } else {\n const length = parseInt(headers.get(\"Content-Length\")!) 
|| undefined;\n if (length) {\n // Calling callback for non-stream response for consistency with browser\n onDownloadProgress({ loadedBytes: length });\n }\n }\n }\n\n await this.processRequest(operationResponse);\n\n return operationResponse;\n } catch (error) {\n const fetchError: FetchError = error;\n if (fetchError.code === \"ENOTFOUND\") {\n throw new RestError(\n fetchError.message,\n RestError.REQUEST_SEND_ERROR,\n undefined,\n httpRequest\n );\n } else if (fetchError.type === \"aborted\") {\n throw new RestError(\n \"The request was aborted\",\n RestError.REQUEST_ABORTED_ERROR,\n undefined,\n httpRequest\n );\n }\n\n throw fetchError;\n } finally {\n // clean up event listener\n if (httpRequest.abortSignal && abortListener) {\n let uploadStreamDone = Promise.resolve();\n if (isReadableStream(body)) {\n uploadStreamDone = isStreamComplete(body);\n }\n let downloadStreamDone = Promise.resolve();\n if (isReadableStream(operationResponse?.readableStreamBody)) {\n downloadStreamDone = isStreamComplete(operationResponse!.readableStreamBody);\n }\n\n Promise.all([uploadStreamDone, downloadStreamDone])\n .then(() => {\n httpRequest.abortSignal?.removeEventListener(\"abort\", abortListener!);\n return;\n })\n .catch((_e) => {});\n }\n }\n }\n\n abstract async prepareRequest(httpRequest: WebResourceLike): Promise>;\n abstract async processRequest(operationResponse: HttpOperationResponse): Promise;\n abstract async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise;\n}\n\nfunction isReadableStream(body: any): body is Readable {\n return body && typeof body.pipe === \"function\";\n}\n\nfunction isStreamComplete(stream: Readable): Promise {\n return new Promise((resolve) => {\n stream.on(\"close\", resolve);\n stream.on(\"end\", resolve);\n stream.on(\"error\", resolve);\n });\n}\n\nexport function parseHeaders(headers: Headers): HttpHeadersLike {\n const httpHeaders = new HttpHeaders();\n\n headers.forEach((value, key) => {\n httpHeaders.set(key, value);\n });\n\n return httpHeaders;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { replaceAll } from \"./util/utils\";\n\ntype URLQueryParseState = \"ParameterName\" | \"ParameterValue\" | \"Invalid\";\n\n/**\n * A class that handles the query portion of a URLBuilder.\n */\nexport class URLQuery {\n private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {};\n\n /**\n * Get whether or not there any query parameters in this URLQuery.\n */\n public any(): boolean {\n return Object.keys(this._rawQuery).length > 0;\n }\n\n /**\n * Set a query parameter with the provided name and value. If the parameterValue is undefined or\n * empty, then this will attempt to remove an existing query parameter with the provided\n * parameterName.\n */\n public set(parameterName: string, parameterValue: any): void {\n if (parameterName) {\n if (parameterValue != undefined) {\n const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString();\n this._rawQuery[parameterName] = newValue;\n } else {\n delete this._rawQuery[parameterName];\n }\n }\n }\n\n /**\n * Get the value of the query parameter with the provided name. If no parameter exists with the\n * provided parameter name, then undefined will be returned.\n */\n public get(parameterName: string): string | string[] | undefined {\n return parameterName ? 
this._rawQuery[parameterName] : undefined;\n }\n\n /**\n * Get the string representation of this query. The return value will not start with a \"?\".\n */\n public toString(): string {\n let result = \"\";\n for (const parameterName in this._rawQuery) {\n if (result) {\n result += \"&\";\n }\n const parameterValue = this._rawQuery[parameterName];\n if (Array.isArray(parameterValue)) {\n const parameterStrings = [];\n for (const parameterValueElement of parameterValue) {\n parameterStrings.push(`${parameterName}=${parameterValueElement}`);\n }\n result += parameterStrings.join(\"&\");\n } else {\n result += `${parameterName}=${parameterValue}`;\n }\n }\n return result;\n }\n\n /**\n * Parse a URLQuery from the provided text.\n */\n public static parse(text: string): URLQuery {\n const result = new URLQuery();\n\n if (text) {\n if (text.startsWith(\"?\")) {\n text = text.substring(1);\n }\n\n let currentState: URLQueryParseState = \"ParameterName\";\n\n let parameterName = \"\";\n let parameterValue = \"\";\n for (let i = 0; i < text.length; ++i) {\n const currentCharacter: string = text[i];\n switch (currentState) {\n case \"ParameterName\":\n switch (currentCharacter) {\n case \"=\":\n currentState = \"ParameterValue\";\n break;\n\n case \"&\":\n parameterName = \"\";\n parameterValue = \"\";\n break;\n\n default:\n parameterName += currentCharacter;\n break;\n }\n break;\n\n case \"ParameterValue\":\n switch (currentCharacter) {\n case \"&\":\n result.set(parameterName, parameterValue);\n parameterName = \"\";\n parameterValue = \"\";\n currentState = \"ParameterName\";\n break;\n\n default:\n parameterValue += currentCharacter;\n break;\n }\n break;\n\n default:\n throw new Error(\"Unrecognized URLQuery parse state: \" + currentState);\n }\n }\n if (currentState === \"ParameterValue\") {\n result.set(parameterName, parameterValue);\n }\n }\n\n return result;\n }\n}\n\n/**\n * A class that handles creating, modifying, and parsing URLs.\n */\nexport class URLBuilder {\n private _scheme: string | undefined;\n private _host: string | undefined;\n private _port: string | undefined;\n private _path: string | undefined;\n private _query: URLQuery | undefined;\n\n /**\n * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL\n * (such as a host, port, path, or query), those parts will be added to this URL as well.\n */\n public setScheme(scheme: string | undefined): void {\n if (!scheme) {\n this._scheme = undefined;\n } else {\n this.set(scheme, \"SCHEME\");\n }\n }\n\n /**\n * Get the scheme that has been set in this URL.\n */\n public getScheme(): string | undefined {\n return this._scheme;\n }\n\n /**\n * Set the host for this URL. If the provided host contains other parts of a URL (such as a\n * port, path, or query), those parts will be added to this URL as well.\n */\n public setHost(host: string | undefined): void {\n if (!host) {\n this._host = undefined;\n } else {\n this.set(host, \"SCHEME_OR_HOST\");\n }\n }\n\n /**\n * Get the host that has been set in this URL.\n */\n public getHost(): string | undefined {\n return this._host;\n }\n\n /**\n * Set the port for this URL. 
If the provided port contains other parts of a URL (such as a\n * path or query), those parts will be added to this URL as well.\n */\n public setPort(port: number | string | undefined): void {\n if (port == undefined || port === \"\") {\n this._port = undefined;\n } else {\n this.set(port.toString(), \"PORT\");\n }\n }\n\n /**\n * Get the port that has been set in this URL.\n */\n public getPort(): string | undefined {\n return this._port;\n }\n\n /**\n * Set the path for this URL. If the provided path contains a query, then it will be added to\n * this URL as well.\n */\n public setPath(path: string | undefined): void {\n if (!path) {\n this._path = undefined;\n } else {\n const schemeIndex = path.indexOf(\"://\");\n if (schemeIndex !== -1) {\n const schemeStart = path.lastIndexOf(\"/\", schemeIndex);\n // Make sure to only grab the URL part of the path before setting the state back to SCHEME\n // this will handle cases such as \"/a/b/c/https://microsoft.com\" => \"https://microsoft.com\"\n this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), \"SCHEME\");\n } else {\n this.set(path, \"PATH\");\n }\n }\n }\n\n /**\n * Append the provided path to this URL's existing path. If the provided path contains a query,\n * then it will be added to this URL as well.\n */\n public appendPath(path: string | undefined): void {\n if (path) {\n let currentPath: string | undefined = this.getPath();\n if (currentPath) {\n if (!currentPath.endsWith(\"/\")) {\n currentPath += \"/\";\n }\n\n if (path.startsWith(\"/\")) {\n path = path.substring(1);\n }\n\n path = currentPath + path;\n }\n this.set(path, \"PATH\");\n }\n }\n\n /**\n * Get the path that has been set in this URL.\n */\n public getPath(): string | undefined {\n return this._path;\n }\n\n /**\n * Set the query in this URL.\n */\n public setQuery(query: string | undefined): void {\n if (!query) {\n this._query = undefined;\n } else {\n this._query = URLQuery.parse(query);\n }\n }\n\n /**\n * Set a query parameter with the provided name and value in this URL's query. If the provided\n * query parameter value is undefined or empty, then the query parameter will be removed if it\n * existed.\n */\n public setQueryParameter(queryParameterName: string, queryParameterValue: any): void {\n if (queryParameterName) {\n if (!this._query) {\n this._query = new URLQuery();\n }\n this._query.set(queryParameterName, queryParameterValue);\n }\n }\n\n /**\n * Get the value of the query parameter with the provided query parameter name. If no query\n * parameter exists with the provided name, then undefined will be returned.\n */\n public getQueryParameterValue(queryParameterName: string): string | string[] | undefined {\n return this._query ? this._query.get(queryParameterName) : undefined;\n }\n\n /**\n * Get the query in this URL.\n */\n public getQuery(): string | undefined {\n return this._query ? 
this._query.toString() : undefined;\n }\n\n /**\n * Set the parts of this URL by parsing the provided text using the provided startState.\n */\n private set(text: string, startState: URLTokenizerState): void {\n const tokenizer = new URLTokenizer(text, startState);\n\n while (tokenizer.next()) {\n const token: URLToken | undefined = tokenizer.current();\n if (token) {\n switch (token.type) {\n case \"SCHEME\":\n this._scheme = token.text || undefined;\n break;\n\n case \"HOST\":\n this._host = token.text || undefined;\n break;\n\n case \"PORT\":\n this._port = token.text || undefined;\n break;\n\n case \"PATH\":\n const tokenPath: string | undefined = token.text || undefined;\n if (!this._path || this._path === \"/\" || tokenPath !== \"/\") {\n this._path = tokenPath;\n }\n break;\n\n case \"QUERY\":\n this._query = URLQuery.parse(token.text);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenType: ${token.type}`);\n }\n }\n }\n }\n\n public toString(): string {\n let result = \"\";\n\n if (this._scheme) {\n result += `${this._scheme}://`;\n }\n\n if (this._host) {\n result += this._host;\n }\n\n if (this._port) {\n result += `:${this._port}`;\n }\n\n if (this._path) {\n if (!this._path.startsWith(\"/\")) {\n result += \"/\";\n }\n result += this._path;\n }\n\n if (this._query && this._query.any()) {\n result += `?${this._query.toString()}`;\n }\n\n return result;\n }\n\n /**\n * If the provided searchValue is found in this URLBuilder, then replace it with the provided\n * replaceValue.\n */\n public replaceAll(searchValue: string, replaceValue: string): void {\n if (searchValue) {\n this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue));\n this.setHost(replaceAll(this.getHost(), searchValue, replaceValue));\n this.setPort(replaceAll(this.getPort(), searchValue, replaceValue));\n this.setPath(replaceAll(this.getPath(), searchValue, replaceValue));\n this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue));\n }\n }\n\n public static parse(text: string): URLBuilder {\n const result = new URLBuilder();\n result.set(text, \"SCHEME_OR_HOST\");\n return result;\n }\n}\n\ntype URLTokenizerState = \"SCHEME\" | \"SCHEME_OR_HOST\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\" | \"DONE\";\n\ntype URLTokenType = \"SCHEME\" | \"HOST\" | \"PORT\" | \"PATH\" | \"QUERY\";\n\nexport class URLToken {\n public constructor(public readonly text: string, public readonly type: URLTokenType) {}\n\n public static scheme(text: string): URLToken {\n return new URLToken(text, \"SCHEME\");\n }\n\n public static host(text: string): URLToken {\n return new URLToken(text, \"HOST\");\n }\n\n public static port(text: string): URLToken {\n return new URLToken(text, \"PORT\");\n }\n\n public static path(text: string): URLToken {\n return new URLToken(text, \"PATH\");\n }\n\n public static query(text: string): URLToken {\n return new URLToken(text, \"QUERY\");\n }\n}\n\n/**\n * Get whether or not the provided character (single character string) is an alphanumeric (letter or\n * digit) character.\n */\nexport function isAlphaNumericCharacter(character: string): boolean {\n const characterCode: number = character.charCodeAt(0);\n return (\n (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ ||\n (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ ||\n (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */\n );\n}\n\n/**\n * A class that tokenizes URL strings.\n */\nexport class URLTokenizer {\n readonly _textLength: number;\n _currentState: 
URLTokenizerState;\n _currentIndex: number;\n _currentToken: URLToken | undefined;\n\n public constructor(readonly _text: string, state?: URLTokenizerState) {\n this._textLength = _text ? _text.length : 0;\n this._currentState = state != undefined ? state : \"SCHEME_OR_HOST\";\n this._currentIndex = 0;\n }\n\n /**\n * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer\n * hasn't started or has finished tokenizing.\n */\n public current(): URLToken | undefined {\n return this._currentToken;\n }\n\n /**\n * Advance to the next URLToken and return whether or not a URLToken was found.\n */\n public next(): boolean {\n if (!hasCurrentCharacter(this)) {\n this._currentToken = undefined;\n } else {\n switch (this._currentState) {\n case \"SCHEME\":\n nextScheme(this);\n break;\n\n case \"SCHEME_OR_HOST\":\n nextSchemeOrHost(this);\n break;\n\n case \"HOST\":\n nextHost(this);\n break;\n\n case \"PORT\":\n nextPort(this);\n break;\n\n case \"PATH\":\n nextPath(this);\n break;\n\n case \"QUERY\":\n nextQuery(this);\n break;\n\n default:\n throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`);\n }\n }\n return !!this._currentToken;\n }\n}\n\n/**\n * Read the remaining characters from this Tokenizer's character stream.\n */\nfunction readRemaining(tokenizer: URLTokenizer): string {\n let result = \"\";\n if (tokenizer._currentIndex < tokenizer._textLength) {\n result = tokenizer._text.substring(tokenizer._currentIndex);\n tokenizer._currentIndex = tokenizer._textLength;\n }\n return result;\n}\n\n/**\n * Whether or not this URLTokenizer has a current character.\n */\nfunction hasCurrentCharacter(tokenizer: URLTokenizer): boolean {\n return tokenizer._currentIndex < tokenizer._textLength;\n}\n\n/**\n * Get the character in the text string at the current index.\n */\nfunction getCurrentCharacter(tokenizer: URLTokenizer): string {\n return tokenizer._text[tokenizer._currentIndex];\n}\n\n/**\n * Advance to the character in text that is \"step\" characters ahead. 
If no step value is provided,\n * then step will default to 1.\n */\nfunction nextCharacter(tokenizer: URLTokenizer, step?: number): void {\n if (hasCurrentCharacter(tokenizer)) {\n if (!step) {\n step = 1;\n }\n tokenizer._currentIndex += step;\n }\n}\n\n/**\n * Starting with the current character, peek \"charactersToPeek\" number of characters ahead in this\n * Tokenizer's stream of characters.\n */\nfunction peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string {\n let endIndex: number = tokenizer._currentIndex + charactersToPeek;\n if (tokenizer._textLength < endIndex) {\n endIndex = tokenizer._textLength;\n }\n return tokenizer._text.substring(tokenizer._currentIndex, endIndex);\n}\n\n/**\n * Read characters from this Tokenizer until the end of the stream or until the provided condition\n * is false when provided the current character.\n */\nfunction readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string {\n let result = \"\";\n\n while (hasCurrentCharacter(tokenizer)) {\n const currentCharacter: string = getCurrentCharacter(tokenizer);\n if (!condition(currentCharacter)) {\n break;\n } else {\n result += currentCharacter;\n nextCharacter(tokenizer);\n }\n }\n\n return result;\n}\n\n/**\n * Read characters from this Tokenizer until a non-alphanumeric character or the end of the\n * character stream is reached.\n */\nfunction readWhileLetterOrDigit(tokenizer: URLTokenizer): string {\n return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character));\n}\n\n/**\n * Read characters from this Tokenizer until one of the provided terminating characters is read or\n * the end of the character stream is reached.\n */\nfunction readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string {\n return readWhile(\n tokenizer,\n (character: string) => terminatingCharacters.indexOf(character) === -1\n );\n}\n\nfunction nextScheme(tokenizer: URLTokenizer): void {\n const scheme: string = readWhileLetterOrDigit(tokenizer);\n tokenizer._currentToken = URLToken.scheme(scheme);\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"HOST\";\n }\n}\n\nfunction nextSchemeOrHost(tokenizer: URLTokenizer): void {\n const schemeOrHost: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n tokenizer._currentToken = URLToken.scheme(schemeOrHost);\n tokenizer._currentState = \"HOST\";\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n tokenizer._currentState = \"PORT\";\n }\n } else {\n tokenizer._currentToken = URLToken.host(schemeOrHost);\n if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n }\n}\n\nfunction nextHost(tokenizer: URLTokenizer): void {\n if (peekCharacters(tokenizer, 3) === \"://\") {\n nextCharacter(tokenizer, 3);\n }\n\n const host: string = readUntilCharacter(tokenizer, \":\", \"/\", \"?\");\n tokenizer._currentToken = URLToken.host(host);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \":\") {\n tokenizer._currentState = \"PORT\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n 
tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPort(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \":\") {\n nextCharacter(tokenizer);\n }\n\n const port: string = readUntilCharacter(tokenizer, \"/\", \"?\");\n tokenizer._currentToken = URLToken.port(port);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else if (getCurrentCharacter(tokenizer) === \"/\") {\n tokenizer._currentState = \"PATH\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextPath(tokenizer: URLTokenizer): void {\n const path: string = readUntilCharacter(tokenizer, \"?\");\n tokenizer._currentToken = URLToken.path(path);\n\n if (!hasCurrentCharacter(tokenizer)) {\n tokenizer._currentState = \"DONE\";\n } else {\n tokenizer._currentState = \"QUERY\";\n }\n}\n\nfunction nextQuery(tokenizer: URLTokenizer): void {\n if (getCurrentCharacter(tokenizer) === \"?\") {\n nextCharacter(tokenizer);\n }\n\n const query: string = readRemaining(tokenizer);\n tokenizer._currentToken = URLToken.query(query);\n tokenizer._currentState = \"DONE\";\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport * as tunnel from \"tunnel\";\n\nimport { ProxySettings } from \"./serviceClient\";\nimport { URLBuilder } from \"./url\";\nimport { HttpHeadersLike } from \"./httpHeaders\";\n\nexport type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent };\nexport function createProxyAgent(\n requestUrl: string,\n proxySettings: ProxySettings,\n headers?: HttpHeadersLike\n): ProxyAgent {\n const tunnelOptions: tunnel.HttpsOverHttpsOptions = {\n proxy: {\n host: URLBuilder.parse(proxySettings.host).getHost() as string,\n port: proxySettings.port,\n headers: (headers && headers.rawHeaders()) || {},\n },\n };\n\n if (proxySettings.username && proxySettings.password) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`;\n } else if (proxySettings.username) {\n tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`;\n }\n\n const requestScheme = URLBuilder.parse(requestUrl).getScheme() || \"\";\n const isRequestHttps = requestScheme.toLowerCase() === \"https\";\n const proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || \"\";\n const isProxyHttps = proxyScheme.toLowerCase() === \"https\";\n\n const proxyAgent = {\n isHttps: isRequestHttps,\n agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions),\n };\n\n return proxyAgent;\n}\n\n// Duplicate tunnel.HttpsOverHttpsOptions to avoid exporting createTunnel() with dependency on @types/tunnel\n// createIunnel() is only imported by tests.\nexport interface HttpsProxyOptions {\n host: string;\n port: number;\n localAddress?: string;\n proxyAuth?: string;\n headers?: { [key: string]: any };\n ca?: Buffer[];\n servername?: string;\n key?: Buffer;\n cert?: Buffer;\n}\n\ninterface HttpsOverHttpsOptions {\n maxSockets?: number;\n ca?: Buffer[];\n key?: Buffer;\n cert?: Buffer;\n proxy?: HttpsProxyOptions;\n}\n\nexport function createTunnel(\n isRequestHttps: boolean,\n isProxyHttps: boolean,\n tunnelOptions: HttpsOverHttpsOptions\n): http.Agent | https.Agent {\n if (isRequestHttps && isProxyHttps) {\n return tunnel.httpsOverHttps(tunnelOptions);\n } else if (isRequestHttps && !isProxyHttps) {\n return 
tunnel.httpsOverHttp(tunnelOptions);\n } else if (!isRequestHttps && isProxyHttps) {\n return tunnel.httpOverHttps(tunnelOptions);\n } else {\n return tunnel.httpOverHttp(tunnelOptions);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as tough from \"tough-cookie\";\nimport * as http from \"http\";\nimport * as https from \"https\";\nimport node_fetch from \"node-fetch\";\n\nimport {\n CommonRequestInfo,\n CommonRequestInit,\n CommonResponse,\n FetchHttpClient,\n} from \"./fetchHttpClient\";\nimport { HttpOperationResponse } from \"./httpOperationResponse\";\nimport { WebResourceLike } from \"./webResource\";\nimport { createProxyAgent, ProxyAgent } from \"./proxyAgent\";\n\nexport class NodeFetchHttpClient extends FetchHttpClient {\n private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true });\n\n async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise {\n return (node_fetch(input, init) as unknown) as Promise;\n }\n\n async prepareRequest(httpRequest: WebResourceLike): Promise> {\n const requestInit: Partial = {};\n\n if (this.cookieJar && !httpRequest.headers.get(\"Cookie\")) {\n const cookieString = await new Promise((resolve, reject) => {\n this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => {\n if (err) {\n reject(err);\n } else {\n resolve(cookie);\n }\n });\n });\n\n httpRequest.headers.set(\"Cookie\", cookieString);\n }\n\n if (httpRequest.agentSettings) {\n const { http: httpAgent, https: httpsAgent } = httpRequest.agentSettings;\n if (httpsAgent && httpRequest.url.startsWith(\"https\")) {\n requestInit.agent = httpsAgent;\n } else if (httpAgent) {\n requestInit.agent = httpAgent;\n }\n } else if (httpRequest.proxySettings) {\n const tunnel: ProxyAgent = createProxyAgent(\n httpRequest.url,\n httpRequest.proxySettings,\n httpRequest.headers\n );\n requestInit.agent = tunnel.agent;\n }\n\n if (httpRequest.keepAlive === true) {\n if (requestInit.agent) {\n requestInit.agent.keepAlive = true;\n } else {\n const options: http.AgentOptions | https.AgentOptions = { keepAlive: true };\n const agent = httpRequest.url.startsWith(\"https\")\n ? new https.Agent(options)\n : new http.Agent(options);\n requestInit.agent = agent;\n }\n }\n\n return requestInit;\n }\n\n async processRequest(operationResponse: HttpOperationResponse): Promise {\n if (this.cookieJar) {\n const setCookieHeader = operationResponse.headers.get(\"Set-Cookie\");\n if (setCookieHeader != undefined) {\n await new Promise((resolve, reject) => {\n this.cookieJar!.setCookie(\n setCookieHeader,\n operationResponse.request.url,\n { ignoreError: true },\n (err) => {\n if (err) {\n reject(err);\n } else {\n resolve();\n }\n }\n );\n });\n }\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\n/**\n * The different levels of logs that can be used with the HttpPipelineLogger.\n */\nexport enum HttpPipelineLogLevel {\n /**\n * A log level that indicates that no logs will be logged.\n */\n OFF,\n\n /**\n * An error log.\n */\n ERROR,\n\n /**\n * A warning log.\n */\n WARNING,\n\n /**\n * An information log.\n */\n INFO,\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { Context, SpanOptions } from \"./tracing\";\n\n/**\n * Represents a credential capable of providing an authentication token.\n */\nexport interface TokenCredential {\n /**\n * Gets the token provided by this credential.\n *\n * This method is called automatically by Azure SDK client libraries. You may call this method\n * directly, but you must also handle token caching and token refreshing.\n *\n * @param scopes - The list of scopes for which the token will have access.\n * @param options - The options used to configure any requests this\n * TokenCredential implementation might make.\n */\n getToken(scopes: string | string[], options?: GetTokenOptions): Promise;\n}\n\n/**\n * Defines options for TokenCredential.getToken.\n */\nexport interface GetTokenOptions {\n /**\n * The signal which can be used to abort requests.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Options used when creating and sending HTTP requests for this operation.\n */\n requestOptions?: {\n /**\n * The number of milliseconds a request can take before automatically being terminated.\n */\n timeout?: number;\n };\n /**\n * Options used when tracing is enabled.\n */\n tracingOptions?: {\n /**\n * OpenTelemetry SpanOptions used to create a span when tracing is enabled.\n */\n spanOptions?: SpanOptions;\n\n /**\n * OpenTelemetry context\n */\n tracingContext?: Context;\n };\n\n /**\n * Allows specifying a tenantId. Useful to handle challenges that provide tenant Id hints.\n */\n tenantId?: string;\n}\n\n/**\n * Represents an access token with an expiration time.\n */\nexport interface AccessToken {\n /**\n * The access token returned by the authentication service.\n */\n token: string;\n\n /**\n * The access token's expiration timestamp in milliseconds, UNIX epoch time.\n */\n expiresOnTimestamp: number;\n}\n\n/**\n * Tests an object to determine whether it implements TokenCredential.\n *\n * @param credential - The assumed TokenCredential to be tested.\n */\nexport function isTokenCredential(credential: unknown): credential is TokenCredential {\n // Check for an object with a 'getToken' function and possibly with\n // a 'signRequest' function. We do this check to make sure that\n // a ServiceClientCredentials implementor (like TokenClientCredentials\n // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if\n // it doesn't actually implement TokenCredential also.\n const castCredential = credential as {\n getToken: unknown;\n signRequest: unknown;\n };\n return (\n castCredential &&\n typeof castCredential.getToken === \"function\" &&\n (castCredential.signRequest === undefined || castCredential.getToken.length > 0)\n );\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { Mapper } from \"./serializer\";\n\nexport type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath };\n\n/**\n * A common interface that all Operation parameter's extend.\n */\nexport interface OperationParameter {\n /**\n * The path to this parameter's value in OperationArguments or the object that contains paths for\n * each property's value in OperationArguments.\n */\n parameterPath: ParameterPath;\n\n /**\n * The mapper that defines how to validate and serialize this parameter's value.\n */\n mapper: Mapper;\n}\n\n/**\n * A parameter for an operation that will be substituted into the operation's request URL.\n */\nexport interface OperationURLParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the URL parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n}\n\n/**\n * A parameter for an operation that will be added as a query parameter to the operation's HTTP\n * request.\n */\nexport interface OperationQueryParameter extends OperationParameter {\n /**\n * Whether or not to skip encoding the query parameter's value before adding it to the URL.\n */\n skipEncoding?: boolean;\n\n /**\n * If this query parameter's value is a collection, what type of format should the value be\n * converted to.\n */\n collectionFormat?: QueryCollectionFormat;\n}\n\n/**\n * Get the path to this parameter's value as a dotted string (a.b.c).\n * @param parameter The parameter to get the path string for.\n * @returns The path to this parameter's value as a dotted string.\n */\nexport function getPathStringFromParameter(parameter: OperationParameter): string {\n return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper);\n}\n\nexport function getPathStringFromParameterPath(\n parameterPath: ParameterPath,\n mapper: Mapper\n): string {\n let result: string;\n if (typeof parameterPath === \"string\") {\n result = parameterPath;\n } else if (Array.isArray(parameterPath)) {\n result = parameterPath.join(\".\");\n } else {\n result = mapper.serializedName!;\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n OperationParameter,\n OperationQueryParameter,\n OperationURLParameter,\n} from \"./operationParameter\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { MapperType, Serializer } from \"./serializer\";\nimport { HttpMethods } from \"./webResource\";\n\n/**\n * A specification that defines an operation.\n */\nexport interface OperationSpec {\n /**\n * The serializer to use in this operation.\n */\n readonly serializer: Serializer;\n\n /**\n * The HTTP method that should be used by requests for this operation.\n */\n readonly httpMethod: HttpMethods;\n\n /**\n * The URL that was provided in the service's specification. This will still have all of the URL\n * template variables in it. If this is not provided when the OperationSpec is created, then it\n * will be populated by a \"baseUri\" property on the ServiceClient.\n */\n readonly baseUrl?: string;\n\n /**\n * The fixed path for this operation's URL. This will still have all of the URL template variables\n * in it.\n */\n readonly path?: string;\n\n /**\n * The content type of the request body. 
This value will be used as the \"Content-Type\" header if\n * it is provided.\n */\n readonly contentType?: string;\n\n /**\n * The parameter that will be used to construct the HTTP request's body.\n */\n readonly requestBody?: OperationParameter;\n\n /**\n * Whether or not this operation uses XML request and response bodies.\n */\n readonly isXML?: boolean;\n\n /**\n * The parameters to the operation method that will be substituted into the constructed URL.\n */\n readonly urlParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be added to the constructed URL's query.\n */\n readonly queryParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be converted to headers on the operation's\n * HTTP request.\n */\n readonly headerParameters?: ReadonlyArray;\n\n /**\n * The parameters to the operation method that will be used to create a formdata body for the\n * operation's HTTP request.\n */\n readonly formDataParameters?: ReadonlyArray;\n\n /**\n * The different types of responses that this operation can return based on what status code is\n * returned.\n */\n readonly responses: { [responseCode: string]: OperationResponse };\n}\n\nexport function isStreamOperation(operationSpec: OperationSpec): boolean {\n let result = false;\n for (const statusCode in operationSpec.responses) {\n const operationResponse: OperationResponse = operationSpec.responses[statusCode];\n if (\n operationResponse.bodyMapper &&\n operationResponse.bodyMapper.type.name === MapperType.Stream\n ) {\n result = true;\n break;\n }\n }\n return result;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as xml2js from \"xml2js\";\n\nexport function stringifyXML(obj: any, opts?: { rootName?: string }) {\n const builder = new xml2js.Builder({\n rootName: (opts || {}).rootName,\n renderOpts: {\n pretty: false,\n },\n });\n return builder.buildObject(obj);\n}\n\nexport function parseXML(str: string): Promise {\n const xmlParser = new xml2js.Parser({\n explicitArray: false,\n explicitCharkey: false,\n explicitRoot: false,\n });\n return new Promise((resolve, reject) => {\n if (!str) {\n reject(new Error(\"Document is empty\"));\n } else {\n xmlParser.parseString(str, (err: any, res: any) => {\n if (err) {\n reject(err);\n } else {\n resolve(res);\n }\n });\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { HttpPipelineLogger } from \"../httpPipelineLogger\";\nimport { HttpPipelineLogLevel } from \"../httpPipelineLogLevel\";\nimport { WebResourceLike } from \"../webResource\";\n\n/**\n * Creates a new RequestPolicy per-request that uses the provided nextPolicy.\n */\nexport type RequestPolicyFactory = {\n create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy;\n};\n\nexport interface RequestPolicy {\n sendRequest(httpRequest: WebResourceLike): Promise;\n}\n\nexport abstract class BaseRequestPolicy implements RequestPolicy {\n protected constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike\n ) {}\n\n public abstract sendRequest(webResource: WebResourceLike): Promise;\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return this._options.shouldLog(logLevel);\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n this._options.log(logLevel, message);\n }\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport interface RequestPolicyOptionsLike {\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n shouldLog(logLevel: HttpPipelineLogLevel): boolean;\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meet the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n log(logLevel: HttpPipelineLogLevel, message: string): void;\n}\n\n/**\n * Optional properties that can be used when creating a RequestPolicy.\n */\nexport class RequestPolicyOptions implements RequestPolicyOptionsLike {\n constructor(private _logger?: HttpPipelineLogger) {}\n\n /**\n * Get whether or not a log with the provided log level should be logged.\n * @param logLevel The log level of the log that will be logged.\n * @returns Whether or not a log with the provided log level should be logged.\n */\n public shouldLog(logLevel: HttpPipelineLogLevel): boolean {\n return (\n !!this._logger &&\n logLevel !== HttpPipelineLogLevel.OFF &&\n logLevel <= this._logger.minimumLogLevel\n );\n }\n\n /**\n * Attempt to log the provided message to the provided logger. If no logger was provided or if\n * the log level does not meat the logger's threshold, then nothing will be logged.\n * @param logLevel The log level of this log.\n * @param message The message of this log.\n */\n public log(logLevel: HttpPipelineLogLevel, message: string): void {\n if (this._logger && this.shouldLog(logLevel)) {\n this._logger.log(logLevel, message);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { OperationResponse } from \"../operationResponse\";\nimport { OperationSpec, isStreamOperation } from \"../operationSpec\";\nimport { RestError } from \"../restError\";\nimport { Mapper, MapperType } from \"../serializer\";\nimport * as utils from \"../util/utils\";\nimport { parseXML } from \"../util/xml\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * The content-types that will indicate that an operation response should be deserialized in a\n * particular way.\n */\nexport interface DeserializationContentTypes {\n /**\n * The content-types that indicate that an operation response should be deserialized as JSON.\n * Defaults to [ \"application/json\", \"text/json\" ].\n */\n json?: string[];\n\n /**\n * The content-types that indicate that an operation response should be deserialized as XML.\n * Defaults to [ \"application/xml\", \"application/atom+xml\" ].\n */\n xml?: string[];\n}\n\n/**\n * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they\n * pass through the HTTP pipeline.\n */\nexport function deserializationPolicy(\n deserializationContentTypes?: DeserializationContentTypes\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options);\n },\n };\n}\n\nexport const defaultJsonContentTypes = [\"application/json\", \"text/json\"];\nexport const defaultXmlContentTypes = [\"application/xml\", \"application/atom+xml\"];\n\n/**\n * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the\n * HTTP pipeline.\n */\nexport class DeserializationPolicy extends BaseRequestPolicy {\n public readonly jsonContentTypes: string[];\n public readonly xmlContentTypes: string[];\n\n constructor(\n nextPolicy: RequestPolicy,\n deserializationContentTypes: DeserializationContentTypes | undefined,\n options: RequestPolicyOptionsLike\n ) {\n super(nextPolicy, options);\n\n this.jsonContentTypes =\n (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes;\n this.xmlContentTypes =\n (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes;\n }\n\n public async sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response: HttpOperationResponse) =>\n deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response)\n );\n }\n}\n\nfunction getOperationResponse(\n parsedResponse: HttpOperationResponse\n): undefined | OperationResponse {\n let result: OperationResponse | undefined;\n const request: WebResourceLike = parsedResponse.request;\n const operationSpec: OperationSpec | undefined = request.operationSpec;\n if (operationSpec) {\n const operationResponseGetter:\n | undefined\n | ((\n operationSpec: OperationSpec,\n response: HttpOperationResponse\n ) => undefined | OperationResponse) = request.operationResponseGetter;\n if (!operationResponseGetter) {\n result = operationSpec.responses[parsedResponse.status];\n } else {\n result = operationResponseGetter(operationSpec, parsedResponse);\n }\n }\n return 
result;\n}\n\nfunction shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean {\n const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) =\n parsedResponse.request.shouldDeserialize;\n let result: boolean;\n if (shouldDeserialize === undefined) {\n result = true;\n } else if (typeof shouldDeserialize === \"boolean\") {\n result = shouldDeserialize;\n } else {\n result = shouldDeserialize(parsedResponse);\n }\n return result;\n}\n\nexport function deserializeResponseBody(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n response: HttpOperationResponse\n): Promise {\n return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => {\n const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse);\n if (shouldDeserialize) {\n const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec;\n if (operationSpec && operationSpec.responses) {\n const statusCode: number = parsedResponse.status;\n\n const expectedStatusCodes: string[] = Object.keys(operationSpec.responses);\n\n const hasNoExpectedStatusCodes: boolean =\n expectedStatusCodes.length === 0 ||\n (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === \"default\");\n\n const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse);\n\n const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes\n ? 200 <= statusCode && statusCode < 300\n : !!responseSpec;\n if (!isExpectedStatusCode) {\n const defaultResponseSpec: OperationResponse = operationSpec.responses.default;\n if (defaultResponseSpec) {\n const initialErrorMessage: string = isStreamOperation(operationSpec)\n ? `Unexpected status code: ${statusCode}`\n : (parsedResponse.bodyAsText as string);\n\n const error = new RestError(initialErrorMessage);\n error.statusCode = statusCode;\n error.request = utils.stripRequest(parsedResponse.request);\n error.response = utils.stripResponse(parsedResponse);\n\n let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody;\n try {\n if (parsedErrorResponse) {\n const defaultResponseBodyMapper: Mapper | undefined =\n defaultResponseSpec.bodyMapper;\n if (\n defaultResponseBodyMapper &&\n defaultResponseBodyMapper.serializedName === \"CloudError\"\n ) {\n if (parsedErrorResponse.error) {\n parsedErrorResponse = parsedErrorResponse.error;\n }\n if (parsedErrorResponse.code) {\n error.code = parsedErrorResponse.code;\n }\n if (parsedErrorResponse.message) {\n error.message = parsedErrorResponse.message;\n }\n } else {\n let internalError: any = parsedErrorResponse;\n if (parsedErrorResponse.error) {\n internalError = parsedErrorResponse.error;\n }\n\n error.code = internalError.code;\n if (internalError.message) {\n error.message = internalError.message;\n }\n }\n\n if (defaultResponseBodyMapper) {\n let valueToDeserialize: any = parsedErrorResponse;\n if (\n operationSpec.isXML &&\n defaultResponseBodyMapper.type.name === MapperType.Sequence\n ) {\n valueToDeserialize =\n typeof parsedErrorResponse === \"object\"\n ? 
parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!]\n : [];\n }\n error.body = operationSpec.serializer.deserialize(\n defaultResponseBodyMapper,\n valueToDeserialize,\n \"error.body\"\n );\n }\n }\n } catch (defaultError) {\n error.message = `Error \\\"${defaultError.message}\\\" occurred in deserializing the responseBody - \\\"${parsedResponse.bodyAsText}\\\" for the default response.`;\n }\n return Promise.reject(error);\n }\n } else if (responseSpec) {\n if (responseSpec.bodyMapper) {\n let valueToDeserialize: any = parsedResponse.parsedBody;\n if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) {\n valueToDeserialize =\n typeof valueToDeserialize === \"object\"\n ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!]\n : [];\n }\n try {\n parsedResponse.parsedBody = operationSpec.serializer.deserialize(\n responseSpec.bodyMapper,\n valueToDeserialize,\n \"operationRes.parsedBody\"\n );\n } catch (error) {\n const restError = new RestError(\n `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`\n );\n restError.request = utils.stripRequest(parsedResponse.request);\n restError.response = utils.stripResponse(parsedResponse);\n return Promise.reject(restError);\n }\n } else if (operationSpec.httpMethod === \"HEAD\") {\n // head methods never have a body, but we return a boolean to indicate presence/absence of the resource\n parsedResponse.parsedBody = response.status >= 200 && response.status < 300;\n }\n\n if (responseSpec.headersMapper) {\n parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(\n responseSpec.headersMapper,\n parsedResponse.headers.rawHeaders(),\n \"operationRes.parsedHeaders\"\n );\n }\n }\n }\n }\n return Promise.resolve(parsedResponse);\n });\n}\n\nfunction parse(\n jsonContentTypes: string[],\n xmlContentTypes: string[],\n operationResponse: HttpOperationResponse\n): Promise {\n const errorHandler = (err: Error & { code: string }) => {\n const msg = `Error \"${err}\" occurred while parsing the response body - ${operationResponse.bodyAsText}.`;\n const errCode = err.code || RestError.PARSE_ERROR;\n const e = new RestError(\n msg,\n errCode,\n operationResponse.status,\n operationResponse.request,\n operationResponse,\n operationResponse.bodyAsText\n );\n return Promise.reject(e);\n };\n\n if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) {\n const text = operationResponse.bodyAsText;\n const contentType: string = operationResponse.headers.get(\"Content-Type\") || \"\";\n const contentComponents: string[] = !contentType\n ? []\n : contentType.split(\";\").map((component) => component.toLowerCase());\n if (\n contentComponents.length === 0 ||\n contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)\n ) {\n return new Promise((resolve) => {\n operationResponse.parsedBody = JSON.parse(text);\n resolve(operationResponse);\n }).catch(errorHandler);\n } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) {\n return parseXML(text)\n .then((body) => {\n operationResponse.parsedBody = body;\n return operationResponse;\n })\n .catch(errorHandler);\n }\n }\n\n return Promise.resolve(operationResponse);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { RestError } from \"../restError\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function exponentialRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ExponentialRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\nconst DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\nconst DEFAULT_CLIENT_RETRY_COUNT = 3;\nconst DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\nconst DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n */\nexport class ExponentialRetryPolicy extends BaseRequestPolicy {\n /**\n * The client retry count.\n */\n retryCount: number;\n /**\n * The client retry interval in milliseconds.\n */\n retryInterval: number;\n /**\n * The minimum retry interval in milliseconds.\n */\n minRetryInterval: number;\n /**\n * The maximum retry interval in milliseconds.\n */\n maxRetryInterval: number;\n\n /**\n * @constructor\n * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain.\n * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy.\n * @param {number} [retryCount] The client retry count.\n * @param {number} [retryInterval] The client retry interval, in milliseconds.\n * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds.\n * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds.\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n function isNumber(n: any): n is number {\n return typeof n === \"number\";\n }\n this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval = isNumber(minRetryInterval)\n ? minRetryInterval\n : DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval = isNumber(maxRetryInterval)\n ? 
maxRetryInterval\n : DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => retry(this, request, response))\n .catch((error) => retry(this, request, error.response, undefined, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(\n policy: ExponentialRetryPolicy,\n statusCode: number | undefined,\n retryData: RetryData\n): boolean {\n if (\n statusCode == undefined ||\n (statusCode < 500 && statusCode !== 408) ||\n statusCode === 501 ||\n statusCode === 505\n ) {\n return false;\n }\n\n let currentCount: number;\n if (!retryData) {\n throw new Error(\"retryData for the ExponentialRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against.\n * @param {RetryData} retryData The retry data.\n * @param {RetryError} [err] The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: ExponentialRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 +\n Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nfunction retry(\n policy: ExponentialRetryPolicy,\n request: WebResourceLike,\n response?: HttpOperationResponse,\n retryData?: RetryData,\n requestError?: RetryError\n): Promise {\n retryData = updateRetryData(policy, retryData, requestError);\n const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted;\n if (!isAborted && shouldRetry(policy, response && response.status, retryData)) {\n return utils\n .delay(retryData.retryInterval)\n .then(() => policy._nextPolicy.sendRequest(request.clone()))\n .then((res) => retry(policy, request, res, retryData, undefined))\n .catch((err) => retry(policy, request, response, retryData, err));\n } else if (isAborted || requestError || !response) {\n // If the operation failed in the end, return all errors instead of just the last one\n const err =\n retryData.error ||\n new RestError(\n \"Failed to send the request.\",\n RestError.REQUEST_SEND_ERROR,\n response && response.status,\n response && response.request,\n response\n );\n return Promise.reject(err);\n } else {\n return Promise.resolve(response);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function generateClientRequestIdPolicy(\n requestIdHeaderName = \"x-ms-client-request-id\"\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName);\n },\n };\n}\n\nexport class GenerateClientRequestIdPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n private _requestIdHeaderName: string\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.headers.contains(this._requestIdHeaderName)) {\n request.headers.set(this._requestIdHeaderName, utils.generateUuid());\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport * as os from \"os\";\nimport { TelemetryInfo } from \"./userAgentPolicy\";\nimport { Constants } from \"../util/constants\";\n\nexport function getDefaultUserAgentKey(): string {\n return Constants.HeaderConstants.USER_AGENT;\n}\n\nexport function getPlatformSpecificData(): TelemetryInfo[] {\n const runtimeInfo = {\n key: \"Node\",\n value: process.version,\n };\n\n const osInfo = {\n key: \"OS\",\n value: `(${os.arch()}-${os.type()}-${os.release()})`,\n };\n\n return [runtimeInfo, osInfo];\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { getDefaultUserAgentKey, getPlatformSpecificData } from \"./msRestUserAgentPolicy\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport type TelemetryInfo = { key?: string; value?: string };\n\nfunction getRuntimeInfo(): TelemetryInfo[] {\n const msRestRuntime = {\n key: \"ms-rest-js\",\n value: Constants.msRestVersion,\n };\n\n return [msRestRuntime];\n}\n\nfunction getUserAgentString(\n telemetryInfo: TelemetryInfo[],\n keySeparator = \" \",\n valueSeparator = \"/\"\n): string {\n return telemetryInfo\n .map((info) => {\n const value = info.value ? `${valueSeparator}${info.value}` : \"\";\n return `${info.key}${value}`;\n })\n .join(keySeparator);\n}\n\nexport const getDefaultUserAgentHeaderName = getDefaultUserAgentKey;\n\nexport function getDefaultUserAgentValue(): string {\n const runtimeInfo = getRuntimeInfo();\n const platformSpecificData = getPlatformSpecificData();\n const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData));\n return userAgent;\n}\n\nexport function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory {\n const key: string =\n !userAgentData || userAgentData.key == undefined ? 
getDefaultUserAgentKey() : userAgentData.key;\n const value: string =\n !userAgentData || userAgentData.value == undefined\n ? getDefaultUserAgentValue()\n : userAgentData.value;\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new UserAgentPolicy(nextPolicy, options, key, value);\n },\n };\n}\n\nexport class UserAgentPolicy extends BaseRequestPolicy {\n constructor(\n readonly _nextPolicy: RequestPolicy,\n readonly _options: RequestPolicyOptionsLike,\n protected headerKey: string,\n protected headerValue: string\n ) {\n super(_nextPolicy, _options);\n }\n\n sendRequest(request: WebResourceLike): Promise {\n this.addUserAgentHeader(request);\n return this._nextPolicy.sendRequest(request);\n }\n\n addUserAgentHeader(request: WebResourceLike): void {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n\n if (!request.headers.get(this.headerKey) && this.headerValue) {\n request.headers.set(this.headerKey, this.headerValue);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { URLBuilder } from \"../url\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\n/**\n * Options for how redirect responses are handled.\n */\nexport interface RedirectOptions {\n /*\n * When true, redirect responses are followed. Defaults to true.\n */\n handleRedirects: boolean;\n\n /*\n * The maximum number of times the redirect URL will be tried before\n * failing. Defaults to 20.\n */\n maxRetries?: number;\n}\n\nexport const DefaultRedirectOptions: RedirectOptions = {\n handleRedirects: true,\n maxRetries: 20,\n};\n\nexport function redirectPolicy(maximumRetries = 20): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RedirectPolicy(nextPolicy, options, maximumRetries);\n },\n };\n}\n\nexport class RedirectPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly maxRetries = 20\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request)\n .then((response) => handleRedirect(this, response, 0));\n }\n}\n\nfunction handleRedirect(\n policy: RedirectPolicy,\n response: HttpOperationResponse,\n currentRetries: number\n): Promise {\n const { request, status } = response;\n const locationHeader = response.headers.get(\"location\");\n if (\n locationHeader &&\n (status === 300 ||\n (status === 301 && [\"GET\", \"HEAD\"].includes(request.method)) ||\n (status === 302 && [\"GET\", \"POST\", \"HEAD\"].includes(request.method)) ||\n (status === 303 && \"POST\" === request.method) ||\n status === 307) &&\n ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) ||\n (request.redirectLimit === undefined && currentRetries < policy.maxRetries))\n ) {\n const builder = URLBuilder.parse(request.url);\n builder.setPath(locationHeader);\n request.url = builder.toString();\n\n // POST request with Status code 302 and 303 should be converted into a\n // redirected GET request if the redirect url is present in the location header\n // reference: 
https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch\n if ((status === 302 || status === 303) && request.method === \"POST\") {\n request.method = \"GET\";\n delete request.body;\n }\n\n return policy._nextPolicy\n .sendRequest(request)\n .then((res) => handleRedirect(policy, res, currentRetries + 1))\n .then((res) => recordRedirect(res, request.url));\n }\n\n return Promise.resolve(response);\n}\n\nfunction recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse {\n // This is called as the recursive calls to handleRedirect() unwind,\n // only record the deepest/last redirect\n if (!response.redirected) {\n response.redirected = true;\n response.url = redirect;\n }\n return response;\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new RPRegistrationPolicy(nextPolicy, options, retryTimeout);\n },\n };\n}\n\nexport class RPRegistrationPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n readonly _retryTimeout = 30\n ) {\n super(nextPolicy, options);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .then((response) => registerIfNeeded(this, request, response));\n }\n}\n\nfunction registerIfNeeded(\n policy: RPRegistrationPolicy,\n request: WebResourceLike,\n response: HttpOperationResponse\n): Promise {\n if (response.status === 409) {\n const rpName = checkRPNotRegisteredError(response.bodyAsText as string);\n if (rpName) {\n const urlPrefix = extractSubscriptionUrl(request.url);\n return (\n registerRP(policy, urlPrefix, rpName, request)\n // Autoregistration of ${provider} failed for some reason. We will not return this error\n // instead will return the initial response with 409 status code back to the user.\n // do nothing here as we are returning the original response at the end of this method.\n .catch(() => false)\n .then((registrationStatus) => {\n if (registrationStatus) {\n // Retry the original request. We have to change the x-ms-client-request-id\n // otherwise Azure endpoint will return the initial 409 (cached) response.\n request.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n return policy._nextPolicy.sendRequest(request.clone());\n }\n return response;\n })\n );\n }\n }\n\n return Promise.resolve(response);\n}\n\n/**\n * Reuses the headers of the original request and url (if specified).\n * @param {WebResourceLike} originalRequest The original request\n * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. 
Default false.\n * @returns {object} A new request object with desired headers.\n */\nfunction getRequestEssentials(\n originalRequest: WebResourceLike,\n reuseUrlToo = false\n): WebResourceLike {\n const reqOptions: WebResourceLike = originalRequest.clone();\n if (reuseUrlToo) {\n reqOptions.url = originalRequest.url;\n }\n\n // We have to change the x-ms-client-request-id otherwise Azure endpoint\n // will return the initial 409 (cached) response.\n reqOptions.headers.set(\"x-ms-client-request-id\", utils.generateUuid());\n\n // Set content-type to application/json\n reqOptions.headers.set(\"Content-Type\", \"application/json; charset=utf-8\");\n\n return reqOptions;\n}\n\n/**\n * Validates the error code and message associated with 409 response status code. If it matches to that of\n * RP not registered then it returns the name of the RP else returns undefined.\n * @param {string} body The response body received after making the original request.\n * @returns {string} The name of the RP if condition is satisfied else undefined.\n */\nfunction checkRPNotRegisteredError(body: string): string {\n let result, responseBody;\n if (body) {\n try {\n responseBody = JSON.parse(body);\n } catch (err) {\n // do nothing;\n }\n if (\n responseBody &&\n responseBody.error &&\n responseBody.error.message &&\n responseBody.error.code &&\n responseBody.error.code === \"MissingSubscriptionRegistration\"\n ) {\n const matchRes = responseBody.error.message.match(/.*'(.*)'/i);\n if (matchRes) {\n result = matchRes.pop();\n }\n }\n }\n return result;\n}\n\n/**\n * Extracts the first part of the URL, just after subscription:\n * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} url The original request url\n * @returns {string} The url prefix as explained above.\n */\nfunction extractSubscriptionUrl(url: string): string {\n let result;\n const matchRes = url.match(/.*\\/subscriptions\\/[a-f0-9-]+\\//gi);\n if (matchRes && matchRes[0]) {\n result = matchRes[0];\n } else {\n throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`);\n }\n return result;\n}\n\n/**\n * Registers the given provider.\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/\n * @param {string} provider The provider name to be registered.\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @param {registrationCallback} callback The callback that handles the RP registration\n */\nfunction registerRP(\n policy: RPRegistrationPolicy,\n urlPrefix: string,\n provider: string,\n originalRequest: WebResourceLike\n): Promise {\n const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`;\n const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`;\n const reqOptions = getRequestEssentials(originalRequest);\n reqOptions.method = \"POST\";\n reqOptions.url = postUrl;\n\n return policy._nextPolicy.sendRequest(reqOptions).then((response) => {\n if (response.status !== 200) {\n throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`);\n }\n return getRegistrationStatus(policy, getUrl, originalRequest);\n });\n}\n\n/**\n * Polls the registration status of the provider that was registered. 
Polling happens at an interval of 30 seconds.\n * Polling will happen till the registrationState property of the response body is \"Registered\".\n * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against.\n * @param {string} url The request url for polling\n * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response\n * with a message that the provider is not registered.\n * @returns {Promise} True if RP Registration is successful.\n */\nfunction getRegistrationStatus(\n policy: RPRegistrationPolicy,\n url: string,\n originalRequest: WebResourceLike\n): Promise {\n const reqOptions: any = getRequestEssentials(originalRequest);\n reqOptions.url = url;\n reqOptions.method = \"GET\";\n\n return policy._nextPolicy.sendRequest(reqOptions).then((res) => {\n const obj = res.parsedBody as any;\n if (res.parsedBody && obj.registrationState && obj.registrationState === \"Registered\") {\n return true;\n } else {\n return utils\n .delay(policy._retryTimeout * 1000)\n .then(() => getRegistrationStatus(policy, url, originalRequest));\n }\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"../credentials/serviceClientCredentials\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function signingPolicy(\n authenticationProvider: ServiceClientCredentials\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SigningPolicy(nextPolicy, options, authenticationProvider);\n },\n };\n}\n\nexport class SigningPolicy extends BaseRequestPolicy {\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n public authenticationProvider: ServiceClientCredentials\n ) {\n super(nextPolicy, options);\n }\n\n signRequest(request: WebResourceLike): Promise {\n return this.authenticationProvider.signRequest(request);\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this.signRequest(request).then((nextRequest) =>\n this._nextPolicy.sendRequest(nextRequest)\n );\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport * as utils from \"../util/utils\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport interface RetryData {\n retryCount: number;\n retryInterval: number;\n error?: RetryError;\n}\n\nexport interface RetryError extends Error {\n message: string;\n code?: string;\n innerError?: RetryError;\n}\n\nexport function systemErrorRetryPolicy(\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new SystemErrorRetryPolicy(\n nextPolicy,\n options,\n retryCount,\n retryInterval,\n minRetryInterval,\n maxRetryInterval\n );\n },\n };\n}\n\n/**\n * @class\n * Instantiates a new \"ExponentialRetryPolicyFilter\" instance.\n *\n * @constructor\n * @param {number} retryCount The client retry count.\n * @param {number} retryInterval The client retry interval, in milliseconds.\n * @param {number} minRetryInterval The minimum retry interval, in milliseconds.\n * @param {number} maxRetryInterval The maximum retry interval, in milliseconds.\n */\nexport class SystemErrorRetryPolicy extends BaseRequestPolicy {\n retryCount: number;\n retryInterval: number;\n minRetryInterval: number;\n maxRetryInterval: number;\n DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30;\n DEFAULT_CLIENT_RETRY_COUNT = 3;\n DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90;\n DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n retryCount?: number,\n retryInterval?: number,\n minRetryInterval?: number,\n maxRetryInterval?: number\n ) {\n super(nextPolicy, options);\n this.retryCount = typeof retryCount === \"number\" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT;\n this.retryInterval =\n typeof retryInterval === \"number\" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL;\n this.minRetryInterval =\n typeof minRetryInterval === \"number\"\n ? minRetryInterval\n : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL;\n this.maxRetryInterval =\n typeof maxRetryInterval === \"number\"\n ? 
maxRetryInterval\n : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy\n .sendRequest(request.clone())\n .catch((error) => retry(this, request, error.response, error));\n }\n}\n\n/**\n * Determines if the operation should be retried and how long to wait until the next retry.\n *\n * @param {number} statusCode The HTTP status code.\n * @param {RetryData} retryData The retry data.\n * @return {boolean} True if the operation qualifies for a retry; false otherwise.\n */\nfunction shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean {\n let currentCount;\n if (!retryData) {\n throw new Error(\"retryData for the SystemErrorRetryPolicyFilter cannot be null.\");\n } else {\n currentCount = retryData && retryData.retryCount;\n }\n return currentCount < policy.retryCount;\n}\n\n/**\n * Updates the retry data for the next attempt.\n *\n * @param {RetryData} retryData The retry data.\n * @param {object} err The operation\"s error, if any.\n */\nfunction updateRetryData(\n policy: SystemErrorRetryPolicy,\n retryData?: RetryData,\n err?: RetryError\n): RetryData {\n if (!retryData) {\n retryData = {\n retryCount: 0,\n retryInterval: 0,\n };\n }\n\n if (err) {\n if (retryData.error) {\n err.innerError = retryData.error;\n }\n\n retryData.error = err;\n }\n\n // Adjust retry count\n retryData.retryCount++;\n\n // Adjust retry interval\n let incrementDelta = Math.pow(2, retryData.retryCount) - 1;\n const boundedRandDelta =\n policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4));\n incrementDelta *= boundedRandDelta;\n\n retryData.retryInterval = Math.min(\n policy.minRetryInterval + incrementDelta,\n policy.maxRetryInterval\n );\n\n return retryData;\n}\n\nasync function retry(\n policy: SystemErrorRetryPolicy,\n request: WebResourceLike,\n operationResponse: HttpOperationResponse,\n err?: RetryError,\n retryData?: RetryData\n): Promise {\n retryData = updateRetryData(policy, retryData, err);\n if (\n err &&\n err.code &&\n shouldRetry(policy, retryData) &&\n (err.code === \"ETIMEDOUT\" ||\n err.code === \"ESOCKETTIMEDOUT\" ||\n err.code === \"ECONNREFUSED\" ||\n err.code === \"ECONNRESET\" ||\n err.code === \"ENOENT\")\n ) {\n // If previous operation ended with an error and the policy allows a retry, do that\n try {\n await utils.delay(retryData.retryInterval);\n return policy._nextPolicy.sendRequest(request.clone());\n } catch (error) {\n return retry(policy, request, operationResponse, error, retryData);\n }\n } else {\n if (err) {\n // If the operation failed in the end, return all errors instead of just the last one\n return Promise.reject(retryData.error);\n }\n return operationResponse;\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\n/**\n * The format that will be used to join an array of values together for a query parameter value.\n */\nexport enum QueryCollectionFormat {\n Csv = \",\",\n Ssv = \" \",\n Tsv = \"\\t\",\n Pipes = \"|\",\n Multi = \"Multi\",\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { AgentSettings } from \"../serviceClient\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\n\nexport function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new AgentPolicy(nextPolicy, options, agentSettings!);\n },\n };\n}\n\nexport class AgentPolicy extends BaseRequestPolicy {\n agentSettings: AgentSettings;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n agentSettings: AgentSettings\n ) {\n super(nextPolicy, options);\n this.agentSettings = agentSettings;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.agentSettings) {\n request.agentSettings = this.agentSettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { ProxySettings } from \"../serviceClient\";\nimport { WebResourceLike } from \"../webResource\";\nimport { Constants } from \"../util/constants\";\nimport { URLBuilder } from \"../url\";\n\n/**\n * @internal\n */\nexport const noProxyList: string[] = loadNoProxy();\nconst byPassedList: Map = new Map();\n\n/**\n * @internal\n */\nexport function getEnvironmentValue(name: string): string | undefined {\n if (process.env[name]) {\n return process.env[name];\n } else if (process.env[name.toLowerCase()]) {\n return process.env[name.toLowerCase()];\n }\n return undefined;\n}\n\nfunction loadEnvironmentProxyValue(): string | undefined {\n if (!process) {\n return undefined;\n }\n\n const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY);\n const allProxy = getEnvironmentValue(Constants.ALL_PROXY);\n const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY);\n\n return httpsProxy || allProxy || httpProxy;\n}\n\n// Check whether the host of a given `uri` is in the noProxyList.\n// If there's a match, any request sent to the same host won't have the proxy settings set.\n// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210\nfunction isBypassed(uri: string): boolean | undefined {\n if (noProxyList.length === 0) {\n return false;\n }\n const host = URLBuilder.parse(uri).getHost()!;\n if (byPassedList.has(host)) {\n return byPassedList.get(host);\n }\n let isBypassedFlag = false;\n for (const pattern of noProxyList) {\n if (pattern[0] === \".\") {\n // This should match either domain it self or any subdomain or host\n // .foo.com will match foo.com it self or *.foo.com\n if (host.endsWith(pattern)) {\n isBypassedFlag = true;\n } else {\n if (host.length === pattern.length - 1 && host === pattern.slice(1)) {\n isBypassedFlag = true;\n }\n }\n } else {\n if (host === pattern) {\n isBypassedFlag = true;\n }\n }\n }\n byPassedList.set(host, isBypassedFlag);\n return isBypassedFlag;\n}\n\n/**\n * @internal\n */\nexport 
function loadNoProxy(): string[] {\n const noProxy = getEnvironmentValue(Constants.NO_PROXY);\n if (noProxy) {\n return noProxy\n .split(\",\")\n .map((item) => item.trim())\n .filter((item) => item.length);\n }\n\n return [];\n}\n\n/**\n * @internal\n */\nfunction extractAuthFromUrl(\n url: string\n): { username?: string; password?: string; urlWithoutAuth: string } {\n const atIndex = url.indexOf(\"@\");\n if (atIndex === -1) {\n return { urlWithoutAuth: url };\n }\n\n const schemeIndex = url.indexOf(\"://\");\n const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0;\n const auth = url.substring(authStart, atIndex);\n const colonIndex = auth.indexOf(\":\");\n const hasPassword = colonIndex !== -1;\n const username = hasPassword ? auth.substring(0, colonIndex) : auth;\n const password = hasPassword ? auth.substring(colonIndex + 1) : undefined;\n const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1);\n return {\n username,\n password,\n urlWithoutAuth,\n };\n}\n\nexport function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined {\n if (!proxyUrl) {\n proxyUrl = loadEnvironmentProxyValue();\n if (!proxyUrl) {\n return undefined;\n }\n }\n\n const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl);\n const parsedUrl = URLBuilder.parse(urlWithoutAuth);\n const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + \"://\" : \"\";\n return {\n host: schema + parsedUrl.getHost(),\n port: Number.parseInt(parsedUrl.getPort() || \"80\"),\n username,\n password,\n };\n}\n\nexport function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory {\n if (!proxySettings) {\n proxySettings = getDefaultProxySettings();\n }\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ProxyPolicy(nextPolicy, options, proxySettings!);\n },\n };\n}\n\nexport class ProxyPolicy extends BaseRequestPolicy {\n proxySettings: ProxySettings;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n proxySettings: ProxySettings\n ) {\n super(nextPolicy, options);\n this.proxySettings = proxySettings;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n if (!request.proxySettings && !isBypassed(request.url)) {\n request.proxySettings = this.proxySettings;\n }\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyOptionsLike,\n RequestPolicyFactory,\n} from \"./requestPolicy\";\nimport { WebResourceLike } from \"../webResource\";\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { Constants } from \"../util/constants\";\nimport { delay } from \"../util/utils\";\n\nconst StatusCodes = Constants.HttpConstants.StatusCodes;\nconst DEFAULT_RETRY_COUNT = 3;\n\n/**\n * Options that control how to retry on response status code 429.\n */\nexport interface ThrottlingRetryOptions {\n /**\n * The maximum number of retry attempts. 
Defaults to 3.\n */\n maxRetries?: number;\n}\n\nexport function throttlingRetryPolicy(\n maxRetries: number = DEFAULT_RETRY_COUNT\n): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries);\n },\n };\n}\n\n/**\n * To learn more, please refer to\n * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits,\n * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and\n * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors\n */\nexport class ThrottlingRetryPolicy extends BaseRequestPolicy {\n private retryLimit: number;\n\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) {\n super(nextPolicy, options);\n this.retryLimit = retryLimit;\n }\n\n public async sendRequest(httpRequest: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => {\n return this.retry(httpRequest, response, 0);\n });\n }\n\n private async retry(\n httpRequest: WebResourceLike,\n httpResponse: HttpOperationResponse,\n retryCount: number\n ): Promise {\n if (httpResponse.status !== StatusCodes.TooManyRequests) {\n return httpResponse;\n }\n\n const retryAfterHeader: string | undefined = httpResponse.headers.get(\n Constants.HeaderConstants.RETRY_AFTER\n );\n\n if (retryAfterHeader && retryCount < this.retryLimit) {\n const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader(\n retryAfterHeader\n );\n if (delayInMs) {\n await delay(delayInMs);\n const res = await this._nextPolicy.sendRequest(httpRequest);\n return this.retry(httpRequest, res, retryCount + 1);\n }\n }\n\n return httpResponse;\n }\n\n public static parseRetryAfterHeader(headerValue: string): number | undefined {\n const retryAfterInSeconds = Number(headerValue);\n if (Number.isNaN(retryAfterInSeconds)) {\n return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue);\n } else {\n return retryAfterInSeconds * 1000;\n }\n }\n\n public static parseDateRetryAfterHeader(headerValue: string): number | undefined {\n try {\n const now: number = Date.now();\n const date: number = Date.parse(headerValue);\n const diff = date - now;\n\n return Number.isNaN(diff) ? undefined : diff;\n } catch (error) {\n return undefined;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nimport { Constants as MSRestConstants } from \"../util/constants\";\nimport { WebResource } from \"../webResource\";\n\nimport { TokenCredential } from \"@azure/core-auth\";\nimport { TokenResponse } from \"./tokenResponse\";\n\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter.\n */\nexport const azureResourceManagerEndpoints = [\n \"https://management.windows.net\",\n \"https://management.chinacloudapi.cn\",\n \"https://management.usgovcloudapi.net\",\n \"https://management.cloudapi.de\",\n];\n\n/**\n * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to\n * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication.\n */\nexport class AzureIdentityCredentialAdapter implements ServiceClientCredentials {\n private azureTokenCredential: TokenCredential;\n private scopes: string | string[];\n constructor(\n azureTokenCredential: TokenCredential,\n scopes: string | string[] = \"https://management.azure.com/.default\"\n ) {\n this.azureTokenCredential = azureTokenCredential;\n this.scopes = scopes;\n }\n\n public async getToken(): Promise {\n const accessToken = await this.azureTokenCredential.getToken(this.scopes);\n if (accessToken !== null) {\n const result: TokenResponse = {\n accessToken: accessToken.token,\n tokenType: DEFAULT_AUTHORIZATION_SCHEME,\n expiresOn: accessToken.expiresOnTimestamp,\n };\n return result;\n } else {\n throw new Error(\"Could find token for scope\");\n }\n }\n\n public async signRequest(webResource: WebResource) {\n const tokenResponse = await this.getToken();\n webResource.headers.set(\n MSRestConstants.HeaderConstants.AUTHORIZATION,\n `${tokenResponse.tokenType} ${tokenResponse.accessToken}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { TokenCredential, isTokenCredential } from \"@azure/core-auth\";\nimport { ServiceClientCredentials } from \"./credentials/serviceClientCredentials\";\nimport { DefaultHttpClient } from \"./defaultHttpClient\";\nimport { HttpClient } from \"./httpClient\";\nimport { HttpOperationResponse, RestResponse } from \"./httpOperationResponse\";\nimport { HttpPipelineLogger } from \"./httpPipelineLogger\";\nimport { OperationArguments } from \"./operationArguments\";\nimport {\n getPathStringFromParameter,\n getPathStringFromParameterPath,\n OperationParameter,\n ParameterPath,\n} from \"./operationParameter\";\nimport { isStreamOperation, OperationSpec } from \"./operationSpec\";\nimport {\n deserializationPolicy,\n DeserializationContentTypes,\n} from \"./policies/deserializationPolicy\";\nimport { exponentialRetryPolicy } from \"./policies/exponentialRetryPolicy\";\nimport { generateClientRequestIdPolicy } from \"./policies/generateClientRequestIdPolicy\";\nimport {\n userAgentPolicy,\n getDefaultUserAgentHeaderName,\n getDefaultUserAgentValue,\n} from \"./policies/userAgentPolicy\";\nimport { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from \"./policies/redirectPolicy\";\nimport {\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RequestPolicyOptionsLike,\n} from \"./policies/requestPolicy\";\nimport { rpRegistrationPolicy } from \"./policies/rpRegistrationPolicy\";\nimport { signingPolicy } from \"./policies/signingPolicy\";\nimport { systemErrorRetryPolicy } from \"./policies/systemErrorRetryPolicy\";\nimport { QueryCollectionFormat } from \"./queryCollectionFormat\";\nimport { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from \"./serializer\";\nimport { URLBuilder } from \"./url\";\nimport * as utils from \"./util/utils\";\nimport { stringifyXML } from \"./util/xml\";\nimport {\n RequestOptionsBase,\n RequestPrepareOptions,\n WebResourceLike,\n isWebResourceLike,\n WebResource,\n} from \"./webResource\";\nimport { OperationResponse } from \"./operationResponse\";\nimport { ServiceCallback } from \"./util/utils\";\nimport { agentPolicy } from \"./policies/agentPolicy\";\nimport { proxyPolicy, getDefaultProxySettings } from \"./policies/proxyPolicy\";\nimport { throttlingRetryPolicy } from \"./policies/throttlingRetryPolicy\";\nimport { Agent } from \"http\";\nimport {\n AzureIdentityCredentialAdapter,\n azureResourceManagerEndpoints,\n} from \"./credentials/azureIdentityTokenCredentialAdapter\";\n\n/**\n * HTTP proxy settings (Node.js only)\n */\nexport interface ProxySettings {\n host: string;\n port: number;\n username?: string;\n password?: string;\n}\n\n/**\n * HTTP and HTTPS agents (Node.js only)\n */\nexport interface AgentSettings {\n http: Agent;\n https: Agent;\n}\n\n/**\n * Options to be provided while creating the client.\n */\nexport interface ServiceClientOptions {\n /**\n * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP\n * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns\n * the requestPolicyFactories that will be used.\n */\n requestPolicyFactories?:\n | RequestPolicyFactory[]\n | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]);\n /**\n * The HttpClient that will be used to send HTTP requests.\n */\n httpClient?: HttpClient;\n /**\n * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP 
pipeline.\n */\n httpPipelineLogger?: HttpPipelineLogger;\n /**\n * If set to true, turn off the default retry policy.\n */\n noRetryPolicy?: boolean;\n /**\n * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30.\n */\n rpRegistrationRetryTimeout?: number;\n /**\n * Whether or not to generate a client request ID header for each HTTP request.\n */\n generateClientRequestIdHeader?: boolean;\n /**\n * Whether to include credentials in CORS requests in the browser.\n * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information.\n */\n withCredentials?: boolean;\n /**\n * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a\n * header to all outgoing requests with this header name and a random UUID as the request ID.\n */\n clientRequestIdHeaderName?: string;\n /**\n * The content-types that will be associated with JSON or XML serialization.\n */\n deserializationContentTypes?: DeserializationContentTypes;\n /**\n * The header name to use for the telemetry header while sending the request. If this is not\n * specified, then \"User-Agent\" will be used when running on Node.js and \"x-ms-command-name\" will\n * be used when running in a browser.\n */\n userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string);\n /**\n * The string to be set to the telemetry header while sending the request, or a function that\n * takes in the default user-agent string and returns the user-agent string that will be used.\n */\n userAgent?: string | ((defaultUserAgent: string) => string);\n /**\n * Proxy settings which will be used for every HTTP request (Node.js only).\n */\n proxySettings?: ProxySettings;\n /**\n * Options for how redirect responses are handled.\n */\n redirectOptions?: RedirectOptions;\n /**\n * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only).\n */\n agentSettings?: AgentSettings;\n /**\n * If specified:\n * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient.\n * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. 
Otherwise, the scope would default to \"https://management.azure.com/.default\".\n *\n * If it is not specified:\n * - All OperationSpecs must contain a baseUrl property.\n * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be \"https://management.azure.com/.default\".\n */\n baseUri?: string;\n}\n\n/**\n * @class\n * Initializes a new instance of the ServiceClient.\n */\nexport class ServiceClient {\n /**\n * The base URI against which requests will be made when using this ServiceClient instance.\n *\n * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient.\n * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default \"https://management.azure.com/.default\"\n *\n * If it is not specified, all OperationSpecs must contain a baseUrl property.\n */\n protected baseUri?: string;\n\n /**\n * The default request content type for the service.\n * Used if no requestContentType is present on an OperationSpec.\n */\n protected requestContentType?: string;\n\n /**\n * The HTTP client that will be used to send requests.\n */\n private readonly _httpClient: HttpClient;\n private readonly _requestPolicyOptions: RequestPolicyOptionsLike;\n\n private readonly _requestPolicyFactories: RequestPolicyFactory[];\n private readonly _withCredentials: boolean;\n\n /**\n * The ServiceClient constructor\n * @constructor\n * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication.\n * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client.\n */\n constructor(\n credentials?: ServiceClientCredentials | TokenCredential,\n options?: ServiceClientOptions\n ) {\n if (!options) {\n options = {};\n }\n\n if (options.baseUri) {\n this.baseUri = options.baseUri;\n }\n\n let serviceClientCredentials: ServiceClientCredentials | undefined;\n if (isTokenCredential(credentials)) {\n let scope: string | undefined = undefined;\n if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) {\n scope = `${options.baseUri}/.default`;\n }\n serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope);\n } else {\n serviceClientCredentials = credentials;\n }\n\n if (serviceClientCredentials && !serviceClientCredentials.signRequest) {\n throw new Error(\"credentials argument needs to implement signRequest method\");\n }\n\n this._withCredentials = options.withCredentials || false;\n this._httpClient = options.httpClient || new DefaultHttpClient();\n this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger);\n\n let requestPolicyFactories: RequestPolicyFactory[];\n if (Array.isArray(options.requestPolicyFactories)) {\n requestPolicyFactories = options.requestPolicyFactories;\n } else {\n requestPolicyFactories = createDefaultRequestPolicyFactories(\n serviceClientCredentials,\n options\n );\n if (options.requestPolicyFactories) {\n const newRequestPolicyFactories:\n | void\n | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories);\n if (newRequestPolicyFactories) {\n requestPolicyFactories = newRequestPolicyFactories;\n }\n }\n }\n this._requestPolicyFactories = requestPolicyFactories;\n }\n\n /**\n * Send the provided 
httpRequest.\n */\n sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise {\n if (options === null || options === undefined || typeof options !== \"object\") {\n throw new Error(\"options cannot be null or undefined and it must be of type object.\");\n }\n\n let httpRequest: WebResourceLike;\n try {\n if (isWebResourceLike(options)) {\n options.validateRequestProperties();\n httpRequest = options;\n } else {\n httpRequest = new WebResource();\n httpRequest = httpRequest.prepare(options);\n }\n } catch (error) {\n return Promise.reject(error);\n }\n\n let httpPipeline: RequestPolicy = this._httpClient;\n if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) {\n for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) {\n httpPipeline = this._requestPolicyFactories[i].create(\n httpPipeline,\n this._requestPolicyOptions\n );\n }\n }\n return httpPipeline.sendRequest(httpRequest);\n }\n\n /**\n * Send an HTTP request that is populated using the provided OperationSpec.\n * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from.\n * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest.\n * @param {ServiceCallback} callback The callback to call when the response is received.\n */\n sendOperationRequest(\n operationArguments: OperationArguments,\n operationSpec: OperationSpec,\n callback?: ServiceCallback\n ): Promise {\n if (typeof operationArguments.options === \"function\") {\n callback = operationArguments.options;\n operationArguments.options = undefined;\n }\n\n const httpRequest = new WebResource();\n\n let result: Promise;\n try {\n const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri;\n if (!baseUri) {\n throw new Error(\n \"If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use.\"\n );\n }\n\n httpRequest.method = operationSpec.httpMethod;\n httpRequest.operationSpec = operationSpec;\n\n const requestUrl: URLBuilder = URLBuilder.parse(baseUri);\n if (operationSpec.path) {\n requestUrl.appendPath(operationSpec.path);\n }\n if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) {\n for (const urlParameter of operationSpec.urlParameters) {\n let urlParameterValue: string = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n urlParameter,\n operationSpec.serializer\n );\n urlParameterValue = operationSpec.serializer.serialize(\n urlParameter.mapper,\n urlParameterValue,\n getPathStringFromParameter(urlParameter)\n );\n if (!urlParameter.skipEncoding) {\n urlParameterValue = encodeURIComponent(urlParameterValue);\n }\n requestUrl.replaceAll(\n `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`,\n urlParameterValue\n );\n }\n }\n if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) {\n for (const queryParameter of operationSpec.queryParameters) {\n let queryParameterValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n queryParameter,\n operationSpec.serializer\n );\n if (queryParameterValue != undefined) {\n queryParameterValue = operationSpec.serializer.serialize(\n queryParameter.mapper,\n queryParameterValue,\n getPathStringFromParameter(queryParameter)\n );\n if (queryParameter.collectionFormat != undefined) {\n if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) {\n if 
(queryParameterValue.length === 0) {\n queryParameterValue = \"\";\n } else {\n for (const index in queryParameterValue) {\n const item = queryParameterValue[index];\n queryParameterValue[index] = item == undefined ? \"\" : item.toString();\n }\n }\n } else if (\n queryParameter.collectionFormat === QueryCollectionFormat.Ssv ||\n queryParameter.collectionFormat === QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n }\n if (!queryParameter.skipEncoding) {\n if (Array.isArray(queryParameterValue)) {\n for (const index in queryParameterValue) {\n if (\n queryParameterValue[index] !== undefined &&\n queryParameterValue[index] !== null\n ) {\n queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]);\n }\n }\n } else {\n queryParameterValue = encodeURIComponent(queryParameterValue);\n }\n }\n if (\n queryParameter.collectionFormat != undefined &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Multi &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Ssv &&\n queryParameter.collectionFormat !== QueryCollectionFormat.Tsv\n ) {\n queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat);\n }\n requestUrl.setQueryParameter(\n queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter),\n queryParameterValue\n );\n }\n }\n }\n httpRequest.url = requestUrl.toString();\n\n const contentType = operationSpec.contentType || this.requestContentType;\n if (contentType) {\n httpRequest.headers.set(\"Content-Type\", contentType);\n }\n\n if (operationSpec.headerParameters) {\n for (const headerParameter of operationSpec.headerParameters) {\n let headerValue: any = getOperationArgumentValueFromParameter(\n this,\n operationArguments,\n headerParameter,\n operationSpec.serializer\n );\n if (headerValue != undefined) {\n headerValue = operationSpec.serializer.serialize(\n headerParameter.mapper,\n headerValue,\n getPathStringFromParameter(headerParameter)\n );\n const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper)\n .headerCollectionPrefix;\n if (headerCollectionPrefix) {\n for (const key of Object.keys(headerValue)) {\n httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]);\n }\n } else {\n httpRequest.headers.set(\n headerParameter.mapper.serializedName ||\n getPathStringFromParameter(headerParameter),\n headerValue\n );\n }\n }\n }\n }\n\n const options: RequestOptionsBase | undefined = operationArguments.options;\n if (options) {\n if (options.customHeaders) {\n for (const customHeaderName in options.customHeaders) {\n httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]);\n }\n }\n\n if (options.abortSignal) {\n httpRequest.abortSignal = options.abortSignal;\n }\n\n if (options.timeout) {\n httpRequest.timeout = options.timeout;\n }\n\n if (options.onUploadProgress) {\n httpRequest.onUploadProgress = options.onUploadProgress;\n }\n\n if (options.onDownloadProgress) {\n httpRequest.onDownloadProgress = options.onDownloadProgress;\n }\n }\n\n httpRequest.withCredentials = this._withCredentials;\n\n serializeRequestBody(this, httpRequest, operationArguments, operationSpec);\n\n if (httpRequest.streamResponseBody == undefined) {\n httpRequest.streamResponseBody = isStreamOperation(operationSpec);\n }\n\n result = this.sendRequest(httpRequest).then((res) =>\n flattenResponse(res, operationSpec.responses[res.status])\n );\n } catch (error) {\n result = Promise.reject(error);\n }\n\n const 
cb = callback;\n if (cb) {\n result\n // tslint:disable-next-line:no-null-keyword\n .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response))\n .catch((err) => cb(err));\n }\n\n return result;\n }\n}\n\nexport function serializeRequestBody(\n serviceClient: ServiceClient,\n httpRequest: WebResourceLike,\n operationArguments: OperationArguments,\n operationSpec: OperationSpec\n): void {\n if (operationSpec.requestBody && operationSpec.requestBody.mapper) {\n httpRequest.body = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n operationSpec.requestBody,\n operationSpec.serializer\n );\n\n const bodyMapper = operationSpec.requestBody.mapper;\n const { required, xmlName, xmlElementName, serializedName } = bodyMapper;\n const typeName = bodyMapper.type.name;\n try {\n if (httpRequest.body != undefined || required) {\n const requestBodyParameterPathString: string = getPathStringFromParameter(\n operationSpec.requestBody\n );\n httpRequest.body = operationSpec.serializer.serialize(\n bodyMapper,\n httpRequest.body,\n requestBodyParameterPathString\n );\n const isStream = typeName === MapperType.Stream;\n if (operationSpec.isXML) {\n if (typeName === MapperType.Sequence) {\n httpRequest.body = stringifyXML(\n utils.prepareXMLRootList(\n httpRequest.body,\n xmlElementName || xmlName || serializedName!\n ),\n { rootName: xmlName || serializedName }\n );\n } else if (!isStream) {\n httpRequest.body = stringifyXML(httpRequest.body, {\n rootName: xmlName || serializedName,\n });\n }\n } else if (!isStream) {\n httpRequest.body = JSON.stringify(httpRequest.body);\n }\n }\n } catch (error) {\n throw new Error(\n `Error \"${error.message}\" occurred in serializing the payload - ${JSON.stringify(\n serializedName,\n undefined,\n \" \"\n )}.`\n );\n }\n } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) {\n httpRequest.formData = {};\n for (const formDataParameter of operationSpec.formDataParameters) {\n const formDataParameterValue: any = getOperationArgumentValueFromParameter(\n serviceClient,\n operationArguments,\n formDataParameter,\n operationSpec.serializer\n );\n if (formDataParameterValue != undefined) {\n const formDataParameterPropertyName: string =\n formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter);\n httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(\n formDataParameter.mapper,\n formDataParameterValue,\n getPathStringFromParameter(formDataParameter)\n );\n }\n }\n }\n}\n\nfunction isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory {\n return typeof instance.create === \"function\";\n}\n\nfunction getValueOrFunctionResult(\n value: undefined | string | ((defaultValue: string) => string),\n defaultValueCreator: () => string\n): string {\n let result: string;\n if (typeof value === \"string\") {\n result = value;\n } else {\n result = defaultValueCreator();\n if (typeof value === \"function\") {\n result = value(result);\n }\n }\n return result;\n}\n\nfunction createDefaultRequestPolicyFactories(\n credentials: ServiceClientCredentials | RequestPolicyFactory | undefined,\n options: ServiceClientOptions\n): RequestPolicyFactory[] {\n const factories: RequestPolicyFactory[] = [];\n\n if (options.generateClientRequestIdHeader) {\n factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName));\n }\n\n if (credentials) {\n if (isRequestPolicyFactory(credentials)) {\n 
factories.push(credentials);\n } else {\n factories.push(signingPolicy(credentials));\n }\n }\n\n const userAgentHeaderName: string = getValueOrFunctionResult(\n options.userAgentHeaderName,\n getDefaultUserAgentHeaderName\n );\n const userAgentHeaderValue: string = getValueOrFunctionResult(\n options.userAgent,\n getDefaultUserAgentValue\n );\n if (userAgentHeaderName && userAgentHeaderValue) {\n factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue }));\n }\n\n const redirectOptions = {\n ...DefaultRedirectOptions,\n ...options.redirectOptions,\n };\n if (redirectOptions.handleRedirects) {\n factories.push(redirectPolicy(redirectOptions.maxRetries));\n }\n\n factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout));\n\n if (!options.noRetryPolicy) {\n factories.push(exponentialRetryPolicy());\n factories.push(systemErrorRetryPolicy());\n factories.push(throttlingRetryPolicy());\n }\n\n factories.push(deserializationPolicy(options.deserializationContentTypes));\n\n const proxySettings = options.proxySettings || getDefaultProxySettings();\n if (proxySettings) {\n factories.push(proxyPolicy(proxySettings));\n }\n\n if (options.agentSettings) {\n factories.push(agentPolicy(options.agentSettings));\n }\n\n return factories;\n}\n\nexport type PropertyParent = { [propertyName: string]: any };\n\n/**\n * Get the property parent for the property at the provided path when starting with the provided\n * parent object.\n */\nexport function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent {\n if (parent && propertyPath) {\n const propertyPathLength: number = propertyPath.length;\n for (let i = 0; i < propertyPathLength - 1; ++i) {\n const propertyName: string = propertyPath[i];\n if (!parent[propertyName]) {\n parent[propertyName] = {};\n }\n parent = parent[propertyName];\n }\n }\n return parent;\n}\n\nfunction getOperationArgumentValueFromParameter(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameter: OperationParameter,\n serializer: Serializer\n): any {\n return getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n parameter.parameterPath,\n parameter.mapper,\n serializer\n );\n}\n\nexport function getOperationArgumentValueFromParameterPath(\n serviceClient: ServiceClient,\n operationArguments: OperationArguments,\n parameterPath: ParameterPath,\n parameterMapper: Mapper,\n serializer: Serializer\n): any {\n let value: any;\n if (typeof parameterPath === \"string\") {\n parameterPath = [parameterPath];\n }\n if (Array.isArray(parameterPath)) {\n if (parameterPath.length > 0) {\n if (parameterMapper.isConstant) {\n value = parameterMapper.defaultValue;\n } else {\n let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath(\n operationArguments,\n parameterPath\n );\n if (!propertySearchResult.propertyFound) {\n propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath);\n }\n\n let useDefaultValue = false;\n if (!propertySearchResult.propertyFound) {\n useDefaultValue =\n parameterMapper.required ||\n (parameterPath[0] === \"options\" && parameterPath.length === 2);\n }\n value = useDefaultValue ? 
parameterMapper.defaultValue : propertySearchResult.propertyValue;\n }\n\n // Serialize just for validation purposes.\n const parameterPathString: string = getPathStringFromParameterPath(\n parameterPath,\n parameterMapper\n );\n serializer.serialize(parameterMapper, value, parameterPathString);\n }\n } else {\n if (parameterMapper.required) {\n value = {};\n }\n\n for (const propertyName in parameterPath) {\n const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![\n propertyName\n ];\n const propertyPath: ParameterPath = parameterPath[propertyName];\n const propertyValue: any = getOperationArgumentValueFromParameterPath(\n serviceClient,\n operationArguments,\n propertyPath,\n propertyMapper,\n serializer\n );\n // Serialize just for validation purposes.\n const propertyPathString: string = getPathStringFromParameterPath(\n propertyPath,\n propertyMapper\n );\n serializer.serialize(propertyMapper, propertyValue, propertyPathString);\n if (propertyValue !== undefined) {\n if (!value) {\n value = {};\n }\n value[propertyName] = propertyValue;\n }\n }\n }\n return value;\n}\n\ninterface PropertySearchResult {\n propertyValue?: any;\n propertyFound: boolean;\n}\n\nfunction getPropertyFromParameterPath(\n parent: { [parameterName: string]: any },\n parameterPath: string[]\n): PropertySearchResult {\n const result: PropertySearchResult = { propertyFound: false };\n let i = 0;\n for (; i < parameterPath.length; ++i) {\n const parameterPathPart: string = parameterPath[i];\n // Make sure to check inherited properties too, so don't use hasOwnProperty().\n if (parent != undefined && parameterPathPart in parent) {\n parent = parent[parameterPathPart];\n } else {\n break;\n }\n }\n if (i === parameterPath.length) {\n result.propertyValue = parent;\n result.propertyFound = true;\n }\n return result;\n}\n\nexport function flattenResponse(\n _response: HttpOperationResponse,\n responseSpec: OperationResponse | undefined\n): RestResponse {\n const parsedHeaders = _response.parsedHeaders;\n const bodyMapper = responseSpec && responseSpec.bodyMapper;\n\n const addOperationResponse = (obj: {}) =>\n Object.defineProperty(obj, \"_response\", {\n value: _response,\n });\n\n if (bodyMapper) {\n const typeName = bodyMapper.type.name;\n if (typeName === \"Stream\") {\n return addOperationResponse({\n ...parsedHeaders,\n blobBody: _response.blobBody,\n readableStreamBody: _response.readableStreamBody,\n });\n }\n\n const modelProperties =\n (typeName === \"Composite\" && (bodyMapper as CompositeMapper).type.modelProperties) || {};\n const isPageableResponse = Object.keys(modelProperties).some(\n (k) => modelProperties[k].serializedName === \"\"\n );\n if (typeName === \"Sequence\" || isPageableResponse) {\n // We're expecting a sequece(array) make sure that the response body is in the\n // correct format, if not make it an empty array []\n const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : [];\n const arrayResponse = [...parsedBody] as RestResponse & any[];\n\n for (const key of Object.keys(modelProperties)) {\n if (modelProperties[key].serializedName) {\n arrayResponse[key] = _response.parsedBody[key];\n }\n }\n\n if (parsedHeaders) {\n for (const key of Object.keys(parsedHeaders)) {\n arrayResponse[key] = parsedHeaders[key];\n }\n }\n addOperationResponse(arrayResponse);\n return arrayResponse;\n }\n\n if (typeName === \"Composite\" || typeName === \"Dictionary\") {\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n }\n }\n\n if (\n bodyMapper ||\n _response.request.method === \"HEAD\" ||\n utils.isPrimitiveType(_response.parsedBody)\n ) {\n // primitive body types and HEAD booleans\n return addOperationResponse({\n ...parsedHeaders,\n body: _response.parsedBody,\n });\n }\n\n return addOperationResponse({\n ...parsedHeaders,\n ..._response.parsedBody,\n });\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpOperationResponse } from \"../httpOperationResponse\";\nimport { WebResourceLike } from \"../webResource\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptionsLike,\n} from \"./requestPolicy\";\n\nexport function logPolicy(logger: any = console.log): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => {\n return new LogPolicy(nextPolicy, options, logger);\n },\n };\n}\n\nexport class LogPolicy extends BaseRequestPolicy {\n logger?: any;\n\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptionsLike,\n logger: any = console.log\n ) {\n super(nextPolicy, options);\n this.logger = logger;\n }\n\n public sendRequest(request: WebResourceLike): Promise {\n return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response));\n }\n}\n\nfunction logResponse(\n policy: LogPolicy,\n response: HttpOperationResponse\n): Promise {\n policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`);\n policy.logger(`>> Response status code: ${response.status}`);\n const responseBody = response.bodyAsText;\n policy.logger(`>> Body: ${responseBody}`);\n return Promise.resolve(response);\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Bearer\";\n\n/**\n * A credentials object that uses a token string and a authorzation scheme to authenticate.\n */\nexport class TokenCredentials implements ServiceClientCredentials {\n token: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new TokenCredentials object.\n *\n * @constructor\n * @param {string} token The token.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) {\n if (!token) {\n throw new Error(\"token cannot be null or undefined.\");\n }\n this.token = token;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @return {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n if (!webResource.headers) webResource.headers = new HttpHeaders();\n webResource.headers.set(\n HeaderConstants.AUTHORIZATION,\n `${this.authorizationScheme} ${this.token}`\n );\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport * as base64 from \"../util/base64\";\nimport { Constants } from \"../util/constants\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\nconst HeaderConstants = Constants.HeaderConstants;\nconst DEFAULT_AUTHORIZATION_SCHEME = \"Basic\";\n\nexport class BasicAuthenticationCredentials implements ServiceClientCredentials {\n userName: string;\n password: string;\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME;\n\n /**\n * Creates a new BasicAuthenticationCredentials object.\n *\n * @constructor\n * @param {string} userName User name.\n * @param {string} password Password.\n * @param {string} [authorizationScheme] The authorization scheme.\n */\n constructor(\n userName: string,\n password: string,\n authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME\n ) {\n if (userName === null || userName === undefined || typeof userName.valueOf() !== \"string\") {\n throw new Error(\"userName cannot be null or undefined and must be of type string.\");\n }\n if (password === null || password === undefined || typeof password.valueOf() !== \"string\") {\n throw new Error(\"password cannot be null or undefined and must be of type string.\");\n }\n this.userName = userName;\n this.password = password;\n this.authorizationScheme = authorizationScheme;\n }\n\n /**\n * Signs a request with the Authentication header.\n *\n * @param {WebResourceLike} webResource The WebResourceLike to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike) {\n const credentials = `${this.userName}:${this.password}`;\n const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`;\n if (!webResource.headers) webResource.headers 
= new HttpHeaders();\n webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials);\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { HttpHeaders } from \"../httpHeaders\";\nimport { WebResourceLike } from \"../webResource\";\nimport { ServiceClientCredentials } from \"./serviceClientCredentials\";\n\n/**\n * @interface ApiKeyCredentialOptions\n * Describes the options to be provided while creating an instance of ApiKeyCredentials\n */\nexport interface ApiKeyCredentialOptions {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n inQuery?: { [x: string]: any };\n}\n\n/**\n * Authenticates to a service using an API key.\n */\nexport class ApiKeyCredentials implements ServiceClientCredentials {\n /**\n * A key value pair of the header parameters that need to be applied to the request.\n */\n private readonly inHeader?: { [x: string]: any };\n /**\n * A key value pair of the query parameters that need to be applied to the request.\n */\n private readonly inQuery?: { [x: string]: any };\n\n /**\n * @constructor\n * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided.\n */\n constructor(options: ApiKeyCredentialOptions) {\n if (!options || (options && !options.inHeader && !options.inQuery)) {\n throw new Error(\n `options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided.`\n );\n }\n this.inHeader = options.inHeader;\n this.inQuery = options.inQuery;\n }\n\n /**\n * Signs a request with the values provided in the inHeader and inQuery parameter.\n *\n * @param {WebResource} webResource The WebResource to be signed.\n * @returns {Promise} The signed request object.\n */\n signRequest(webResource: WebResourceLike): Promise {\n if (!webResource) {\n return Promise.reject(\n new Error(`webResource cannot be null or undefined and must be of type \"object\".`)\n );\n }\n\n if (this.inHeader) {\n if (!webResource.headers) {\n webResource.headers = new HttpHeaders();\n }\n for (const headerName in this.inHeader) {\n webResource.headers.set(headerName, this.inHeader[headerName]);\n }\n }\n\n if (this.inQuery) {\n if (!webResource.url) {\n return Promise.reject(new Error(`url cannot be null in the request object.`));\n }\n if (webResource.url.indexOf(\"?\") < 0) {\n webResource.url += \"?\";\n }\n for (const key in this.inQuery) {\n if (!webResource.url.endsWith(\"?\")) {\n webResource.url += \"&\";\n }\n webResource.url += `${key}=${this.inQuery[key]}`;\n }\n }\n\n return Promise.resolve(webResource);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. 
See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class TopicCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid TopicCredentials object.\n *\n * @constructor\n * @param {string} topicKey The EventGrid topic key\n */\n constructor(topicKey: string) {\n if (!topicKey || (topicKey && typeof topicKey !== \"string\")) {\n throw new Error(\"topicKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": topicKey,\n },\n };\n super(options);\n }\n}\n","// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License. See License.txt in the project root for license information.\n\nimport { ApiKeyCredentials, ApiKeyCredentialOptions } from \"./apiKeyCredentials\";\n\nexport class DomainCredentials extends ApiKeyCredentials {\n /**\n * Creates a new EventGrid DomainCredentials object.\n *\n * @constructor\n * @param {string} domainKey The EventGrid domain key\n */\n constructor(domainKey: string) {\n if (!domainKey || (domainKey && typeof domainKey !== \"string\")) {\n throw new Error(\"domainKey cannot be null or undefined and must be of type string.\");\n }\n const options: ApiKeyCredentialOptions = {\n inHeader: {\n \"aeg-sas-key\": domainKey,\n },\n };\n super(options);\n }\n}\n"],"names":["uuidv4","base64.decodeString","base64.encodeByteArray","utils.isValidUuid","utils.isDuration","__extends","Transform","tunnel.httpsOverHttps","tunnel.httpsOverHttp","tunnel.httpOverHttps","tunnel.httpOverHttp","tough.CookieJar","https.Agent","http.Agent","HttpPipelineLogLevel","xml2js.Builder","xml2js.Parser","utils.stripRequest","utils.stripResponse","utils\n .delay","utils.generateUuid","os.arch","os.type","os.release","utils\n 
.delay","retry","shouldRetry","updateRetryData","utils.delay","QueryCollectionFormat","MSRestConstants","DefaultHttpClient","utils.prepareXMLRootList","__spreadArrays","utils.isPrimitiveType","DEFAULT_AUTHORIZATION_SCHEME","HeaderConstants","base64.encodeString"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA;AACA;AAEA;;;AAGA,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;SA4Ee,iBAAiB,CAAC,MAAY;IAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,KAAK,CAAC;KACd;IAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;QACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;QAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;QACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;QACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;QACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;IAME,qBAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;KACF;;;;;;;IAQM,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;KACH;;;;;;IAOM,yBAAG,GAAV,UAAW,UAAkB;QAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,GAAG,SAAS,GAAG,MAAM,CAAC,KAAK,CAAC;KAC3C;;;;IAKM,8BAAQ,GAAf,UAAgB,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;KACrD;;;;;;IAOM,4BAAM,GAAb,UAAc,UAAkB;QAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;KACf;;;;IAKM,gCAAU,GAAjB;QACE,IAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SAClD;QACD,OAAO,MAAM,CAAC;KACf;;;;IAKM,kCAAY,GAAnB;QACE,IAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;KAChB;;;;IAKM,iCAAW,GAAlB;QACE,IAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;KACpB;;;;IAKM,kCAAY,GAAnB;QACE,IAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;KACrB;;;;IAKM,4BAAM,GAAb;QACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;KAC1B;;;;IAKM,8BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;KACtC;;;;IAKM,2BAAK,GAAZ;QACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;KAC3C;IACH,kBAAC;AAAD,CAAC;;ACrOD;AACA;AAEA;;;;AAIA,SAAgB,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;;AAIA,SAAgB,eAAe,CAAC,KAAiB;;;IAG/C,IAAM,WAAW,GAAG,KAAK,YAAY,MAAM,GAAG,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;;AAIA,SAAgB,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC;;AC5BD;AACA;AAEA,IAAa,SAAS,GAAG;;;;;;IAMvB,aAAa,EAAE,OAAO;;;;;;;IAQtB,IAAI,EAAE,OAAO;;;;;;;IAQb,KAAK,EAAE,QAAQ;;;;;;;IAQf,UAAU,EAAE,YAAY;;;;;;;IAQxB
,WAAW,EAAE,aAAa;;;;IAK1B,QAAQ,EAAE,UAAU;;;;IAKpB,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;;;;;;;QAOb,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;SACrB;KACF;;;;IAKD,eAAe,EAAE;;;;;;;QAOf,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;;;;;;;;;QAU9B,WAAW,EAAE,aAAa;;;;;;;QAQ1B,UAAU,EAAE,YAAY;KACzB;CACF;;AC3GD;AACA,AAQA;;;AAGA,IAAa,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B,AAUA;;;;;;AAMA,SAAgB,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;;;AAQA,SAAgB,aAAa,CAAC,QAA+B;IAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;;;AAQA,SAAgB,YAAY,CAAC,OAAwB;IACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;;;AAOA,SAAgB,WAAW,CAAC,IAAY;IACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;IACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,AA4BA;;;;;AAKA,SAAgB,YAAY;IAC1B,OAAOA,OAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;;;;AAWA,SAAgB,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;IACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;QACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;KACtC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,AAeA;;;;;;AAMA,SAAgB,KAAK,CAAI,CAAS,EAAE,KAAS;IAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,GAAA,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;AACvE,CAAC;AAqBD;;;;;;AAMA,SAAgB,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAY;QAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;YACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;SACrB,EACD,UAAC,GAAU;YACT,EAAE,CAAC,GAAG,CAAC,CAAC;SACT,CACF,CAAC;KACH,CAAC;AACJ,CAAC;AAED;;;;;AAKA,SAAgB,wBAAwB,CAAI,OAAuC;IACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAsB;QAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;YAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;SAC3E,EACD,UAAC,GAAU;YACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;SAC3B,CACF,CAAC;KACH,CAAC;AACJ,CAAC;AAED,SAAgB,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;IAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;AAChC,CAAC;AAED;;;;;AAKA,SAAgB,WAAW,CAAC,UAAe,EAAE,WAAkB;IAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;QAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;YAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;SAC1D,CAAC,CAAC;KACJ,CAAC,CAAC;AACL,CAAC;AAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;AAElM;;;;;AAKA,SAAgB,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;;AAOA,SAAgB,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;;AAMA,SAAgB,eAAe,CAAC,KAAU;IACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,
KAAK,UAAU,KAAK,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC;;AC/RD;AACA;IAME,oBACkB,YAAyC,EACzC,KAAe;QADf,6BAAA,EAAA,iBAAyC;QAAzC,iBAAY,GAAZ,YAAY,CAA6B;QACzC,UAAK,GAAL,KAAK,CAAU;KAC7B;IAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;QAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;YACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;SACH,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;gBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAA,CAAC,EAC5E;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;KACF;;;;;;;;;;;;IAaD,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;QACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;;;;;;;;;;QAYO,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;;YAEL,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBACxC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;gBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAe,MAAoB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAA
C,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACrF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACzF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACvF;SACF;QACD,OAAO,OAAO,CAAC;KAChB;;;;;;;;;;;;IAaD,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;QAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;;;gBAIzE,YAAY,GAAG,EAAE,CAAC;aACnB;;YAED,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;YAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;SAC/F;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;;;;;;gBAMd,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;oBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBAClC;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;gBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;gBACzF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;gBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;aAClC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;aACxC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAGC,YAAmB,CAAC,YAAY,CAAC,CAAC;aAC7C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;aAC9C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC7F;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;KAChB;IACH,iBAAC;AAAD,CAAC,IAAA;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,EAAE,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;;IAED,IAAM,GAAG,GAAGC,eAAsB,CAAC,MAAM,CAAC,CAAC;;IAE3C,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;;IAED,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CA
AC,CAAC;;IAElD,OAAOD,YAAmB,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,IAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;YAAxB,IAAM,IAAI,iBAAA;YACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YAC9C,IAAI,EAAE,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAIE,WAAiB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;aACH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,EAAE,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,EAAE,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;KACH;IACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;QACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;KACvB,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAGD,eAAsB,CAAC,KAAK,CAAC,CAAC;KACvC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,EAAE,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAClC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YACvC,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAA
Y,IAAI;sBACjB,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;sBACpC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;YACzD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,GAAG,KAAK,CAAC,WAAW,EAAE,GAAG,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,EACE,KAAK,YAAY,IAAI;iBACpB,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IAAI,CAACE,UAAgB,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;aACH;YACD,KAAK,GAAG,KAAK,CAAC;SACf;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;IAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;KACzD;IACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;KACzE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;IAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;KAC1D;IACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;QAAlC,IAAM,GAAG,SAAA;QACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;KAC5F;IACD,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;;AAKA,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;QACxC,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACb,4BAAyB,UAAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;SACH;QAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;QACvD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;SACnF;QACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;iBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IA
AI,MAAM,IAAI,SAAS,EAAE;QACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAAQ,SAAoB,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAAzB,IAAM,QAAQ,cAAA;oBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;sBAChC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;sBAChD,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;gBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;;;;wBAIjC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;wBACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC5C;yBAAM,IAAI,cAAc,CAAC,YAAY,EAAE;wBACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;qBAChF;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC1C;iBACF;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;oCAC/B,cAAc;gBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,GAAA,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;iBACH;;YARH,KAAK,IAAM,cAAc,IAAI,MAAM;wBAAxB,cAAc;aASxB;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB;IAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC3C,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;QAAtC,IAAM,GAAG,SAAA;QACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAA9C,IAAM,SAAS,SAAA;gBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBA
AsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;gBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;aACH;iBAAM;gBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;gBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;oBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;oBAC3D,IAAI,kBAAkB,EAAE;wBACtB,iBAAiB,GAAG,EAAE,CAAC;qBACxB;iBACF;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;aACH;SACF;aAAM;;YAEL,IAAI,gBAAgB,SAAA,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;;YAEvB,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;gBAArB,IAAM,IAAI,cAAA;gBACb,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;;;;;;;;;;YAUtE,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,SAAA,CAAC;;YAEpB,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;;;gBAGF,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;oBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;wBACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;qBAC5B;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,0BAA0B,EAAE;QAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;YACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;SACb,CAAC;QAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;;IAGlB,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;SACpF;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAA
S,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;;IAGlB,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;;YAEhC,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;SACxF;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;sBAC3B,kBAAkB;sBAClB,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,QACE,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,EACpE;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,QACE,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,EAC/D;AACJ,CAAC;AAoHD;AACA,SAAgB,eAAe,CAAC,WAAgB;IAC9C,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAGF,eAAsB,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,IAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;SAC/D;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;;AAGA,SAAS,OAAO,CAAmB,CAAW;IAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;QAAhB,IAAM,GAAG,UAAA;QACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,IAAa,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC;;ACtiCF;AACA,SAgKgB,iBAAiB,CAAC,MAAW;IAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,OAAO,KAAK,CAAC;KACd;IACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;QAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;QACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;QAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;QACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;QACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;QACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;AAQA;IAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;QAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;Q
ACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,GAAG,OAAO,GAAG,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;KACpC;;;;;;IAOD,+CAAyB,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;KACF;;;;;;IAOD,6BAAO,GAAP,UAAQ,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ;aACvF,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;;QAGD,IAAI,OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;;QAGD,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;;QAG1D,IAAI,OAAO,CAAC,YAAY,EAAE;YAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;YACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,KAAG,GACL,OAAO;iBACN,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,GAAG,GAAG,CAAC;iBACjC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,GAAG,cAAY,CAAC,CAAC;YACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;YAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,gBAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,EAAE,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;6BAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;6BACJ,8EAA0E,aAAa,kCAA6B,CAAA;6BACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,KAAG,GAAG,KAAG,CAAC
,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;iBACF,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;SAChB;;QAGD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,yFAAqF;oBACrF,mJAA2I,CAC9I,CAAC;aACH;;YAED,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;;YAED,IAAM,WAAW,GAAG,EAAE,CAAC;;YAEvB,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF;;YAED,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;;QAGD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAAlD,IAAM,UAAU,SAAA;gBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;;QAED,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;SAC5D;;QAGD,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;;QAGD,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,IAAI,SAAS,EAAE;;YAE7B,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;QAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QAErD,OAAO,IAAI,CAAC;KACb;;;;;IAMD,2BAAK,GAAL;QACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,
CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;KACf;IACH,kBAAC;AAAD,CAAC;;;;;;;AChhBD;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC
,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASM,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQM,SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASM,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASM,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASM,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GA
AG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CA
AC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;;;AC1WD;;;;AAIA,MAAqB,WAAY,SAAQ,WAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACD,oBAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpF
D;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;AC5DD;AACA;IAK+BG,mCAAK;IAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;QANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;QAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;KAClD;IAzBe,4BAAkB,GAAW,oBAAoB,CAAC;IAClD,+BAAqB,GAAW,uBAAuB,CAAC;IACxD,qBAAW,GAAW,aAAa,CAAC;IAwBtD,gBAAC;CAAA,CA3B8B,KAAK;;ACNpC;AACA,AAgCA;IAAA;KAwMC;IAvMO,qCAAW,GAAjB,UAAkB,WAA4B;;;;;;wBAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnD,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;yBACH;wBAEK,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;wBAE9C,IAAI,WAAW,CAAC,WAAW,EAAE;4BAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gCACnC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;6BACH;4BAED,aAAa,GAAG,UAAC,KAAY;gCAC3B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oCAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iCACzB;6BACF,CAAC;4BACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;yBAClE;wBAED,IAAI,WAAW,CAAC,OAAO,EAAE;4BACvB,UAAU,CAAC;gCACT,eAAe,CAAC,KAAK,EAAE,CAAC;6BACzB,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;yBACzB;wBAED,IAAI,WAAW,CAAC,QAAQ,EAAE;4BAClB,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;4BACrC,gBAAc,IAAI,QAAQ,EAAE,CAAC;4BAC7B,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;;gCAE9C,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oCAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iCACjB;gCACD,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oCAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iCACrD;qCAAM;oCACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iCAChC;6BACF,CAAC;4BACF,WAA2C,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gCAAlC,OAAO;gCACV,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gCACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oCAC5B,KAAS,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wCACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qCACxC;iCACF;qCAAM;oCACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iCACrC;6BACF;4BAED,WAAW,CAAC,IAAI,GAAG,aAAW,CAAC;4BAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;4BAC3B,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;4BAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gCACpE,IAAI,OAAO,aAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oCACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,mCAAiC,aAAW,CAAC,WAAW,EAAI,CAC7D,CAAC;iCACH;qCAAM;;oCAEL,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iCAC5C;6BACF;yBACF;wBAEG,IAAI,GAAG,WAAW,CAAC,IAAI;8BACvB,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;kCACpC,WAAW,CAAC,IAAI,EAAE;kCAClB,WAAW,CAAC,IAAI;8BAClB,SAAS,CAAC;wBACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;4BAChD,gBAAc,CAAC,CAAC;4BACd,kBAAkB,GAAG,IAAIC,gBAAS,CAAC;gC
ACvC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;oCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;oCAC5B,WAAW,CAAC,gBAAiB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;oCAC/C,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;iCAC5B;6BACF,CAAC,CAAC;4BAEH,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;6BAC/B;iCAAM;gCACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;6BAC9B;4BAED,IAAI,GAAG,kBAAkB,CAAC;yBAC3B;wBAEyD,qBAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,EAAA;;wBAFK,2BAA2B,GAAyB,SAEzD;wBAEK,WAAW,oBACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;;;;wBAIiC,qBAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,EAAA;;wBAAzE,QAAQ,GAAmB,SAA8C;wBAEzE,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;;4BAE7C,OAAO,EAAE,OAAO;4BAChB,OAAO,EAAE,WAAW;4BACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;4BACvB,kBAAkB,EAAE,WAAW,CAAC,kBAAkB;kCAC5C,QAAQ,CAAC,IAA0C;kCACrD,SAAS;;6BACD,CAAC,WAAW,CAAC,kBAAkB,EAA/B,wBAA+B;wBAAG,qBAAM,QAAQ,CAAC,IAAI,EAAE,EAAA;;wBAArB,KAAA,SAAqB,CAAA;;;wBAAG,KAAA,SAAS,CAAA;;;wBAPjF,iBAAiB,IAOf,aAAU,KAAqE;4BAC/E,aAAU,GAAE,QAAQ,CAAC,UAAU;4BAC/B,MAAG,GAAE,QAAQ,CAAC,GAAG;+BAClB,CAAC;wBAEI,uBAAqB,WAAW,CAAC,kBAAkB,CAAC;wBAC1D,IAAI,oBAAkB,EAAE;4BAChB,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;4BAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;gCAC9B,gBAAc,CAAC,CAAC;gCACd,oBAAoB,GAAG,IAAIA,gBAAS,CAAC;oCACzC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;wCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;wCAC5B,oBAAkB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;wCACpC,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;qCAC5B;iCACF,CAAC,CAAC;gCACH,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;gCACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;6BAC7D;iCAAM;gCACC,WAAS,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;gCACrE,IAAI,QAAM,EAAE;;oCAEV,oBAAkB,CAAC,EAAE,WAAW,EAAE,QAAM,EAAE,CAAC,CAAC;iCAC7C;6BACF;yBACF;wBAED,qBAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,EAAA;;wBAA5C,SAA4C,CAAC;wBAE7C,sBAAO,iBAAiB,EAAC;;;wBAEnB,UAAU,GAAe,OAAK,CAAC;wBACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;4BACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;6BAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;4BACxC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;wBAED,MAAM,UAAU,CAAC;;;wBAGjB,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;4BACxC,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;6BAC3C;4BACG,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;gCAC3D,kBAAkB,GAAG,gBAAgB,CAAC,iBAAkB,CAAC,kBAAkB,CAAC,CAAC;6BAC9E;4BAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;iCAChD,IAAI,CAAC;;gCACJ,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,EAAE;gCACtE,OAAO;6BACR,CAAC;iCACD,KAAK,CAAC,UAAC,EAAE,KAAO,CAAC,CAAC;yBACtB;;;;;;KAEJ;IAKH,sBAAC;AAAD,CAAC,IAAA;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB;IACxC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO;QACzB,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC1B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;KAC7B,CAAC,CAAC;AACL,CAAC;AAED,SAAgB,YAAY,CAAC,OAAgB;IAC3C,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,GAAG;QACzB,WAAW,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;KAC7B,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC;;AC/PD;AACA,AAMA;;;AAGA;IAAA;QACmB,cAAS,GAAwD,EAAE,CAAC;KAqHtF;;;;IAhHQ,sBAAG,GAAV;QACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,
CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;KAC/C;;;;;;IAOM,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;QACnD,IAAI,aAAa,EAAE;YACjB,IAAI,cAAc,IAAI,SAAS,EAAE;gBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,GAAG,cAAc,GAAG,cAAc,CAAC,QAAQ,EAAE,CAAC;gBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;KACF;;;;;IAMM,sBAAG,GAAV,UAAW,aAAqB;QAC9B,OAAO,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,SAAS,CAAC;KAClE;;;;IAKM,2BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;oBAA/C,IAAM,qBAAqB,uBAAA;oBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;KACf;;;;IAKa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY;oBAClB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB;4BACtB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB;4BACtB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;KACf;IACH,eAAC;AAAD,CAAC,IAAA;AAED;;;AAGA;IAAA;KAiPC;;;;;IAtOQ,8BAAS,GAAhB,UAAiB,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;KACF;;;;IAKM,8BAAS,GAAhB;QACE,OAAO,IAAI,CAAC,OAAO,CAAC;KACrB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAiC;QAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;YACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;;IAMM,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;;;gBAGvD,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;KACF;;;;;IAMM,+BAAU,GAAjB,UAAkB,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,G
AAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;KACF;;;;IAKM,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;;;;IAKM,6BAAQ,GAAf,UAAgB,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;KACF;;;;;;IAOM,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;QAC3E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;KACF;;;;;IAMM,2CAAsB,GAA7B,UAA8B,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC;KACtE;;;;IAKM,6BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,GAAG,SAAS,CAAC;KACzD;;;;IAKO,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;QACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI;oBAChB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;iBAC/D;aACF;SACF;KACF;IAEM,6BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;KACf;;;;;IAMM,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;KACF;IAEa,gBAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;KACf;IACH,iBAAC;AAAD,CAAC,IAAA;AAMD;IACE,kBAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;KAAI;IAEzE,eAAM,GAApB,UAAqB,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;KACrC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;KACnC;IAEa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,OAAO,IAA
I,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;KACpC;IACH,eAAC;AAAD,CAAC,IAAA;AAED;;;;AAIA,SAAgB,uBAAuB,CAAC,SAAiB;IACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,QACE,CAAC,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE;SACpD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC;SACrD,EAAE,cAAc,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,YACvD;AACJ,CAAC;AAED;;;AAGA;IAME,sBAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,GAAG,KAAK,GAAG,gBAAgB,CAAC;QACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;KACxB;;;;;IAMM,8BAAO,GAAd;QACE,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;;;;IAKM,2BAAI,GAAX;QACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa;gBACxB,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;KAC7B;IACH,mBAAC;AAAD,CAAC,IAAA;AAED;;;AAGA,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;AAGA,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;;AAGA,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;;AAIA,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;KACjC;AACH,CAAC;AAED;;;;AAIA,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;;AAIA,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;AAIA,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,SAAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,GAAA,CAAC,CAAC;AACzF,CAAC;AAED;;;;AAIA,SAAS,kBAAkB,CAAC,SAAuB;IAAE,+BAAkC;SAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;QAAlC,8CAAkC;;IACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,
CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC;;AClpBD;AACA,SAWgB,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,IAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY;YAC9D,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,KAAK,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAM,aAAa,CAAC,QAAQ,SAAI,aAAa,CAAC,QAAU,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,KAAG,aAAa,CAAC,QAAU,CAAC;KAC9D;IAED,IAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IACrE,IAAM,cAAc,GAAG,aAAa,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAC/D,IAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC3E,IAAM,YAAY,GAAG,WAAW,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAE3D,IAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAwBD,SAAgB,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAAoC;IAEpC,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAOC,qBAAqB,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAOC,oBAAoB,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAOC,oBAAoB,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAOC,mBAAmB,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC;;AChFD;AACA;IAiByCL,6CAAe;IAAxD;QAAA,qEA4EC;QA3EkB,eAAS,GAAG,IAAIM,eAAe,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;;KA2ElF;IAzEO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;;;;wBACzC,WAAW,GAA2C,EAAE,CAAC;8BAE3D
,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA,EAApD,wBAAoD;wBACjC,qBAAM,IAAI,OAAO,CAAS,UAAC,OAAO,EAAE,MAAM;gCAC7D,KAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAG,EAAE,MAAM;oCAC3D,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,CAAC,MAAM,CAAC,CAAC;qCACjB;iCACF,CAAC,CAAC;6BACJ,CAAC,EAAA;;wBARI,YAAY,GAAG,SAQnB;wBAEF,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;;;wBAGlD,IAAI,WAAW,CAAC,aAAa,EAAE;4BACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;4BACzE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;gCACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;6BAChC;iCAAM,IAAI,SAAS,EAAE;gCACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;6BAC/B;yBACF;6BAAM,IAAI,WAAW,CAAC,aAAa,EAAE;4BAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;4BACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;yBAClC;wBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;4BAClC,IAAI,WAAW,CAAC,KAAK,EAAE;gCACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;6BACpC;iCAAM;gCACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;gCACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;sCAC7C,IAAIC,WAAW,CAAC,OAAO,CAAC;sCACxB,IAAIC,UAAU,CAAC,OAAO,CAAC,CAAC;gCAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;6BAC3B;yBACF;wBAED,sBAAO,WAAW,EAAC;;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,iBAAwC;;;;;;;6BACvD,IAAI,CAAC,SAAS,EAAd,wBAAc;wBACV,oBAAkB,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;8BAChE,iBAAe,IAAI,SAAS,CAAA,EAA5B,wBAA4B;wBAC9B,qBAAM,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gCAChC,KAAI,CAAC,SAAU,CAAC,SAAS,CACvB,iBAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,UAAC,GAAG;oCACF,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,EAAE,CAAC;qCACX;iCACF,CACF,CAAC;6BACH,CAAC,EAAA;;wBAbF,SAaE,CAAC;;;;;;KAGR;IACH,0BAAC;AAAD,CA5EA,CAAyC,eAAe;;AClBxD;AACA,AAKA,WAAY,oBAAoB;;;;IAI9B,6DAAG,CAAA;;;;IAKH,iEAAK,CAAA;;;;IAKL,qEAAO,CAAA;;;;IAKP,+DAAI,CAAA;AACN,CAAC,EApBWC,4BAAoB,KAApBA,4BAAoB,QAoB/B;;AC1BD;AACA;AA2EA;;;;;AAKA,SAAgB,iBAAiB,CAAC,UAAmB;;;;;;IAMnD,MAAM,cAAc,GAAG,UAGtB,CAAC;IACF,QACE,cAAc;QACd,OAAO,cAAc,CAAC,QAAQ,KAAK,UAAU;SAC5C,cAAc,CAAC,WAAW,KAAK,SAAS,IAAI,cAAc,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC,EAChF;AACJ,CAAC;;AChGD;AACA;AAkDA;;;;;AAKA,SAAgB,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,SAAgB,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;;ACzED;AACA,SAmFgB,iBAAiB,CAAC,aAA4B;IAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QACjF,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,GAAG,IAAI,CAAC;YACd,MAAM;SACP;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;;ACjGD;AACA,SAIgB,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,OAAO,GAAG,IAAIC,cAAc,CAAC;QACjC,QAAQ,EAAE,CAAC,IAAI,IAAI,EAAE,EAAE,QAAQ;QAC/B,UAAU,EAAE;YACV,MAAM,EAAE,KAAK;SACd;KACF,CAAC,CAAC;IACH,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAgB,QAAQ,CAAC,GAAW;IAClC,IAAM,SAAS,GAAG,IAAIC,aAAa,CAAC;QAClC,aAAa,EAAE,KAAK;QACpB,eAAe,EAAE,KAAK;QACtB,YAAY,EAAE,KAAK;KACpB,CAAC,CAAC;IACH,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAQ,EAAE,GAAQ;gBAC5C,IAAI,GAAG
,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;aACF,CAAC,CAAC;SACJ;KACF,CAAC,CAAC;AACL,CAAC;;AClCD;AACA;IAmBE,2BACW,WAA0B,EAC1B,QAAkC;QADlC,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAA0B;KACzC;;;;;;IASG,qCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;KAC1C;;;;;;;IAQM,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;KACtC;IACH,wBAAC;AAAD,CAAC,IAAA;AAsBD;;;AAGA;IACE,8BAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;KAAI;;;;;;IAO7C,wCAAS,GAAhB,UAAiB,QAA8B;QAC7C,QACE,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAKF,4BAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EACxC;KACH;;;;;;;IAQM,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;KACF;IACH,2BAAC;AAAD,CAAC;;ACjGD;AACA,AAmCA;;;;AAIA,SAAgB,qBAAqB,CACnC,2BAAyD;IAEzD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAED,AAAO,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,AAAO,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF;;;;AAIA;IAA2CT,+CAAiB;IAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;QAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;QAJC,KAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,KAAK,uBAAuB,CAAC;QAC/F,KAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,KAAK,sBAAsB,CAAC;;KAC9F;IAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;gBAC/C,sBAAO,IAAI,CAAC,WAAW;yBACpB,WAAW,CAAC,OAAO,CAAC;yBACpB,IAAI,CAAC,UAAC,QAA+B;wBACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;qBAAA,CAC/E,EAAC;;;KACL;IACH,4BAAC;AAAD,CAxBA,CAA2C,iBAAiB,GAwB3D;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAgB,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;IAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;QAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;QAC7E,IAAI,iBAAiB,EAAE;YACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;YACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;gBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;gBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;gBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;qBAC/B,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;gBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;gBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;sBAC1D,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;sBACrC,CAAC,CAAC,YAAY,CAAC;gBACnB,IAAI,CAAC,oBAAoB,EAAE;oBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;oBAC/E,IAAI,mBAAmB,EAAE;wBACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;8BAChE,6BAA2B,UAAY;8BACtC,cAAc,CAAC,UAAqB,CAAC;wBAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;wBACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;wBAC9B,KAAK,CAAC,OAAO,GAAGY,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;wBAC3D,KAAK,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;
wBAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;wBAC5E,IAAI;4BACF,IAAI,mBAAmB,EAAE;gCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;gCACjC,IACE,yBAAyB;oCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;oCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCACjD;oCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;wCAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;qCACvC;oCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;wCAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;qCAC7C;iCACF;qCAAM;oCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;oCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCAC3C;oCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;wCACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;qCACvC;iCACF;gCAED,IAAI,yBAAyB,EAAE;oCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;oCAClD,IACE,aAAa,CAAC,KAAK;wCACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;wCACA,kBAAkB;4CAChB,OAAO,mBAAmB,KAAK,QAAQ;kDACnC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;kDAC9D,EAAE,CAAC;qCACV;oCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;iCACH;6BACF;yBACF;wBAAC,OAAO,YAAY,EAAE;4BACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;yBAC7J;wBACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;qBAC9B;iBACF;qBAAM,IAAI,YAAY,EAAE;oBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;wBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;wBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpF,kBAAkB;gCAChB,OAAO,kBAAkB,KAAK,QAAQ;sCAClC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;sCAC3D,EAAE,CAAC;yBACV;wBACD,IAAI;4BACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;yBACH;wBAAC,OAAO,KAAK,EAAE;4BACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;4BACF,SAAS,CAAC,OAAO,GAAGD,YAAkB,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC/D,SAAS,CAAC,QAAQ,GAAGC,aAAmB,CAAC,cAAc,CAAC,CAAC;4BACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;yBAClC;qBACF;yBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;;wBAE9C,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;qBAC7E;oBAED,IAAI,YAAY,CAAC,aAAa,EAAE;wBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;qBACH;iBACF;aACF;SACF;QACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;KACxC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;IAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;QACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;QACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;KAC1B,CAAC;IAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;QACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;cAC5C,EAAE;cACF,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,GAAA,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;gBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;aAC5B,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,GAAA,CAAC,EAAE;YAC3F,OAAO,QAAQ,C
AAC,MAAI,CAAC;iBAClB,IAAI,CAAC,UAAC,IAAI;gBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;aAC1B,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC;;ACrSD;AACA,SAyBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;AACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEnD;;;;AAIA;IAA4Cb,gDAAiB;;;;;;;;;;IA2B3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAXC,SAAS,QAAQ,CAAC,CAAM;YACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;SAC9B;QACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,GAAG,UAAU,GAAG,0BAA0B,CAAC;QACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,GAAG,aAAa,GAAG,6BAA6B,CAAC;QAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;cAC9C,gBAAgB;cAChB,iCAAiC,CAAC;QACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;cAC9C,gBAAgB;cAChB,iCAAiC,CAAC;;KACvC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAKC;QAJC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC;aAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;KAC7E;IACH,6BAAC;AAAD,CAvDA,CAA4C,iBAAiB,GAuD5D;AAED;;;;;;;;AAQA,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;IAEpB,IACE,UAAU,IAAI,SAAS;SACtB,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;QACxC,UAAU,KAAK,GAAG;QAClB,UAAU,KAAK,GAAG,EAClB;QACA,OAAO,KAAK,CAAC;KACd;IAED,IAAI,YAAoB,CAAC;IACzB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;;AAOA,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;IAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;QAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;IAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;QAC7E,OAAOc,KACC,CAAC,SAAS,CAAC,aAAa,CAAC;aAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,GAAA,CAAC;aAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,GAAA,CAAC;aAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,GAAA,CAAC,CAAC;KACrE;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;;QAEjD,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,C
ACT,CAAC;QACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM;QACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAClC;AACH,CAAC;;AC3ND;AACA,SAYgB,6BAA6B,CAC3C,mBAA8C;IAA9C,oCAAA,EAAA,8CAA8C;IAE9C,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;SACpF;KACF,CAAC;AACJ,CAAC;AAED;IAAmDd,uDAAiB;IAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;QAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;KAGrC;IAEM,mDAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAEe,YAAkB,EAAE,CAAC,CAAC;SACtE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,oCAAC;AAAD,CAfA,CAAmD,iBAAiB,GAenE;;ACtCD;AACA,SAMgB,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,SAAgB,uBAAuB;IACrC,IAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,MAAIC,OAAO,EAAE,SAAIC,OAAO,EAAE,SAAIC,UAAU,EAAE,MAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC;;ACvBD;AACA,AAgBA,SAAS,cAAc;IACrB,IAAM,aAAa,GAAG;QACpB,GAAG,EAAE,YAAY;QACjB,KAAK,EAAE,SAAS,CAAC,aAAa;KAC/B,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;IADpB,6BAAA,EAAA,kBAAkB;IAClB,+BAAA,EAAA,oBAAoB;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,UAAC,IAAI;QACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,GAAG,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,GAAG,EAAE,CAAC;QACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;KAC9B,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,AAAO,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,SAAgB,wBAAwB;IACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAgB,eAAe,CAAC,aAA6B;IAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,GAAG,sBAAsB,EAAE,GAAG,aAAa,CAAC,GAAG,CAAC;IAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;UAC9C,wBAAwB,EAAE;UAC1B,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAqClB,yCAAiB;IACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;QAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;QANU,iBAAW,GAAX,WAAW,CAAe;QAC1B,cAAQ,GAAR,QAAQ,CAA0B;QACjC,eAAS,GAAT,SAAS,CAAQ;QACjB,iBAAW,GAAX,WAAW,CAAQ;;KAG9B;IAED,qCAAW,GAAX,UAAY,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IAED,4CAAkB,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;KACF;IACH,sBAAC;AAAD,CAxBA,CAAqC,iBAAiB,GAwBrD;;ACvFD;AACA,AA4BO,IAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF,SAAgB,cAAc,CAAC,cAAmB;IAAnB,+BAAA,EAAA,mBAAmB;IAChD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;SAChE;KACF,CAAC;AACJ,CAAC;AAED;IAAoCA,wCAAiB;IACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;QAAf,2BAAA,EAAA,eAAe;QAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,gBAAU,GAAV,UAAU,CAAK;;KAGzB;IAEM,oCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,GAAA,CAAC,CAAC;KAC1D;IACH,qBAAC;AAAD,CAdA,CAAo
C,iBAAiB,GAcpD;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;IACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;SACb,MAAM,KAAK,GAAG;aACZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aAC3D,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;aACnE,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;SAChB,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa;aAC5E,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;QACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;;;;QAKjC,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,KAAK,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;YACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,GAAA,CAAC;aAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;KACpD;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;;;IAGvE,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;QACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;KACzB;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;;SC5Fe,oBAAoB,CAAC,YAAiB;IAAjB,6BAAA,EAAA,iBAAiB;IACpD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;SACpE;KACF,CAAC;AACJ,CAAC;AAED;IAA0CA,8CAAiB;IACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;QAAlB,8BAAA,EAAA,kBAAkB;QAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,mBAAa,GAAb,aAAa,CAAK;;KAG5B;IAEM,0CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;KAClE;IACH,2BAAC;AAAD,CAdA,CAA0C,iBAAiB,GAc1D;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,QACE,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;;;;iBAI3C,KAAK,CAAC,cAAM,OAAA,KAAK,GAAA,CAAC;iBAClB,IAAI,CAAC,UAAC,kBAAkB;gBACvB,IAAI,kBAAkB,EAAE;;;oBAGtB,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEe,YAAkB,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;aACjB,CAAC,EACJ;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;;AAMA,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;IAAnB,4BAAA,EAAA,mBAAmB;IAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;;;IAID,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAEA,YAAkB,EAAE,CAAC,CAAC;;IAGvE,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;;AAMA,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAG,EAAE;;SAEb;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CAAC,IAAI,KAAK,iCAAiC,EAC7D;YACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CA
AC;AAED;;;;;;AAMA,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;IAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;;;AASA,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;IACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;IAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;QAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;SAC7F;QACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;KAC/D,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;;AASA,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;QACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;QAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;YACrF,OAAO,IAAI,CAAC;SACb;aAAM;YACL,OAAOI,KACC,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;iBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,GAAA,CAAC,CAAC;SACpE;KACF,CAAC,CAAC;AACL,CAAC;;ACpMD;AACA,SAYgB,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;SACvE;KACF,CAAC;AACJ,CAAC;AAED;IAAmCnB,uCAAiB;IAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;QAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;KAGxD;IAED,mCAAW,GAAX,UAAY,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KACzD;IAEM,mCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;YAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;SAAA,CAC1C,CAAC;KACH;IACH,oBAAC;AAAD,CAlBA,CAAmC,iBAAiB,GAkBnD;;ACzCD;AACA,SAwBgB,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;SACH;KACF,CAAC;AACJ,CAAC;AAED;;;;;;;;;;AAUA;IAA4CA,gDAAiB;IAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;QAC1C,gCAA0B,GAAG,CAAC,CAAC;QAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;QAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,GAAG,UAAU,GAAG,KAAI,CAAC,0BAA0B,CAAC;QAChG,KAAI,CAAC,aAAa;YAChB,OAAO,aAAa,KAAK,QAAQ,GAAG,aAAa,GAAG,KAAI,CAAC,6BAA6B,CAAC;QACzF,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;kBAChC,gBAAgB;kBAChB,KAAI,CAAC,iCAAiC,CAAC;QAC7C,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;kBAChC,gBAAgB;kBAChB,KAAI,CAAC,iCAAiC,CAAC;;KAC9C;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAAoB,OAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAA,CAAC,CAAC;KAClE;IACH,6BAAC;AAAD,CArCA,CAA4C,iBAAiB,GAqC5D;AAED;;;;;;;AAOA,SAASC,aAAW,CAAC,MAA8B,EAAE,SAAoB;IACvE,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;AAMA,SAASC,iBAAe,CACtB,MAA8B,EAC9B,SAAqB,EACr
B,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;;IAGD,SAAS,CAAC,UAAU,EAAE,CAAC;;IAGvB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAeF,OAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;oBAErB,SAAS,GAAGE,iBAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;0BAElD,GAAG;wBACH,GAAG,CAAC,IAAI;wBACRD,aAAW,CAAC,MAAM,EAAE,SAAS,CAAC;yBAC7B,GAAG,CAAC,IAAI,KAAK,WAAW;4BACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;4BAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;4BAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;4BACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;oBAItB,qBAAME,KAAW,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;oBAA1C,SAA0C,CAAC;oBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;oBAEvD,sBAAOH,OAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;oBAGrE,IAAI,GAAG,EAAE;;wBAEP,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;qBACxC;oBACD,sBAAO,iBAAiB,EAAC;;;;;CAE5B;;AC1LD;AACA,AAKA,WAAY,qBAAqB;IAC/B,kCAAS,CAAA;IACT,kCAAS,CAAA;IACT,mCAAU,CAAA;IACV,oCAAW,CAAA;IACX,wCAAe,CAAA;AACjB,CAAC,EANWI,6BAAqB,KAArBA,6BAAqB,QAMhC;;ACZD;AACA,SAYgB,WAAW,CAAC,aAA6B;IACvD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAiCxB,qCAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;KACpC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,kBAAC;AAAD,CAlBA,CAAiC,iBAAiB,GAkBjD;;ACvCD;AACA,AAcA;;;AAGA,AAAO,IAAM,WAAW,GAAa,WAAW,EAAE,CAAC;AACnD,IAAM,YAAY,GAAyB,IAAI,GAAG,EAAE,CAAC;AAErD;;;AAGA,SAAgB,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,IAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,IAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,IAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED;AACA;AACA;AACA,SAAS,UAAU,CAAC,GAAW;IAC7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,IAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC/B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAsB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA9B,IAAM,OAAO,oBAAA;QAChB,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;;;YAGtB,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,YAAY,CAAC
,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;AAGA,SAAgB,WAAW;IACzB,IAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,IAAI,EAAE,GAAA,CAAC;aAC1B,MAAM,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,MAAM,GAAA,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;AAGA,SAAS,kBAAkB,CACzB,GAAW;IAEX,IAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,IAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,IAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,GAAG,WAAW,GAAG,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,IAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,IAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,IAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,GAAG,IAAI,CAAC;IACpE,IAAM,QAAQ,GAAG,WAAW,GAAG,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,GAAG,SAAS,CAAC;IAC1E,IAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,cAAc,gBAAA;KACf,CAAC;AACJ,CAAC;AAED,SAAgB,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAEK,IAAA,KAAyC,kBAAkB,CAAC,QAAQ,CAAC,EAAnE,QAAQ,cAAA,EAAE,QAAQ,cAAA,EAAE,cAAc,oBAAiC,CAAC;IAC5E,IAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,IAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,GAAG,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ,UAAA;QACR,QAAQ,UAAA;KACT,CAAC;AACJ,CAAC;AAED,SAAgB,WAAW,CAAC,aAA6B;IACvD,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;SAC7D;KACF,CAAC;AACJ,CAAC;AAED;IAAiCA,qCAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;KACpC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACtD,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;IACH,kBAAC;AAAD,CAlBA,CAAiC,iBAAiB,GAkBjD;;ACvKD;AACA,AAaA,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,SAAgB,qBAAqB,CACnC,UAAwC;IAAxC,2BAAA,EAAA,gCAAwC;IAExC,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;SACnE;KACF,CAAC;AACJ,CAAC;AAED;;;;;;AAMA;IAA2CA,+CAAiB;IAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;QAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;KAC9B;IAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;gBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;wBACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;qBAC7C,CAAC,EAAC;;;KACJ;IAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;wBAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;4BACvD,sBAAO,YAAY,EAAC;yBACrB;wBAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;8BAEE,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;wBAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;6BACE,SAAS,EAAT,wBAAS;wBACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;wBAAtB,SAAsB,CAAC;wBACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;wBAArD,GAAG,GA
AG,SAA+C;wBAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;4BAIxD,sBAAO,YAAY,EAAC;;;;KACrB;IAEa,2CAAqB,GAAnC,UAAoC,WAAmB;QACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;KACF;IAEa,+CAAyB,GAAvC,UAAwC,WAAmB;QACzD,IAAI;YACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,SAAS,CAAC;SAClB;KACF;IACH,4BAAC;AAAD,CA7DA,CAA2C,iBAAiB,GA6D3D;;ACxGD;AACA,AASA,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;;AAGA,AAAO,IAAM,6BAA6B,GAAG;IAC3C,gCAAgC;IAChC,qCAAqC;IACrC,sCAAsC;IACtC,gCAAgC;CACjC,CAAC;AAEF;;;;AAIA;IAGE,wCACE,oBAAqC,EACrC,MAAmE;QAAnE,uBAAA,EAAA,gDAAmE;QAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;QACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;IAEY,iDAAQ,GAArB;;;;;4BACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;wBAAnE,WAAW,GAAG,SAAqD;wBACzE,IAAI,WAAW,KAAK,IAAI,EAAE;4BAClB,MAAM,GAAkB;gCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;gCAC9B,SAAS,EAAE,4BAA4B;gCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;6BAC1C,CAAC;4BACF,sBAAO,MAAM,EAAC;yBACf;6BAAM;4BACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;yBAC/C;;;;KACF;IAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;4BACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;wBAArC,aAAa,GAAG,SAAqB;wBAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrByB,SAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;wBACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;KACrC;IACH,qCAAC;AAAD,CAAC;;AC3DD;AACA,AA+JA;;;;AAIA;;;;;;;IAgCE,uBACE,WAAwD,EACxD,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;QAED,IAAI,wBAA8D,CAAC;QACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;YAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;YAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;gBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;aACvC;YACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;SACnF;aAAM;YACL,wBAAwB,GAAG,WAAW,CAAC;SACxC;QAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAIC,mBAAiB,EAAE,CAAC;QACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,sBAAsB,CAAC,EAAE;YACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;YACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACpF,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;KACvD;;;;IAKD,mCAAW,GAAX,UAAY,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CA
AC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;KAC9C;;;;;;;IAQD,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;QAEtC,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;oBAAnD,IAAM,YAAY,SAAA;oBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;oBAAvD,IAAM,cAAc,SAAA;oBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;wBACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;wBACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;4BAChD,IAAI,cAAc,CAAC,gBAAgB,KAAKF,6BAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,mBAAmB,GAAG,EAAE,CAAC;iCAC1B;qCAAM;oCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,GAAG,EAAE,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC;qCACvE;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;oCACvC,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;4BAC5C,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAKA,6BAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,EAAE;gBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;oBAAzD,IAAM,eAAe,SAAA;oBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAA
C,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,IAAI,SAAS,EAAE;wBAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;wBACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;gCAAvC,IAAM,GAAG,SAAA;gCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;gBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;aACnE;YAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;gBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;aAAA,CAC1D,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAChC;QAED,IAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;;iBAEH,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,GAAA,CAAC;iBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,GAAA,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;KACf;IACH,oBAAC;AAAD,CAAC,IAAA;SAEe,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;IAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;QACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI;YACF,IAAI,WAAW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;gBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;gBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7BG,kBAAwB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;4BAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;yBACpC,CAAC,CAAC;qBACJ;iBACF;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAA
E;YAA7D,IAAM,iBAAiB,SAAA;YAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;gBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,QAAa;IAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;AAC/C,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;IAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,WAAW,EAAE;QACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;YACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC7B;aAAM;YACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;SAC5C;KACF;IAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IAED,IAAM,eAAe,qCAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;IACF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KAC5D;IAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;IACzE,IAAI,aAAa,EAAE;QACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,CAAC,aAAa,EAAE;QACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAID,AAkBA,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,SAAgB,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;6BACvB,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,GAAG,eAAe,CAAC,YAAY,GAAG,oBAAoB,CAAC,aAAa,CAAC;aAC7F;;YAGD,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;SACnE;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;YACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,
YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;;YAEF,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;YACxE,IAAI,aAAa,KAAK,SAAS,EAAE;gBAC/B,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;;QAEnD,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;YACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAgB,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;QACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YACtC,KAAK,EAAE,SAAS;SACjB,CAAC;KAAA,CAAC;IAEL,IAAI,UAAU,EAAE;QACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,mCACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,KAAK,EAAE,CAAC;QAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,GAAA,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;;;YAGjD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,GAAG,SAAS,CAAC,UAAU,GAAG,EAAE,CAAC;YACnF,IAAM,aAAa,GAAGC,qBAAI,UAAU,CAAyB,CAAC;YAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAA3C,IAAM,GAAG,SAAA;gBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;oBAAzC,IAAM,GAAG,SAAA;oBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;YACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,mCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnCC,eAAqB,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;;QAEA,OAAO,oBAAoB,mCACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,mCACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC;;ACn1BD;AACA,SAWgB,SAAS,CAAC,MAAyB;IAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;IACjD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;SACnD;KACF,CAAC;AACJ,CAAC;AAED;IAA+B7B,mCAAiB;IAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;KACtB;IAEM,+BAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAEC;QADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,GAAA,CAAC,CAAC;KAC9F;IACH,gBAAC;AAAD,CAfA,CAA+B,iBAAiB,GAe/C;AAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;IAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;IAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;IAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;IACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;IAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;;AC9CD;AACA,A
AOA,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM8B,8BAA4B,GAAG,QAAQ,CAAC;AAE9C;;;AAGA;;;;;;;;IAWE,0BAAY,KAAa,EAAE,mBAA0D;QAA1D,oCAAA,EAAA,oDAA0D;QATrF,wBAAmB,GAAWA,8BAA4B,CAAC;QAUzD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;SACvD;QACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;;;;;;;IAQD,sCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;QACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,uBAAC;AAAD,CAAC;;AC/CD;AACA,AAOA,IAAMC,iBAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAMD,8BAA4B,GAAG,OAAO,CAAC;AAE7C;;;;;;;;;IAaE,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;QAA1D,oCAAA,EAAA,oDAA0D;QAb5D,wBAAmB,GAAWA,8BAA4B,CAAC;QAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;KAChD;;;;;;;IAQD,oDAAW,GAAX,UAAY,WAA4B;QACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;QACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAIE,YAAmB,CAAC,WAAW,CAAG,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAACD,iBAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,qCAAC;AAAD,CAAC;;ACrDD;AACA,AAqBA;;;AAGA;;;;;IAcE,2BAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,KAAK,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;KAChC;;;;;;;IAQD,uCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;KACrC;IACH,wBAAC;AAAD,CAAC;;ACxFD;AACA;IAIsC/B,0CAAiB;;;;;;;IAOrD,0BAAY,QAAgB;QAA5B,iBAUC;QATC,IAAI,CAAC,QAAQ,KAAK,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;KAChB;IACH,uBAAC;AAAD,CAlBA,CAAsC,iBAAiB;;ACLvD;AACA;IAIuCA,2CAAiB;;;;;;;IAOtD,2BAAY,SAAiB;QAA7B,iBAUC;QATC,IAAI,CAAC,SAAS,KAAK,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;YAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;SACtF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,SAAS;aACzB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;KAChB;IACH,wBAAC;AAAD,CAlBA,CAAuC,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end 
of file diff --git a/node_modules/@azure/ms-rest-js/dom-shim.d.ts b/node_modules/@azure/ms-rest-js/dom-shim.d.ts new file mode 100644 index 0000000000..cb4ac4d933 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/dom-shim.d.ts @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +// d.ts shims provide types for things we use internally but are not part +// of this package's surface area. + +interface Request {} +interface RequestInit {} +interface Response {} +interface Headers {} diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts new file mode 100644 index 0000000000..8fa67fd51f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts @@ -0,0 +1,9 @@ +import { CommonRequestInfo, CommonRequestInit, CommonResponse, FetchHttpClient } from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class BrowserFetchHttpClient extends FetchHttpClient { + prepareRequest(_httpRequest: WebResourceLike): Promise>; + processRequest(_operationResponse: HttpOperationResponse): Promise; + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; +} +//# sourceMappingURL=browserFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map new file mode 100644 index 0000000000..a3ae8cab53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"browserFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/browserFetchHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,qBAAa,sBAAuB,SAAQ,eAAe;IACzD,cAAc,CAAC,YAAY,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAI5E,cAAc,CAAC,kBAAkB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;IAIxE,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;CAGnF"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js new file mode 100644 index 0000000000..eea907fca6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { FetchHttpClient, } from "./fetchHttpClient"; +var BrowserFetchHttpClient = /** @class */ (function (_super) { + __extends(BrowserFetchHttpClient, _super); + function BrowserFetchHttpClient() { + return _super !== null && _super.apply(this, arguments) || this; + } + BrowserFetchHttpClient.prototype.prepareRequest = function (_httpRequest) { + return Promise.resolve({}); + }; + BrowserFetchHttpClient.prototype.processRequest = function (_operationResponse) { + return Promise.resolve(); + }; + BrowserFetchHttpClient.prototype.fetch = function (input, init) { + return fetch(input, init); + }; + return BrowserFetchHttpClient; +}(FetchHttpClient)); +export { BrowserFetchHttpClient }; +//# sourceMappingURL=browserFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map new file mode 100644 index 0000000000..a47796d8af --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/browserFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"browserFetchHttpClient.js","sourceRoot":"","sources":["../../lib/browserFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAI3B;IAA4C,0CAAe;IAA3D;;IAYA,CAAC;IAXC,+CAAc,GAAd,UAAe,YAA6B;QAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;IAC7B,CAAC;IAED,+CAAc,GAAd,UAAe,kBAAyC;QACtD,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;IAC3B,CAAC;IAED,sCAAK,GAAL,UAAM,KAAwB,EAAE,IAAwB;QACtD,OAAO,KAAK,CAAC,KAAK,EAAE,IAAI,CAAC,CAAC;IAC5B,CAAC;IACH,6BAAC;AAAD,CAAC,AAZD,CAA4C,eAAe,GAY1D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts new file mode 100644 index 0000000000..f566c6f0c9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts @@ -0,0 +1,46 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +/** + * @interface ApiKeyCredentialOptions + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { + [x: string]: any; + }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + inQuery?: { + [x: string]: any; + }; +} +/** + * Authenticates to a service using an API key. + */ +export declare class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?; + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions); + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. 
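For readability, the compiled ES5 class in browserFetchHttpClient.js above corresponds roughly to the following TypeScript source. This is a reconstruction from the declaration and compiled output added by this diff, not a file shipped by the package; the Promise type arguments (Partial<RequestInit>, void, CommonResponse) are assumptions taken from the FetchHttpClient base-class contract.

import {
  CommonRequestInfo,
  CommonRequestInit,
  CommonResponse,
  FetchHttpClient,
} from "./fetchHttpClient";
import { HttpOperationResponse } from "./httpOperationResponse";
import { WebResourceLike } from "./webResource";

// Browser HTTP client: no extra request preparation or post-processing, just delegate to global fetch.
export class BrowserFetchHttpClient extends FetchHttpClient {
  prepareRequest(_httpRequest: WebResourceLike): Promise<Partial<RequestInit>> {
    return Promise.resolve({});
  }

  processRequest(_operationResponse: HttpOperationResponse): Promise<void> {
    return Promise.resolve();
  }

  fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise<CommonResponse> {
    return fetch(input, init);
  }
}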
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=apiKeyCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map new file mode 100644 index 0000000000..6a91bfcc27 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAEtE;;;GAGG;AACH,MAAM,WAAW,uBAAuB;IACtC;;OAEG;IACH,QAAQ,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAChC;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;CAChC;AAED;;GAEG;AACH,qBAAa,iBAAkB,YAAW,wBAAwB;IAChE;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAuB;IACjD;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAuB;IAEhD;;;OAGG;gBACS,OAAO,EAAE,uBAAuB;IAU5C;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;CAiCpE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js new file mode 100644 index 0000000000..587a24af1d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js @@ -0,0 +1,56 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +/** + * Authenticates to a service using an API key. + */ +var ApiKeyCredentials = /** @class */ (function () { + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + function ApiKeyCredentials(options) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error("options cannot be null or undefined. Either \"inHeader\" or \"inQuery\" property of the options object needs to be provided."); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. 
+ */ + ApiKeyCredentials.prototype.signRequest = function (webResource) { + if (!webResource) { + return Promise.reject(new Error("webResource cannot be null or undefined and must be of type \"object\".")); + } + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (var headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error("url cannot be null in the request object.")); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (var key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += key + "=" + this.inQuery[key]; + } + } + return Promise.resolve(webResource); + }; + return ApiKeyCredentials; +}()); +export { ApiKeyCredentials }; +//# sourceMappingURL=apiKeyCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map new file mode 100644 index 0000000000..7f3fc82d59 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/apiKeyCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"apiKeyCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/apiKeyCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAmB7C;;GAEG;AACH;IAUE;;;OAGG;IACH,2BAAY,OAAgC;QAC1C,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YAClE,MAAM,IAAI,KAAK,CACb,8HAA0H,CAC3H,CAAC;SACH;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACH,uCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,EAAE;YAChB,OAAO,OAAO,CAAC,MAAM,CACnB,IAAI,KAAK,CAAC,yEAAuE,CAAC,CACnF,CAAC;SACH;QAED,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,IAAI,CAAC,WAAW,CAAC,OAAO,EAAE;gBACxB,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACzC;YACD,KAAK,IAAM,UAAU,IAAI,IAAI,CAAC,QAAQ,EAAE;gBACtC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;aAChE;SACF;QAED,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE;gBACpB,OAAO,OAAO,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,2CAA2C,CAAC,CAAC,CAAC;aAC/E;YACD,IAAI,WAAW,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;gBACpC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;aACxB;YACD,KAAK,IAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;gBAC9B,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAClC,WAAW,CAAC,GAAG,IAAI,GAAG,CAAC;iBACxB;gBACD,WAAW,CAAC,GAAG,IAAO,GAAG,SAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAG,CAAC;aAClD;SACF;QAED,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,wBAAC;AAAD,CAAC,AA/DD,IA+DC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts new file mode 100644 index 0000000000..7cb357ee64 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts @@ -0,0 +1,20 @@ +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { WebResource } from "../webResource"; +import { TokenCredential } from "@azure/core-auth"; +import { TokenResponse } from "./tokenResponse"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. 
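As a usage sketch for the ApiKeyCredentials class added above: either inHeader or inQuery (or both) must be supplied, and signRequest copies them onto the outgoing request. The header name, query parameter, and endpoint below are placeholders, not values from this diff.

import { ApiKeyCredentials, WebResource } from "@azure/ms-rest-js";

const credentials = new ApiKeyCredentials({
  inHeader: { "x-api-key": "<key-value>" },  // placeholder header name and value
  inQuery: { "api-version": "2020-01-01" },  // placeholder query parameter
});

async function signExample(): Promise<void> {
  const request = new WebResource("https://example.invalid/items", "GET"); // placeholder URL
  const signed = await credentials.signRequest(request);
  // The "x-api-key" header is now set and "?api-version=2020-01-01" has been appended to the URL.
  console.log(signed.headers.get("x-api-key"), signed.url);
}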
+ */ +export declare const azureResourceManagerEndpoints: string[]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. + */ +export declare class AzureIdentityCredentialAdapter implements ServiceClientCredentials { + private azureTokenCredential; + private scopes; + constructor(azureTokenCredential: TokenCredential, scopes?: string | string[]); + getToken(): Promise; + signRequest(webResource: WebResource): Promise; +} +//# sourceMappingURL=azureIdentityTokenCredentialAdapter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map new file mode 100644 index 0000000000..fff7fbdc57 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"azureIdentityTokenCredentialAdapter.d.ts","sourceRoot":"","sources":["../../../lib/credentials/azureIdentityTokenCredentialAdapter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAEtE,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAC;AACnD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAIhD;;GAEG;AACH,eAAO,MAAM,6BAA6B,UAKzC,CAAC;AAEF;;;GAGG;AACH,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E,OAAO,CAAC,oBAAoB,CAAkB;IAC9C,OAAO,CAAC,MAAM,CAAoB;gBAEhC,oBAAoB,EAAE,eAAe,EACrC,MAAM,GAAE,MAAM,GAAG,MAAM,EAA4C;IAMxD,QAAQ,IAAI,OAAO,CAAC,aAAa,CAAC;IAclC,WAAW,CAAC,WAAW,EAAE,WAAW;CAQlD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js new file mode 100644 index 0000000000..3fdea9bc4d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __generator } from "tslib"; +import { Constants as MSRestConstants } from "../util/constants"; +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +export var azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. 
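A sketch of how this adapter is typically used: wrap a TokenCredential from @azure/identity so that older clients expecting ServiceClientCredentials can authenticate with it. DefaultAzureCredential is an assumption here (the @azure/identity library is referenced in the comments above but is not part of this diff), as is the re-export of the adapter from the package root; when no scope is passed, the implementation that follows defaults to "https://management.azure.com/.default".

import { AzureIdentityCredentialAdapter } from "@azure/ms-rest-js";
import { DefaultAzureCredential } from "@azure/identity"; // assumption: @azure/identity is installed

// Adapt a modern TokenCredential to the legacy ServiceClientCredentials shape.
const legacyCredentials = new AzureIdentityCredentialAdapter(new DefaultAzureCredential());

async function tokenExample(): Promise<void> {
  const response = await legacyCredentials.getToken();
  // tokenType is "Bearer" (DEFAULT_AUTHORIZATION_SCHEME above); expiresOn is a timestamp.
  console.log(response.tokenType, response.expiresOn);
}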
+ */ +var AzureIdentityCredentialAdapter = /** @class */ (function () { + function AzureIdentityCredentialAdapter(azureTokenCredential, scopes) { + if (scopes === void 0) { scopes = "https://management.azure.com/.default"; } + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + AzureIdentityCredentialAdapter.prototype.getToken = function () { + return __awaiter(this, void 0, void 0, function () { + var accessToken, result; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.azureTokenCredential.getToken(this.scopes)]; + case 1: + accessToken = _a.sent(); + if (accessToken !== null) { + result = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return [2 /*return*/, result]; + } + else { + throw new Error("Could find token for scope"); + } + return [2 /*return*/]; + } + }); + }); + }; + AzureIdentityCredentialAdapter.prototype.signRequest = function (webResource) { + return __awaiter(this, void 0, void 0, function () { + var tokenResponse; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: return [4 /*yield*/, this.getToken()]; + case 1: + tokenResponse = _a.sent(); + webResource.headers.set(MSRestConstants.HeaderConstants.AUTHORIZATION, tokenResponse.tokenType + " " + tokenResponse.accessToken); + return [2 /*return*/, Promise.resolve(webResource)]; + } + }); + }); + }; + return AzureIdentityCredentialAdapter; +}()); +export { AzureIdentityCredentialAdapter }; +//# sourceMappingURL=azureIdentityTokenCredentialAdapter.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map new file mode 100644 index 0000000000..fa8bacdf19 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/azureIdentityTokenCredentialAdapter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"azureIdentityTokenCredentialAdapter.js","sourceRoot":"","sources":["../../../lib/credentials/azureIdentityTokenCredentialAdapter.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EAAE,SAAS,IAAI,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAMjE,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;GAEG;AACH,MAAM,CAAC,IAAM,6BAA6B,GAAG;IAC3C,gCAAgC;IAChC,qCAAqC;IACrC,sCAAsC;IACtC,gCAAgC;CACjC,CAAC;AAEF;;;GAGG;AACH;IAGE,wCACE,oBAAqC,EACrC,MAAmE;QAAnE,uBAAA,EAAA,gDAAmE;QAEnE,IAAI,CAAC,oBAAoB,GAAG,oBAAoB,CAAC;QACjD,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAEY,iDAAQ,GAArB;;;;;4BACsB,qBAAM,IAAI,CAAC,oBAAoB,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,EAAA;;wBAAnE,WAAW,GAAG,SAAqD;wBACzE,IAAI,WAAW,KAAK,IAAI,EAAE;4BAClB,MAAM,GAAkB;gCAC5B,WAAW,EAAE,WAAW,CAAC,KAAK;gCAC9B,SAAS,EAAE,4BAA4B;gCACvC,SAAS,EAAE,WAAW,CAAC,kBAAkB;6BAC1C,CAAC;4BACF,sBAAO,MAAM,EAAC;yBACf;6BAAM;4BACL,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;yBAC/C;;;;;KACF;IAEY,oDAAW,GAAxB,UAAyB,WAAwB;;;;;4BACzB,qBAAM,IAAI,CAAC,QAAQ,EAAE,EAAA;;wBAArC,aAAa,GAAG,SAAqB;wBAC3C,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,eAAe,CAAC,aAAa,EAC1C,aAAa,CAAC,SAAS,SAAI,aAAa,CAAC,WAAa,CAC1D,CAAC;wBACF,sBAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,EAAC;;;;KACrC;IACH,qCAAC;AAAD,CAAC,AAjCD,IAiCC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts new file mode 100644 index 0000000000..c313e5257f --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts @@ -0,0 +1,24 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +export declare class BasicAuthenticationCredentials implements ServiceClientCredentials { + userName: string; + password: string; + authorizationScheme: string; + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(userName: string, password: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=basicAuthenticationCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map new file mode 100644 index 0000000000..59a2b8c952 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAItE,qBAAa,8BAA+B,YAAW,wBAAwB;IAC7E,QAAQ,EAAE,MAAM,CAAC;IACjB,QAAQ,EAAE,MAAM,CAAC;IACjB,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;;OAOG;gBAED,QAAQ,EAAE,MAAM,EAChB,QAAQ,EAAE,MAAM,EAChB,mBAAmB,GAAE,MAAqC;IAa5D;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe;CAOzC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js new file mode 100644 index 0000000000..e1553b0b2e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +var BasicAuthenticationCredentials = /** @class */ (function () { + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. 
+ */ + function BasicAuthenticationCredentials(userName, password, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. + */ + BasicAuthenticationCredentials.prototype.signRequest = function (webResource) { + var credentials = this.userName + ":" + this.password; + var encodedCredentials = this.authorizationScheme + " " + base64.encodeString(credentials); + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + }; + return BasicAuthenticationCredentials; +}()); +export { BasicAuthenticationCredentials }; +//# sourceMappingURL=basicAuthenticationCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map new file mode 100644 index 0000000000..a89f01e687 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/basicAuthenticationCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"basicAuthenticationCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/basicAuthenticationCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,KAAK,MAAM,MAAM,gBAAgB,CAAC;AACzC,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAG9C,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM,4BAA4B,GAAG,OAAO,CAAC;AAE7C;IAKE;;;;;;;OAOG;IACH,wCACE,QAAgB,EAChB,QAAgB,EAChB,mBAA0D;QAA1D,oCAAA,EAAA,kDAA0D;QAb5D,wBAAmB,GAAW,4BAA4B,CAAC;QAezD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,QAAQ,KAAK,IAAI,IAAI,QAAQ,KAAK,SAAS,IAAI,OAAO,QAAQ,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACzF,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,oDAAW,GAAX,UAAY,WAA4B;QACtC,IAAM,WAAW,GAAM,IAAI,CAAC,QAAQ,SAAI,IAAI,CAAC,QAAU,CAAC;QACxD,IAAM,kBAAkB,GAAM,IAAI,CAAC,mBAAmB,SAAI,MAAM,CAAC,YAAY,CAAC,WAAW,CAAG,CAAC;QAC7F,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,aAAa,EAAE,kBAAkB,CAAC,CAAC;QAC3E,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,qCAAC;AAAD,CAAC,AA1CD,IA0CC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts new file mode 100644 index 0000000000..34c7db7a02 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts @@ -0,0 +1,2 @@ 
+export declare type Authenticator = (challenge: object) => Promise; +//# sourceMappingURL=credentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map new file mode 100644 index 0000000000..81ab42b083 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/credentials.ts"],"names":[],"mappings":"AAGA,oBAAY,aAAa,GAAG,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js new file mode 100644 index 0000000000..a9eeec2e99 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map new file mode 100644 index 0000000000..939147b7a3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/credentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"credentials.js","sourceRoot":"","sources":["../../../lib/credentials/credentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts new file mode 100644 index 0000000000..481819ca8a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts @@ -0,0 +1,11 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +export declare class DomainCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + constructor(domainKey: string); +} +//# sourceMappingURL=domainCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map new file mode 100644 index 0000000000..0295f07f7a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"domainCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/domainCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF,qBAAa,iBAAkB,SAAQ,iBAAiB;IACtD;;;;;OAKG;gBACS,SAAS,EAAE,MAAM;CAW9B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js new file mode 100644 index 0000000000..51cb0ae1c4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { ApiKeyCredentials } from "./apiKeyCredentials"; +var DomainCredentials = /** @class */ (function (_super) { + __extends(DomainCredentials, _super); + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + function DomainCredentials(domainKey) { + var _this = this; + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return DomainCredentials; +}(ApiKeyCredentials)); +export { DomainCredentials }; +//# sourceMappingURL=domainCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map new file mode 100644 index 0000000000..579d63e52f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/domainCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"domainCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/domainCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF;IAAuC,qCAAiB;IACtD;;;;;OAKG;IACH,2BAAY,SAAiB;QAA7B,iBAUC;QATC,IAAI,CAAC,SAAS,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EAAE;YAC9D,MAAM,IAAI,KAAK,CAAC,mEAAmE,CAAC,CAAC;SACtF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,SAAS;aACzB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;IACjB,CAAC;IACH,wBAAC;AAAD,CAAC,AAlBD,CAAuC,iBAAiB,GAkBvD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts new file mode 100644 index 0000000000..af0bbacae7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts @@ -0,0 +1,11 @@ +import { WebResourceLike } from "../webResource"; +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike/request to be signed. 
+ * @returns {Promise} The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=serviceClientCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map new file mode 100644 index 0000000000..2c091a370c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD,MAAM,WAAW,wBAAwB;IACvC;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC,CAAC;CACrE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js new file mode 100644 index 0000000000..e94df45ca0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=serviceClientCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map new file mode 100644 index 0000000000..68ab158871 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/serviceClientCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClientCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/serviceClientCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts new file mode 100644 index 0000000000..95a5c310dc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts @@ -0,0 +1,25 @@ +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +export declare class TokenCredentials implements ServiceClientCredentials { + token: string; + authorizationScheme: string; + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(token: string, authorizationScheme?: string); + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. 
+ */ + signRequest(webResource: WebResourceLike): Promise; +} +//# sourceMappingURL=tokenCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map new file mode 100644 index 0000000000..85e3807c19 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/tokenCredentials.ts"],"names":[],"mappings":"AAKA,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,wBAAwB,EAAE,MAAM,4BAA4B,CAAC;AAKtE;;GAEG;AACH,qBAAa,gBAAiB,YAAW,wBAAwB;IAC/D,KAAK,EAAE,MAAM,CAAC;IACd,mBAAmB,EAAE,MAAM,CAAgC;IAE3D;;;;;;OAMG;gBACS,KAAK,EAAE,MAAM,EAAE,mBAAmB,GAAE,MAAqC;IAQrF;;;;;OAKG;IACH,WAAW,CAAC,WAAW,EAAE,eAAe;CAQzC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js new file mode 100644 index 0000000000..70265d7317 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +var HeaderConstants = Constants.HeaderConstants; +var DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +var TokenCredentials = /** @class */ (function () { + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + function TokenCredentials(token, authorizationScheme) { + if (authorizationScheme === void 0) { authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; } + this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. 
+ */ + TokenCredentials.prototype.signRequest = function (webResource) { + if (!webResource.headers) + webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, this.authorizationScheme + " " + this.token); + return Promise.resolve(webResource); + }; + return TokenCredentials; +}()); +export { TokenCredentials }; +//# sourceMappingURL=tokenCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map new file mode 100644 index 0000000000..302ef4e8a6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/tokenCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAI9C,IAAM,eAAe,GAAG,SAAS,CAAC,eAAe,CAAC;AAClD,IAAM,4BAA4B,GAAG,QAAQ,CAAC;AAE9C;;GAEG;AACH;IAIE;;;;;;OAMG;IACH,0BAAY,KAAa,EAAE,mBAA0D;QAA1D,oCAAA,EAAA,kDAA0D;QATrF,wBAAmB,GAAW,4BAA4B,CAAC;QAUzD,IAAI,CAAC,KAAK,EAAE;YACV,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;SACvD;QACD,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACH,sCAAW,GAAX,UAAY,WAA4B;QACtC,IAAI,CAAC,WAAW,CAAC,OAAO;YAAE,WAAW,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;QAClE,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,aAAa,EAC1B,IAAI,CAAC,mBAAmB,SAAI,IAAI,CAAC,KAAO,CAC5C,CAAC;QACF,OAAO,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IACtC,CAAC;IACH,uBAAC;AAAD,CAAC,AAjCD,IAiCC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts new file mode 100644 index 0000000000..eb4f1798b9 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts @@ -0,0 +1,10 @@ +/** + * TokenResponse is defined in `@azure/ms-rest-nodeauth` and is copied here to not + * add an unnecessary dependency. + */ +export interface TokenResponse { + readonly tokenType: string; + readonly accessToken: string; + readonly [x: string]: any; +} +//# sourceMappingURL=tokenResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map new file mode 100644 index 0000000000..7636a8a356 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenResponse.d.ts","sourceRoot":"","sources":["../../../lib/credentials/tokenResponse.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,MAAM,WAAW,aAAa;IAC5B,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC;IAC3B,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,QAAQ,EAAE,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CAC3B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js new file mode 100644 index 0000000000..42ec1996ef --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+//# sourceMappingURL=tokenResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map new file mode 100644 index 0000000000..b145bf0fc4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/tokenResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tokenResponse.js","sourceRoot":"","sources":["../../../lib/credentials/tokenResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts new file mode 100644 index 0000000000..059fe93577 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts @@ -0,0 +1,11 @@ +import { ApiKeyCredentials } from "./apiKeyCredentials"; +export declare class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. + * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + constructor(topicKey: string); +} +//# sourceMappingURL=topicCredentials.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map new file mode 100644 index 0000000000..cf4a12bfeb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.d.ts","sourceRoot":"","sources":["../../../lib/credentials/topicCredentials.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF,qBAAa,gBAAiB,SAAQ,iBAAiB;IACrD;;;;;OAKG;gBACS,QAAQ,EAAE,MAAM;CAW7B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js new file mode 100644 index 0000000000..bed72a9360 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { ApiKeyCredentials } from "./apiKeyCredentials"; +var TopicCredentials = /** @class */ (function (_super) { + __extends(TopicCredentials, _super); + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + function TopicCredentials(topicKey) { + var _this = this; + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + var options = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + _this = _super.call(this, options) || this; + return _this; + } + return TopicCredentials; +}(ApiKeyCredentials)); +export { TopicCredentials }; +//# sourceMappingURL=topicCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map new file mode 100644 index 0000000000..267491d14a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/credentials/topicCredentials.js.map @@ -0,0 +1 @@ +{"version":3,"file":"topicCredentials.js","sourceRoot":"","sources":["../../../lib/credentials/topicCredentials.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,qBAAqB,CAAC;AAEjF;IAAsC,oCAAiB;IACrD;;;;;OAKG;IACH,0BAAY,QAAgB;QAA5B,iBAUC;QATC,IAAI,CAAC,QAAQ,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,CAAC,EAAE;YAC3D,MAAM,IAAI,KAAK,CAAC,kEAAkE,CAAC,CAAC;SACrF;QACD,IAAM,OAAO,GAA4B;YACvC,QAAQ,EAAE;gBACR,aAAa,EAAE,QAAQ;aACxB;SACF,CAAC;QACF,QAAA,kBAAM,OAAO,CAAC,SAAC;;IACjB,CAAC;IACH,uBAAC;AAAD,CAAC,AAlBD,CAAsC,iBAAiB,GAkBtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts new file mode 100644 index 0000000000..883145a45b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts @@ -0,0 +1,2 @@ +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map new file mode 100644 index 0000000000..9a0417262a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.d.ts","sourceRoot":"","sources":["../../lib/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js new file mode 100644 index 0000000000..49ae427110 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; +//# sourceMappingURL=defaultHttpClient.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map new file mode 100644 index 0000000000..d21b02ebad --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.browser.js","sourceRoot":"","sources":["../../lib/defaultHttpClient.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,aAAa,IAAI,iBAAiB,EAAE,MAAM,iBAAiB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts new file mode 100644 index 0000000000..a7381a249b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts @@ -0,0 +1,2 @@ +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map new file mode 100644 index 0000000000..35b8b1fa7b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.d.ts","sourceRoot":"","sources":["../../lib/defaultHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js new file mode 100644 index 0000000000..a8b56aa8d4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map new file mode 100644 index 0000000000..1effd0f990 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/defaultHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"defaultHttpClient.js","sourceRoot":"","sources":["../../lib/defaultHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,mBAAmB,IAAI,iBAAiB,EAAE,MAAM,uBAAuB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts new file mode 100644 index 0000000000..eb15bc12dd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts @@ -0,0 +1,23 @@ +import { HttpClient } from "./httpClient"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { HttpHeadersLike } from "./httpHeaders"; +export declare type CommonRequestInfo = string; +export declare type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; +export declare type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; +export declare abstract class FetchHttpClient implements HttpClient { + sendRequest(httpRequest: WebResourceLike): Promise; + abstract prepareRequest(httpRequest: WebResourceLike): Promise>; + abstract processRequest(operationResponse: HttpOperationResponse): Promise; + abstract fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; +} +export declare function parseHeaders(headers: Headers): HttpHeadersLike; +//# sourceMappingURL=fetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map new file mode 100644 index 0000000000..926678cc29 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/fetchHttpClient.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAe,eAAe,EAAE,MAAM,eAAe,CAAC;AAU7D,oBAAY,iBAAiB,GAAG,MAAM,CAAC;AAEvC,oBAAY,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,SAAS,GAAG,QAAQ,CAAC,GAAG;IACjF,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,CAAC,EAAE,GAAG,CAAC;IACd,MAAM,CAAC,EAAE,GAAG,CAAC;CACd,CAAC;AAEF,oBAAY,cAAc,GAAG,IAAI,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS,GAAG,UAAU,CAAC,GAAG;IAC7E,IAAI,EAAE,GAAG,CAAC;IACV,OAAO,EAAE,GAAG,CAAC;IACb,QAAQ,EAAE,GAAG,CAAC;CACf,CAAC;AAEF,8BAAsB,eAAgB,YAAW,UAAU;IACnD,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;aAoMhE,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;aAC3E,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;aACvE,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;CAClG;AAcD,wBAAgB,YAAY,CAAC,OAAO,EAAE,OAAO,GAAG,eAAe,CAQ9D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js new file mode 100644 index 0000000000..92c646a0e2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js @@ -0,0 +1,214 @@ +// 
Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __assign, __awaiter, __generator } from "tslib"; +import AbortController from "abort-controller"; +import FormData from "form-data"; +import { HttpHeaders } from "./httpHeaders"; +import { RestError } from "./restError"; +import { Transform } from "stream"; +var FetchHttpClient = /** @class */ (function () { + function FetchHttpClient() { + } + FetchHttpClient.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var abortController, abortListener, formData, requestForm_1, appendFormValue, _i, _a, formKey, formValue, j, contentType, body, loadedBytes_1, uploadReportStream, platformSpecificRequestInit, requestInit, operationResponse, response, headers, _b, _c, onDownloadProgress_1, responseBody, loadedBytes_2, downloadReportStream, length_1, error_1, fetchError, uploadStreamDone, downloadStreamDone; + return __generator(this, function (_d) { + switch (_d.label) { + case 0: + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error("'httpRequest' (WebResource) cannot be null or undefined and must be of type object."); + } + abortController = new AbortController(); + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + abortListener = function (event) { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + if (httpRequest.timeout) { + setTimeout(function () { + abortController.abort(); + }, httpRequest.timeout); + } + if (httpRequest.formData) { + formData = httpRequest.formData; + requestForm_1 = new FormData(); + appendFormValue = function (key, value) { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (_i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + formKey = _a[_i]; + formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + httpRequest.body = requestForm_1; + httpRequest.formData = undefined; + contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm_1.getBoundary === "function") { + httpRequest.headers.set("Content-Type", "multipart/form-data; boundary=" + requestForm_1.getBoundary()); + } + else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + body = httpRequest.body + ? typeof httpRequest.body === "function" + ? 
httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + loadedBytes_1 = 0; + uploadReportStream = new Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_1 += chunk.length; + httpRequest.onUploadProgress({ loadedBytes: loadedBytes_1 }); + callback(undefined, chunk); + }, + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + return [4 /*yield*/, this.prepareRequest(httpRequest)]; + case 1: + platformSpecificRequestInit = _d.sent(); + requestInit = __assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); + _d.label = 2; + case 2: + _d.trys.push([2, 8, 9, 10]); + return [4 /*yield*/, this.fetch(httpRequest.url, requestInit)]; + case 3: + response = _d.sent(); + headers = parseHeaders(response.headers); + _b = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? response.body + : undefined + }; + if (!!httpRequest.streamResponseBody) return [3 /*break*/, 5]; + return [4 /*yield*/, response.text()]; + case 4: + _c = _d.sent(); + return [3 /*break*/, 6]; + case 5: + _c = undefined; + _d.label = 6; + case 6: + operationResponse = (_b.bodyAsText = _c, + _b.redirected = response.redirected, + _b.url = response.url, + _b); + onDownloadProgress_1 = httpRequest.onDownloadProgress; + if (onDownloadProgress_1) { + responseBody = response.body || undefined; + if (isReadableStream(responseBody)) { + loadedBytes_2 = 0; + downloadReportStream = new Transform({ + transform: function (chunk, _encoding, callback) { + loadedBytes_2 += chunk.length; + onDownloadProgress_1({ loadedBytes: loadedBytes_2 }); + callback(undefined, chunk); + }, + }); + responseBody.pipe(downloadReportStream); + operationResponse.readableStreamBody = downloadReportStream; + } + else { + length_1 = parseInt(headers.get("Content-Length")) || undefined; + if (length_1) { + // Calling callback for non-stream response for consistency with browser + onDownloadProgress_1({ loadedBytes: length_1 }); + } + } + } + return [4 /*yield*/, this.processRequest(operationResponse)]; + case 7: + _d.sent(); + return [2 /*return*/, operationResponse]; + case 8: + error_1 = _d.sent(); + fetchError = error_1; + if (fetchError.code === "ENOTFOUND") { + throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); + } + else if (fetchError.type === "aborted") { + throw new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, httpRequest); + } + throw fetchError; + case 9: + // clean up event listener + if (httpRequest.abortSignal && abortListener) { + uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + downloadStreamDone = Promise.resolve(); + if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { + downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(function () { + var _a; + (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + return; + }) + .catch(function (_e) { }); + } + return [7 /*endfinally*/]; + case 10: return [2 /*return*/]; + } + }); + }); + }; + return FetchHttpClient; +}()); +export { FetchHttpClient }; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise(function (resolve) { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +export function parseHeaders(headers) { + var httpHeaders = new HttpHeaders(); + headers.forEach(function (value, key) { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +//# sourceMappingURL=fetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map new file mode 100644 index 0000000000..4c8e9198ba --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/fetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fetchHttpClient.js","sourceRoot":"","sources":["../../lib/fetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,eAAe,MAAM,kBAAkB,CAAC;AAC/C,OAAO,QAAQ,MAAM,WAAW,CAAC;AAKjC,OAAO,EAAE,WAAW,EAAmB,MAAM,eAAe,CAAC;AAC7D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAY,SAAS,EAAE,MAAM,QAAQ,CAAC;AAsB7C;IAAA;IAwMA,CAAC;IAvMO,qCAAW,GAAjB,UAAkB,WAA4B;;;;;;wBAC5C,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnD,MAAM,IAAI,KAAK,CACb,qFAAqF,CACtF,CAAC;yBACH;wBAEK,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;wBAE9C,IAAI,WAAW,CAAC,WAAW,EAAE;4BAC3B,IAAI,WAAW,CAAC,WAAW,CAAC,OAAO,EAAE;gCACnC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;6BACH;4BAED,aAAa,GAAG,UAAC,KAAY;gCAC3B,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,EAAE;oCAC1B,eAAe,CAAC,KAAK,EAAE,CAAC;iCACzB;4BACH,CAAC,CAAC;4BACF,WAAW,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;yBAClE;wBAED,IAAI,WAAW,CAAC,OAAO,EAAE;4BACvB,UAAU,CAAC;gCACT,eAAe,CAAC,KAAK,EAAE,CAAC;4BAC1B,CAAC,EAAE,WAAW,CAAC,OAAO,CAAC,CAAC;yBACzB;wBAED,IAAI,WAAW,CAAC,QAAQ,EAAE;4BAClB,QAAQ,GAAQ,WAAW,CAAC,QAAQ,CAAC;4BACrC,gBAAc,IAAI,QAAQ,EAAE,CAAC;4BAC7B,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;gCAC9C,0FAA0F;gCAC1F,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;oCAC/B,KAAK,GAAG,KAAK,EAAE,CAAC;iCACjB;gCACD,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oCAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iCACrD;qCAAM;oCACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iCAChC;4BACH,CAAC,CAAC;4BACF,WAA2C,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gCAAlC,OAAO;gCACV,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gCACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oCAC5B,KAAS,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wCACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qCACxC;iCACF;qCAAM;oCACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iCACrC;6BACF;4BAED,WAAW,CAAC,IAAI,GAAG,aAAW,CAAC;4BAC/B,WAAW,CAAC,QAAQ,GAAG,SAAS,CAAC;4BAC3B,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;4BAC5D,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gCACpE,IAAI,OAAO,aAAW,CAAC,WAAW,KAAK,UAAU,EAAE;oCACjD,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,cAAc,EACd,mCAAiC,aAAW,CAAC,WAAW,EAAI,CAC7D,CAAC;iCACH;qCAAM;oCACL,kEAAkE;oCAClE,WAAW,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;iCAC5C;6BACF;yBACF;wBAEG,IAAI,GAAG,WAAW,CAAC,IAAI;4BACzB,CAAC,CAAC,OAAO,WAAW,CAAC,IAAI,KAAK,UAAU;gCACtC,CAAC,CAAC,WAAW,CAAC,IAAI,EAAE;gCACpB,CAAC,CAAC,WAAW,CAAC,IAAI;4BACpB,CAAC
,CAAC,SAAS,CAAC;wBACd,IAAI,WAAW,CAAC,gBAAgB,IAAI,WAAW,CAAC,IAAI,EAAE;4BAChD,gBAAc,CAAC,CAAC;4BACd,kBAAkB,GAAG,IAAI,SAAS,CAAC;gCACvC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;oCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;oCAC5B,WAAW,CAAC,gBAAiB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;oCAC/C,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;gCAC7B,CAAC;6BACF,CAAC,CAAC;4BAEH,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,IAAI,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;6BAC/B;iCAAM;gCACL,kBAAkB,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;6BAC9B;4BAED,IAAI,GAAG,kBAAkB,CAAC;yBAC3B;wBAEyD,qBAAM,IAAI,CAAC,cAAc,CACjF,WAAW,CACZ,EAAA;;wBAFK,2BAA2B,GAAyB,SAEzD;wBAEK,WAAW,cACf,IAAI,EAAE,IAAI,EACV,OAAO,EAAE,WAAW,CAAC,OAAO,CAAC,UAAU,EAAE,EACzC,MAAM,EAAE,WAAW,CAAC,MAAM,EAC1B,MAAM,EAAE,eAAe,CAAC,MAAM,EAC9B,QAAQ,EAAE,QAAQ,IACf,2BAA2B,CAC/B,CAAC;;;;wBAIiC,qBAAM,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,EAAA;;wBAAzE,QAAQ,GAAmB,SAA8C;wBAEzE,OAAO,GAAG,YAAY,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;;4BAE7C,OAAO,EAAE,OAAO;4BAChB,OAAO,EAAE,WAAW;4BACpB,MAAM,EAAE,QAAQ,CAAC,MAAM;4BACvB,kBAAkB,EAAE,WAAW,CAAC,kBAAkB;gCAChD,CAAC,CAAG,QAAQ,CAAC,IAA0C;gCACvD,CAAC,CAAC,SAAS;;6BACD,CAAC,WAAW,CAAC,kBAAkB,EAA/B,wBAA+B;wBAAG,qBAAM,QAAQ,CAAC,IAAI,EAAE,EAAA;;wBAArB,KAAA,SAAqB,CAAA;;;wBAAG,KAAA,SAAS,CAAA;;;wBAPjF,iBAAiB,IAOf,aAAU,KAAqE;4BAC/E,aAAU,GAAE,QAAQ,CAAC,UAAU;4BAC/B,MAAG,GAAE,QAAQ,CAAC,GAAG;+BAClB,CAAC;wBAEI,uBAAqB,WAAW,CAAC,kBAAkB,CAAC;wBAC1D,IAAI,oBAAkB,EAAE;4BAChB,YAAY,GAA2C,QAAQ,CAAC,IAAI,IAAI,SAAS,CAAC;4BAExF,IAAI,gBAAgB,CAAC,YAAY,CAAC,EAAE;gCAC9B,gBAAc,CAAC,CAAC;gCACd,oBAAoB,GAAG,IAAI,SAAS,CAAC;oCACzC,SAAS,EAAE,UAAC,KAAsB,EAAE,SAAS,EAAE,QAAQ;wCACrD,aAAW,IAAI,KAAK,CAAC,MAAM,CAAC;wCAC5B,oBAAkB,CAAC,EAAE,WAAW,eAAA,EAAE,CAAC,CAAC;wCACpC,QAAQ,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;oCAC7B,CAAC;iCACF,CAAC,CAAC;gCACH,YAAY,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;gCACxC,iBAAiB,CAAC,kBAAkB,GAAG,oBAAoB,CAAC;6BAC7D;iCAAM;gCACC,WAAS,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,CAAE,CAAC,IAAI,SAAS,CAAC;gCACrE,IAAI,QAAM,EAAE;oCACV,wEAAwE;oCACxE,oBAAkB,CAAC,EAAE,WAAW,EAAE,QAAM,EAAE,CAAC,CAAC;iCAC7C;6BACF;yBACF;wBAED,qBAAM,IAAI,CAAC,cAAc,CAAC,iBAAiB,CAAC,EAAA;;wBAA5C,SAA4C,CAAC;wBAE7C,sBAAO,iBAAiB,EAAC;;;wBAEnB,UAAU,GAAe,OAAK,CAAC;wBACrC,IAAI,UAAU,CAAC,IAAI,KAAK,WAAW,EAAE;4BACnC,MAAM,IAAI,SAAS,CACjB,UAAU,CAAC,OAAO,EAClB,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;6BAAM,IAAI,UAAU,CAAC,IAAI,KAAK,SAAS,EAAE;4BACxC,MAAM,IAAI,SAAS,CACjB,yBAAyB,EACzB,SAAS,CAAC,qBAAqB,EAC/B,SAAS,EACT,WAAW,CACZ,CAAC;yBACH;wBAED,MAAM,UAAU,CAAC;;wBAEjB,0BAA0B;wBAC1B,IAAI,WAAW,CAAC,WAAW,IAAI,aAAa,EAAE;4BACxC,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BACzC,IAAI,gBAAgB,CAAC,IAAI,CAAC,EAAE;gCAC1B,gBAAgB,GAAG,gBAAgB,CAAC,IAAI,CAAC,CAAC;6BAC3C;4BACG,kBAAkB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;4BAC3C,IAAI,gBAAgB,CAAC,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,kBAAkB,CAAC,EAAE;gCAC3D,kBAAkB,GAAG,gBAAgB,CAAC,iBAAkB,CAAC,kBAAkB,CAAC,CAAC;6BAC9E;4BAED,OAAO,CAAC,GAAG,CAAC,CAAC,gBAAgB,EAAE,kBAAkB,CAAC,CAAC;iCAChD,IAAI,CAAC;;gCACJ,MAAA,WAAW,CAAC,WAAW,0CAAE,mBAAmB,CAAC,OAAO,EAAE,aAAc,EAAE;gCACtE,OAAO;4BACT,CAAC,CAAC;iCACD,KAAK,CAAC,UAAC,EAAE,IAAM,CAAC,CAAC,CAAC;yBACtB;;;;;;KAEJ;IAKH,sBAAC;AAAD,CAAC,AAxMD,IAwMC;;AAED,SAAS,gBAAgB,CAAC,IAAS;IACjC,OAAO,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC;AACjD,CAAC;AAED,SAAS,gBAAgB,CAAC,MAAgB;IACxC,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO;QACzB,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;QAC1B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,YAAY,CAAC,OAAgB;IAC3C,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;IAEtC,OAAO,CAAC,OAAO,CAAC,UAAC,KAAK,EAAE,GAAG;QACzB,WAA
W,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts new file mode 100644 index 0000000000..5f5f00e974 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts @@ -0,0 +1,7 @@ +import { RequestPolicy } from "./policies/requestPolicy"; +/** + * An interface that can send HttpRequests and receive promised HttpResponses. + */ +export interface HttpClient extends RequestPolicy { +} +//# sourceMappingURL=httpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map new file mode 100644 index 0000000000..2da0af7489 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.d.ts","sourceRoot":"","sources":["../../lib/httpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAEzD;;GAEG;AACH,MAAM,WAAW,UAAW,SAAQ,aAAa;CAAG"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.js b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js new file mode 100644 index 0000000000..8dca936c6e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=httpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map new file mode 100644 index 0000000000..e8c060fd6a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpClient.js","sourceRoot":"","sources":["../../lib/httpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts new file mode 100644 index 0000000000..4f95f2eaea --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts @@ -0,0 +1,132 @@ +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + /** + * The value of the header. + */ + value: string; +} +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export declare type RawHttpHeaders = { + [headerName: string]: string; +}; +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. 
+ */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(): RawHttpHeaders; +} +export declare function isHttpHeadersLike(object?: any): object is HttpHeadersLike; +/** + * A collection of HTTP header key/value pairs. + */ +export declare class HttpHeaders { + private readonly _headersMap; + constructor(rawHeaders?: RawHttpHeaders); + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header names that are contained in this collection. + */ + headerValues(): string[]; + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(): RawHttpHeaders; + /** + * Get the string representation of this HTTP header collection. + */ + toString(): string; + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone(): HttpHeaders; +} +//# sourceMappingURL=httpHeaders.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map new file mode 100644 index 0000000000..051d138cc2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.d.ts","sourceRoot":"","sources":["../../lib/httpHeaders.ts"],"names":[],"mappings":"AAUA;;GAEG;AACH,MAAM,WAAW,UAAU;IACzB;;OAEG;IACH,IAAI,EAAE,MAAM,CAAC;IAEb;;OAEG;IACH,KAAK,EAAE,MAAM,CAAC;CACf;AAED;;GAEG;AACH,oBAAY,cAAc,GAAG;IAAE,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,CAAC;AAE9D;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;;;;OAKG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI,CAAC;IAC5D;;;;OAIG;IACH,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAAC;IAC5C;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACtC;;;;OAIG;IACH,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC;IACpC;;OAEG;IACH,UAAU,IAAI,cAAc,CAAC;IAC7B;;OAEG;IACH,YAAY,IAAI,UAAU,EAAE,CAAC;IAC7B;;OAEG;IACH,WAAW,IAAI,MAAM,EAAE,CAAC;IACxB;;OAEG;IACH,YAAY,IAAI,MAAM,EAAE,CAAC;IACzB;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;IACzB;;;OAGG;IACH,MAAM,IAAI,cAAc,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,CAAC,EAAE,GAAG,GAAG,MAAM,IAAI,eAAe,CAoBzE;AAED;;GAEG;AACH,qBAAa,WAAW;IACtB,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAsC;gBAEtD,UAAU,CAAC,EAAE,cAAc;IASvC;;;;;OAKG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,IAAI;IAOlE;;;;OAIG;IACI,GAAG,CAAC,UAAU,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;IAKlD;;OAEG;IACI,QAAQ,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAI5C;;;;OAIG;IACI,MAAM,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO;IAM1C;;OAEG;IACI,UAAU,IAAI,cAAc;IASnC;;OAEG;IACI,YAAY,IAAI,UAAU,EAAE;IAQnC;;OAEG;IACI,WAAW,IAAI,MAAM,EAAE;IAS9B;;OAEG;IACI,YAAY,IAAI,MAAM,EAAE;IAS/B;;OAEG;IACI,MAAM,IAAI,cAAc;IAI/B;;OAEG;IACI,QAAQ,IAAI,MAAM;IAIzB;;OAEG;IACI,KAAK,IAAI,WAAW;CAG5B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js new file mode 100644 index 0000000000..0cc2af2711 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +export function isHttpHeadersLike(object) { + if (!object || typeof object !== "object") { + return false; + } + if (typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function") { + return true; + } + return false; +} +/** + * A collection of HTTP header key/value pairs. + */ +var HttpHeaders = /** @class */ (function () { + function HttpHeaders(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (var headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. 
This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + HttpHeaders.prototype.set = function (headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + }; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + HttpHeaders.prototype.get = function (headerName) { + var header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + }; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + HttpHeaders.prototype.contains = function (headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + }; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + HttpHeaders.prototype.remove = function (headerName) { + var result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + }; + /** + * Get the headers that are contained this collection as an object. + */ + HttpHeaders.prototype.rawHeaders = function () { + var result = {}; + for (var headerKey in this._headersMap) { + var header = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + }; + /** + * Get the headers that are contained in this collection as an array. + */ + HttpHeaders.prototype.headersArray = function () { + var headers = []; + for (var headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerNames = function () { + var headerNames = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + }; + /** + * Get the header names that are contained in this collection. + */ + HttpHeaders.prototype.headerValues = function () { + var headerValues = []; + var headers = this.headersArray(); + for (var i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + }; + /** + * Get the JSON object representation of this HTTP header collection. + */ + HttpHeaders.prototype.toJson = function () { + return this.rawHeaders(); + }; + /** + * Get the string representation of this HTTP header collection. + */ + HttpHeaders.prototype.toString = function () { + return JSON.stringify(this.toJson()); + }; + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + HttpHeaders.prototype.clone = function () { + return new HttpHeaders(this.rawHeaders()); + }; + return HttpHeaders; +}()); +export { HttpHeaders }; +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map new file mode 100644 index 0000000000..3a4d8ac4fc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpHeaders.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpHeaders.js","sourceRoot":"","sources":["../../lib/httpHeaders.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,SAAS,YAAY,CAAC,UAAkB;IACtC,OAAO,UAAU,CAAC,WAAW,EAAE,CAAC;AAClC,CAAC;AA4ED,MAAM,UAAU,iBAAiB,CAAC,MAAY;IAC5C,IAAI,CAAC,MAAM,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QACzC,OAAO,KAAK,CAAC;KACd;IAED,IACE,OAAO,MAAM,CAAC,UAAU,KAAK,UAAU;QACvC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU;QAClC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,GAAG,KAAK,UAAU;QAChC,OAAO,MAAM,CAAC,QAAQ,KAAK,UAAU;QACrC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU;QACnC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,YAAY,KAAK,UAAU;QACzC,OAAO,MAAM,CAAC,WAAW,KAAK,UAAU;QACxC,OAAO,MAAM,CAAC,MAAM,KAAK,UAAU,EACnC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;GAEG;AACH;IAGE,qBAAY,UAA2B;QACrC,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;QACtB,IAAI,UAAU,EAAE;YACd,KAAK,IAAM,UAAU,IAAI,UAAU,EAAE;gBACnC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC;aAC9C;SACF;IACH,CAAC;IAED;;;;;OAKG;IACI,yBAAG,GAAV,UAAW,UAAkB,EAAE,WAA4B;QACzD,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,GAAG;YAC3C,IAAI,EAAE,UAAU;YAChB,KAAK,EAAE,WAAW,CAAC,QAAQ,EAAE;SAC9B,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,yBAAG,GAAV,UAAW,UAAkB;QAC3B,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QACtE,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC;IAC5C,CAAC;IAED;;OAEG;IACI,8BAAQ,GAAf,UAAgB,UAAkB;QAChC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACI,4BAAM,GAAb,UAAc,UAAkB;QAC9B,IAAM,MAAM,GAAY,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC;QAClD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,gCAAU,GAAjB;QACE,IAAM,MAAM,GAAmB,EAAE,CAAC;QAClC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,IAAM,MAAM,GAAe,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC;YACvD,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,CAAC;SAClD;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACI,kCAAY,GAAnB;QACE,IAAM,OAAO,GAAiB,EAAE,CAAC;QACjC,KAAK,IAAM,SAAS,IAAI,IAAI,CAAC,WAAW,EAAE;YACxC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,CAAC;SAC3C;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,iCAAW,GAAlB;QACE,IAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,WAAW,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;SACnC;QACD,OAAO,WAAW,CAAC;IACrB,CAAC;IAED;;OAEG;IACI,kCAAY,GAAnB;QACE,IAAM,YAAY,GAAa,EAAE,CAAC;QAClC,IAAM,OAAO,GAAiB,IAAI,CAAC,YAAY,EAAE,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,YAAY,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;SACrC;QACD,OAAO,YAAY,CAAC;IACtB,CAAC;IAED;;OAEG;IACI,4BAAM,GAAb;QACE,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC;IAC3B,CAAC;IAED;;OAEG;IACI,8BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;IACvC,CAAC;IAED;;OAEG;IACI,2BAAK,GAAZ;QACE,OAAO,IAAI,WAAW,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,CAAC;IAC5C,CAAC;IACH,kBAAC;AAAD,CAAC,AAxHD,IAwHC"} \ No newline at end of file diff 
--git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts new file mode 100644 index 0000000000..d5790e0451 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts @@ -0,0 +1,83 @@ +/// +import { WebResourceLike } from "./webResource"; +import { HttpHeadersLike } from "./httpHeaders"; +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + /** + * The HTTP response status (e.g. 200) + */ + status: number; + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob { + } +} +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { + [key: string]: any; + }; + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** + * The redirected property indicates whether the response is the result of a request which was redirected. + */ + redirected?: boolean; + /** + * The url property contains the URL of the response. The value will be the final URL obtained after any redirects. + */ + url?: string; +} +/** + * The flattened response to a REST call. + * Contains the underlying HttpOperationResponse as well as + * the merged properties of the parsedBody, parsedHeaders, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. 
+ */ + _response: HttpOperationResponse; + [key: string]: any; +} +//# sourceMappingURL=httpOperationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map new file mode 100644 index 0000000000..778b5e07aa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.d.ts","sourceRoot":"","sources":["../../lib/httpOperationResponse.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAChD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD;;GAEG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IAEzB;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;IAEf;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;CAC1B;AAED,OAAO,CAAC,MAAM,CAAC;IACb;;;OAGG;IACH,UAAU,IAAI;KAAG;CAClB;AAED;;;GAGG;AACH,MAAM,WAAW,qBAAsB,SAAQ,YAAY;IACzD;;OAEG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAEvC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;IAE3B;;OAEG;IACH,UAAU,CAAC,EAAE,GAAG,CAAC;IAEjB;;;;;OAKG;IACH,QAAQ,CAAC,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;IAEzB;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,CAAC,cAAc,CAAC;IAE3C;;OAEG;IACH,UAAU,CAAC,EAAE,OAAO,CAAC;IAErB;;OAEG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED;;;;GAIG;AACH,MAAM,WAAW,YAAY;IAC3B;;OAEG;IACH,SAAS,EAAE,qBAAqB,CAAC;IAEjC,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js new file mode 100644 index 0000000000..bdb3308488 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=httpOperationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map new file mode 100644 index 0000000000..14302d6781 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpOperationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpOperationResponse.js","sourceRoot":"","sources":["../../lib/httpOperationResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts new file mode 100644 index 0000000000..7a13ab2918 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts @@ -0,0 +1,22 @@ +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export declare enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF = 0, + /** + * An error log. + */ + ERROR = 1, + /** + * A warning log. + */ + WARNING = 2, + /** + * An information log. 
+ */ + INFO = 3 +} +//# sourceMappingURL=httpPipelineLogLevel.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map new file mode 100644 index 0000000000..91f637ccb0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.d.ts","sourceRoot":"","sources":["../../lib/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,oBAAoB;IAC9B;;OAEG;IACH,GAAG,IAAA;IAEH;;OAEG;IACH,KAAK,IAAA;IAEL;;OAEG;IACH,OAAO,IAAA;IAEP;;OAEG;IACH,IAAI,IAAA;CACL"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js new file mode 100644 index 0000000000..8971a068d4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + /** + * A log level that indicates that no logs will be logged. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + /** + * An error log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + /** + * A warning log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; + /** + * An information log. + */ + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +//# sourceMappingURL=httpPipelineLogLevel.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map new file mode 100644 index 0000000000..d933399fab --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogLevel.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogLevel.js","sourceRoot":"","sources":["../../lib/httpPipelineLogLevel.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,MAAM,CAAN,IAAY,oBAoBX;AApBD,WAAY,oBAAoB;IAC9B;;OAEG;IACH,6DAAG,CAAA;IAEH;;OAEG;IACH,iEAAK,CAAA;IAEL;;OAEG;IACH,qEAAO,CAAA;IAEP;;OAEG;IACH,+DAAI,CAAA;AACN,CAAC,EApBW,oBAAoB,KAApB,oBAAoB,QAoB/B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts new file mode 100644 index 0000000000..fdbf56c8e5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts @@ -0,0 +1,35 @@ +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * A HttpPipelineLogger that will send its logs to the console. 
+ */ +export declare class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + minimumLogLevel: HttpPipelineLogLevel; + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + constructor(minimumLogLevel: HttpPipelineLogLevel); + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=httpPipelineLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map new file mode 100644 index 0000000000..eac81c6bc5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.d.ts","sourceRoot":"","sources":["../../lib/httpPipelineLogger.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAE9D;;;GAGG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,eAAe,EAAE,oBAAoB,CAAC;IAEtC;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,yBAA0B,YAAW,kBAAkB;IAK/C,eAAe,EAAE,oBAAoB;IAJxD;;;OAGG;gBACgB,eAAe,EAAE,oBAAoB;IAExD;;;;OAIG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAgB3D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js new file mode 100644 index 0000000000..a8b1cb53fe --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +var ConsoleHttpPipelineLogger = /** @class */ (function () { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + function ConsoleHttpPipelineLogger(minimumLogLevel) { + this.minimumLogLevel = minimumLogLevel; + } + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. 
+ */ + ConsoleHttpPipelineLogger.prototype.log = function (logLevel, message) { + var logMessage = HttpPipelineLogLevel[logLevel] + ": " + message; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + }; + return ConsoleHttpPipelineLogger; +}()); +export { ConsoleHttpPipelineLogger }; +//# sourceMappingURL=httpPipelineLogger.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map new file mode 100644 index 0000000000..00f17beb13 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/httpPipelineLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"httpPipelineLogger.js","sourceRoot":"","sources":["../../lib/httpPipelineLogger.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAoB9D;;GAEG;AACH;IACE;;;OAGG;IACH,mCAAmB,eAAqC;QAArC,oBAAe,GAAf,eAAe,CAAsB;IAAG,CAAC;IAE5D;;;;OAIG;IACH,uCAAG,GAAH,UAAI,QAA8B,EAAE,OAAe;QACjD,IAAM,UAAU,GAAM,oBAAoB,CAAC,QAAQ,CAAC,UAAK,OAAS,CAAC;QACnE,QAAQ,QAAQ,EAAE;YAChB,KAAK,oBAAoB,CAAC,KAAK;gBAC7B,OAAO,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC1B,MAAM;YAER,KAAK,oBAAoB,CAAC,OAAO;gBAC/B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;gBACzB,MAAM;YAER,KAAK,oBAAoB,CAAC,IAAI;gBAC5B,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;gBACxB,MAAM;SACT;IACH,CAAC;IACH,gCAAC;AAAD,CAAC,AA5BD,IA4BC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts new file mode 100644 index 0000000000..1244b50f07 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts @@ -0,0 +1,42 @@ +/// +export { WebResource, WebResourceLike, HttpRequestBody, RequestPrepareOptions, HttpMethods, ParameterValue, RequestOptionsBase, TransferProgressEvent, AbortSignalLike, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { CommonRequestInfo, CommonRequestInit, CommonResponse } from "./fetchHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { OperationParameter, OperationQueryParameter, OperationURLParameter, ParameterPath, } from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { AgentSettings, ProxySettings, ServiceClient, ServiceClientOptions, flattenResponse, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptions, RequestPolicyOptionsLike, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export 
{ throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { TelemetryInfo, userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { DeserializationContentTypes, deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { MapperType, SimpleMapperType, CompositeMapperType, DictionaryMapperType, SequenceMapperType, EnumMapperType, Mapper, BaseMapper, CompositeMapper, SequenceMapper, DictionaryMapper, EnumMapper, MapperConstraints, PolymorphicDiscriminator, Serializer, UrlParameterValue, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, delay, executePromisesSequentially, generateUuid, encodeUri, ServiceCallback, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { TokenResponse } from "./credentials/tokenResponse"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from "./credentials/domainCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; +//# sourceMappingURL=msRest.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map new file mode 100644 index 0000000000..7737b322cc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.d.ts","sourceRoot":"","sources":["../../lib/msRest.ts"],"names":[],"mappings":";AAKA,OAAO,EACL,WAAW,EACX,eAAe,EACf,eAAe,EACf,qBAAqB,EACrB,WAAW,EACX,cAAc,EACd,kBAAkB,EAClB,qBAAqB,EACrB,eAAe,GAChB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,iBAAiB,EAAE,iBAAiB,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AACzF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,eAAe,CAAC;AACzF,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC5F,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACrB,aAAa,GACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EACL,aAAa,EACb,aAAa,EACb,aAAa,EACb,oBAAoB,EACpB,eAAe,GAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,oBAAoB,EACpB,wBAAwB,GACzB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EACL,aAAa,EACb,eAAe,EACf,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EACL,2BAA2B,EAC3B,qBAAqB,EACrB,uBAAuB,GACxB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,gBAAgB,EAChB,mBAAmB,EACnB,oBAAoB,EACpB,kBAAkB,EAClB,cAAc,EACd,MAAM,EACN,UAAU,EACV,eAAe,EACf,cAAc,EACd,gBAAgB,EAChB,UAAU,EACV,iBAAiB,EACjB,wBAAwB,EACxB,UAAU,EACV,iBAAiB,EACjB,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,KAAK,EACL,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EACT,eAAe,EACf,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAG7C,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AAC5D,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAAE,uBAAuB,EAAE,MAAM,iCAAiC,CAAC;AAC7F,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAClF,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AACpE,OAAO,EAAE,aAAa,EAAE,MAAM,2BAA2B,CAAC;AAC1D,OAAO,EAAE,8BAA8B,EAAE,MAAM,mDAAmD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.js b/node_modules/@azure/ms-rest-js/es/lib/msRest.js new file mode 100644 index 0000000000..ba1ead7f77 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+/// +export { WebResource, } from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { HttpHeaders } from "./httpHeaders"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { ServiceClient, flattenResponse, } from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { BaseRequestPolicy, RequestPolicyOptions, } from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +export { deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; +export { MapperType, Serializer, serializeObject, } from "./serializer"; +export { stripRequest, stripResponse, delay, executePromisesSequentially, generateUuid, encodeUri, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isNode, isDuration, } from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; +// Credentials +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials } from "./credentials/apiKeyCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from "./credentials/domainCredentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; +//# sourceMappingURL=msRest.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map b/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map new file mode 100644 index 0000000000..038a369ca3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/msRest.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"msRest.js","sourceRoot":"","sources":["../../lib/msRest.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,yCAAyC;AAEzC,OAAO,EACL,WAAW,GASZ,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAGxD,OAAO,EAAc,WAAW,EAAmC,MAAM,eAAe,CAAC;AAGzF,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAUxC,OAAO,EAGL,aAAa,EAEb,eAAe,GAChB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAC7C,OAAO,EAAE,SAAS,EAAE,MAAM,sBAAsB,CAAC;AACjD,OAAO,EACL,iBAAiB,EAGjB,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AACzE,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,uBAAuB,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAmB,cAAc,EAAE,MAAM,2BAA2B,CAAC;AAC5E,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAEL,eAAe,EACf,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAEL,qBAAqB,EACrB,uBAAuB,GACxB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EAcV,UAAU,EAEV,eAAe,GAChB,MAAM,cAAc,CAAC;AACtB,OAAO,EACL,YAAY,EACZ,aAAa,EACb,KAAK,EACL,2BAA2B,EAC3B,YAAY,EACZ,SAAS,EAET,iBAAiB,EACjB,wBAAwB,EACxB,WAAW,EACX,WAAW,EACX,MAAM,EACN,UAAU,GACX,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,OAAO,CAAC;AAE7C,cAAc;AACd,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAElE,OAAO,EAAE,8BAA8B,EAAE,MAAM,8CAA8C,CAAC;AAC9F,OAAO,EAAE,iBAAiB,EAA2B,MAAM,iCAAiC,CAAC;AAE7F,OAAO,EAAE,gBAAgB,EAAE,MAAM,gCAAgC,CAAC;AAClE,OAAO,EAAE,iBAAiB,EAAE,MAAM,iCAAiC,CAAC;AAEpE,OAAO,EAAE,8BAA8B,EAAE,MAAM,mDAAmD,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts new file mode 100644 index 0000000000..1a5dec442d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts @@ -0,0 +1,10 @@ +import { CommonRequestInfo, CommonRequestInit, CommonResponse, FetchHttpClient } from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class NodeFetchHttpClient extends FetchHttpClient { + private readonly cookieJar; + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise; + prepareRequest(httpRequest: WebResourceLike): Promise>; + processRequest(operationResponse: HttpOperationResponse): Promise; +} +//# sourceMappingURL=nodeFetchHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map new file mode 100644 index 0000000000..faa1930ce6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.d.ts","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAQA,OAAO,EACL,iBAAiB,EACjB,iBAAiB,EACjB,cAAc,EACd,eAAe,EAChB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAGhD,qBAAa,mBAAoB,SAAQ,eAAe;IACtD,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAuD;IAE3E,KAAK,CAAC,KAAK,EAAE,iBAAiB,EAAE,IAAI,CAAC,EAAE,iBAAiB,GAAG,OAAO,CAAC,cAAc,CAAC;IAIlF,cAAc,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;IAgD3E,cAAc,CAAC,iBAAiB,EAAE,qBAAqB,GAAG,OAAO,CAAC,IAAI,CAAC;CAqB9E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js 
b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js new file mode 100644 index 0000000000..f919e6e280 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js @@ -0,0 +1,109 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __extends, __generator } from "tslib"; +import * as tough from "tough-cookie"; +import * as http from "http"; +import * as https from "https"; +import node_fetch from "node-fetch"; +import { FetchHttpClient, } from "./fetchHttpClient"; +import { createProxyAgent } from "./proxyAgent"; +var NodeFetchHttpClient = /** @class */ (function (_super) { + __extends(NodeFetchHttpClient, _super); + function NodeFetchHttpClient() { + var _this = _super !== null && _super.apply(this, arguments) || this; + _this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + return _this; + } + NodeFetchHttpClient.prototype.fetch = function (input, init) { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, node_fetch(input, init)]; + }); + }); + }; + NodeFetchHttpClient.prototype.prepareRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var requestInit, cookieString, _a, httpAgent, httpsAgent, tunnel, options, agent; + var _this = this; + return __generator(this, function (_b) { + switch (_b.label) { + case 0: + requestInit = {}; + if (!(this.cookieJar && !httpRequest.headers.get("Cookie"))) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.getCookieString(httpRequest.url, function (err, cookie) { + if (err) { + reject(err); + } + else { + resolve(cookie); + } + }); + })]; + case 1: + cookieString = _b.sent(); + httpRequest.headers.set("Cookie", cookieString); + _b.label = 2; + case 2: + if (httpRequest.agentSettings) { + _a = httpRequest.agentSettings, httpAgent = _a.http, httpsAgent = _a.https; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } + else if (httpAgent) { + requestInit.agent = httpAgent; + } + } + else if (httpRequest.proxySettings) { + tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); + requestInit.agent = tunnel.agent; + } + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } + else { + options = { keepAlive: true }; + agent = httpRequest.url.startsWith("https") + ? 
new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + return [2 /*return*/, requestInit]; + } + }); + }); + }; + NodeFetchHttpClient.prototype.processRequest = function (operationResponse) { + return __awaiter(this, void 0, void 0, function () { + var setCookieHeader_1; + var _this = this; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!this.cookieJar) return [3 /*break*/, 2]; + setCookieHeader_1 = operationResponse.headers.get("Set-Cookie"); + if (!(setCookieHeader_1 != undefined)) return [3 /*break*/, 2]; + return [4 /*yield*/, new Promise(function (resolve, reject) { + _this.cookieJar.setCookie(setCookieHeader_1, operationResponse.request.url, { ignoreError: true }, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + })]; + case 1: + _a.sent(); + _a.label = 2; + case 2: return [2 /*return*/]; + } + }); + }); + }; + return NodeFetchHttpClient; +}(FetchHttpClient)); +export { NodeFetchHttpClient }; +//# sourceMappingURL=nodeFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map new file mode 100644 index 0000000000..5834bfb9e7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/nodeFetchHttpClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"nodeFetchHttpClient.js","sourceRoot":"","sources":["../../lib/nodeFetchHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,UAAU,MAAM,YAAY,CAAC;AAEpC,OAAO,EAIL,eAAe,GAChB,MAAM,mBAAmB,CAAC;AAG3B,OAAO,EAAE,gBAAgB,EAAc,MAAM,cAAc,CAAC;AAE5D;IAAyC,uCAAe;IAAxD;QAAA,qEA4EC;QA3EkB,eAAS,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;;IA2EnF,CAAC;IAzEO,mCAAK,GAAX,UAAY,KAAwB,EAAE,IAAwB;;;gBAC5D,sBAAQ,UAAU,CAAC,KAAK,EAAE,IAAI,CAAwC,EAAC;;;KACxE;IAEK,4CAAc,GAApB,UAAqB,WAA4B;;;;;;;wBACzC,WAAW,GAA2C,EAAE,CAAC;6BAE3D,CAAA,IAAI,CAAC,SAAS,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAA,EAApD,wBAAoD;wBACjC,qBAAM,IAAI,OAAO,CAAS,UAAC,OAAO,EAAE,MAAM;gCAC7D,KAAI,CAAC,SAAU,CAAC,eAAe,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAG,EAAE,MAAM;oCAC3D,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,CAAC,MAAM,CAAC,CAAC;qCACjB;gCACH,CAAC,CAAC,CAAC;4BACL,CAAC,CAAC,EAAA;;wBARI,YAAY,GAAG,SAQnB;wBAEF,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC;;;wBAGlD,IAAI,WAAW,CAAC,aAAa,EAAE;4BACvB,KAAyC,WAAW,CAAC,aAAa,EAA1D,SAAS,UAAA,EAAS,UAAU,WAAA,CAA+B;4BACzE,IAAI,UAAU,IAAI,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;gCACrD,WAAW,CAAC,KAAK,GAAG,UAAU,CAAC;6BAChC;iCAAM,IAAI,SAAS,EAAE;gCACpB,WAAW,CAAC,KAAK,GAAG,SAAS,CAAC;6BAC/B;yBACF;6BAAM,IAAI,WAAW,CAAC,aAAa,EAAE;4BAC9B,MAAM,GAAe,gBAAgB,CACzC,WAAW,CAAC,GAAG,EACf,WAAW,CAAC,aAAa,EACzB,WAAW,CAAC,OAAO,CACpB,CAAC;4BACF,WAAW,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC;yBAClC;wBAED,IAAI,WAAW,CAAC,SAAS,KAAK,IAAI,EAAE;4BAClC,IAAI,WAAW,CAAC,KAAK,EAAE;gCACrB,WAAW,CAAC,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;6BACpC;iCAAM;gCACC,OAAO,GAA2C,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;gCACtE,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,OAAO,CAAC;oCAC/C,CAAC,CAAC,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC;oCAC1B,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;gCAC5B,WAAW,CAAC,KAAK,GAAG,KAAK,CAAC;6BAC3B;yBACF;wBAED,sBAAO,WAAW,EAAC;;;;KACpB;IAEK,4CAAc,GAApB,UAAqB,iBAAwC;;;;;;;6BACvD,IAAI,CAAC,SAAS,EAAd,wBAAc;wBACV,oBAAkB,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;6BAChE,CAAA,iBAAe,I
AAI,SAAS,CAAA,EAA5B,wBAA4B;wBAC9B,qBAAM,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gCAChC,KAAI,CAAC,SAAU,CAAC,SAAS,CACvB,iBAAe,EACf,iBAAiB,CAAC,OAAO,CAAC,GAAG,EAC7B,EAAE,WAAW,EAAE,IAAI,EAAE,EACrB,UAAC,GAAG;oCACF,IAAI,GAAG,EAAE;wCACP,MAAM,CAAC,GAAG,CAAC,CAAC;qCACb;yCAAM;wCACL,OAAO,EAAE,CAAC;qCACX;gCACH,CAAC,CACF,CAAC;4BACJ,CAAC,CAAC,EAAA;;wBAbF,SAaE,CAAC;;;;;;KAGR;IACH,0BAAC;AAAD,CAAC,AA5ED,CAAyC,eAAe,GA4EvD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts new file mode 100644 index 0000000000..7eadcda917 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts @@ -0,0 +1,15 @@ +import { RequestOptionsBase } from "./webResource"; +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} +//# sourceMappingURL=operationArguments.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map new file mode 100644 index 0000000000..e05a58fee3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.d.ts","sourceRoot":"","sources":["../../lib/operationArguments.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;OAEG;IACH,CAAC,aAAa,EAAE,MAAM,GAAG,GAAG,CAAC;IAE7B;;OAEG;IACH,OAAO,CAAC,EAAE,kBAAkB,CAAC;CAC9B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js new file mode 100644 index 0000000000..c8bbde71fb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +//# sourceMappingURL=operationArguments.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map new file mode 100644 index 0000000000..ee2fe5c7fb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationArguments.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationArguments.js","sourceRoot":"","sources":["../../lib/operationArguments.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts new file mode 100644 index 0000000000..f5b78021bf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts @@ -0,0 +1,51 @@ +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { Mapper } from "./serializer"; +export declare type ParameterPath = string | string[] | { + [propertyName: string]: ParameterPath; +}; +/** + * A common interface that all Operation parameter's extend. 
+ */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export declare function getPathStringFromParameter(parameter: OperationParameter): string; +export declare function getPathStringFromParameterPath(parameterPath: ParameterPath, mapper: Mapper): string; +//# sourceMappingURL=operationParameter.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map new file mode 100644 index 0000000000..2cc31fa21a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.d.ts","sourceRoot":"","sources":["../../lib/operationParameter.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC,oBAAY,aAAa,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,aAAa,CAAA;CAAE,CAAC;AAE1F;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,EAAE,aAAa,CAAC;IAE7B;;OAEG;IACH,MAAM,EAAE,MAAM,CAAC;CAChB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAsB,SAAQ,kBAAkB;IAC/D;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;CACxB;AAED;;;GAGG;AACH,MAAM,WAAW,uBAAwB,SAAQ,kBAAkB;IACjE;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IAEvB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,qBAAqB,CAAC;CAC1C;AAED;;;;GAIG;AACH,wBAAgB,0BAA0B,CAAC,SAAS,EAAE,kBAAkB,GAAG,MAAM,CAEhF;AAED,wBAAgB,8BAA8B,CAC5C,aAAa,EAAE,aAAa,EAC5B,MAAM,EAAE,MAAM,GACb,MAAM,CAUR"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js new file mode 100644 index 0000000000..8ab319a67b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. 
+ */ +export function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +export function getPathStringFromParameterPath(parameterPath, mapper) { + var result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=operationParameter.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map new file mode 100644 index 0000000000..ba98f5f1bf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationParameter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationParameter.js","sourceRoot":"","sources":["../../lib/operationParameter.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAkD/F;;;;GAIG;AACH,MAAM,UAAU,0BAA0B,CAAC,SAA6B;IACtE,OAAO,8BAA8B,CAAC,SAAS,CAAC,aAAa,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC;AACnF,CAAC;AAED,MAAM,UAAU,8BAA8B,CAC5C,aAA4B,EAC5B,MAAc;IAEd,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,MAAM,GAAG,aAAa,CAAC;KACxB;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QACvC,MAAM,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAClC;SAAM;QACL,MAAM,GAAG,MAAM,CAAC,cAAe,CAAC;KACjC;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts new file mode 100644 index 0000000000..f99c6003b2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts @@ -0,0 +1,15 @@ +import { Mapper } from "./serializer"; +/** + * An OperationResponse that can be returned from an operation request for a single status code. + */ +export interface OperationResponse { + /** + * The mapper that will be used to deserialize the response headers. + */ + headersMapper?: Mapper; + /** + * The mapper that will be used to deserialize the response body. + */ + bodyMapper?: Mapper; +} +//# sourceMappingURL=operationResponse.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map new file mode 100644 index 0000000000..d96474af55 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.d.ts","sourceRoot":"","sources":["../../lib/operationResponse.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAC;AAEtC;;GAEG;AACH,MAAM,WAAW,iBAAiB;IAChC;;OAEG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js new file mode 100644 index 0000000000..b6e3281316 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js @@ -0,0 +1,3 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+//# sourceMappingURL=operationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map new file mode 100644 index 0000000000..a31b9a5f6a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationResponse.js","sourceRoot":"","sources":["../../lib/operationResponse.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts new file mode 100644 index 0000000000..9be13076ae --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts @@ -0,0 +1,68 @@ +import { OperationParameter, OperationQueryParameter, OperationURLParameter } from "./operationParameter"; +import { OperationResponse } from "./operationResponse"; +import { Serializer } from "./serializer"; +import { HttpMethods } from "./webResource"; +/** + * A specification that defines an operation. + */ +export interface OperationSpec { + /** + * The serializer to use in this operation. + */ + readonly serializer: Serializer; + /** + * The HTTP method that should be used by requests for this operation. + */ + readonly httpMethod: HttpMethods; + /** + * The URL that was provided in the service's specification. This will still have all of the URL + * template variables in it. If this is not provided when the OperationSpec is created, then it + * will be populated by a "baseUri" property on the ServiceClient. + */ + readonly baseUrl?: string; + /** + * The fixed path for this operation's URL. This will still have all of the URL template variables + * in it. + */ + readonly path?: string; + /** + * The content type of the request body. This value will be used as the "Content-Type" header if + * it is provided. + */ + readonly contentType?: string; + /** + * The parameter that will be used to construct the HTTP request's body. + */ + readonly requestBody?: OperationParameter; + /** + * Whether or not this operation uses XML request and response bodies. + */ + readonly isXML?: boolean; + /** + * The parameters to the operation method that will be substituted into the constructed URL. + */ + readonly urlParameters?: ReadonlyArray<OperationURLParameter>; + /** + * The parameters to the operation method that will be added to the constructed URL's query. + */ + readonly queryParameters?: ReadonlyArray<OperationQueryParameter>; + /** + * The parameters to the operation method that will be converted to headers on the operation's + * HTTP request. + */ + readonly headerParameters?: ReadonlyArray<OperationParameter>; + /** + * The parameters to the operation method that will be used to create a formdata body for the + * operation's HTTP request. + */ + readonly formDataParameters?: ReadonlyArray<OperationParameter>; + /** + * The different types of responses that this operation can return based on what status code is + * returned. 
+ */ + readonly responses: { + [responseCode: string]: OperationResponse; + }; +} +export declare function isStreamOperation(operationSpec: OperationSpec): boolean; +//# sourceMappingURL=operationSpec.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map new file mode 100644 index 0000000000..f2d4efb079 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.d.ts","sourceRoot":"","sources":["../../lib/operationSpec.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,kBAAkB,EAClB,uBAAuB,EACvB,qBAAqB,EACtB,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AACtD,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAE5C;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,UAAU,CAAC;IAEhC;;OAEG;IACH,QAAQ,CAAC,UAAU,EAAE,WAAW,CAAC;IAEjC;;;;OAIG;IACH,QAAQ,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE1B;;;OAGG;IACH,QAAQ,CAAC,IAAI,CAAC,EAAE,MAAM,CAAC;IAEvB;;;OAGG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,MAAM,CAAC;IAE9B;;OAEG;IACH,QAAQ,CAAC,WAAW,CAAC,EAAE,kBAAkB,CAAC;IAE1C;;OAEG;IACH,QAAQ,CAAC,KAAK,CAAC,EAAE,OAAO,CAAC;IAEzB;;OAEG;IACH,QAAQ,CAAC,aAAa,CAAC,EAAE,aAAa,CAAC,qBAAqB,CAAC,CAAC;IAE9D;;OAEG;IACH,QAAQ,CAAC,eAAe,CAAC,EAAE,aAAa,CAAC,uBAAuB,CAAC,CAAC;IAElE;;;OAGG;IACH,QAAQ,CAAC,gBAAgB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAE9D;;;OAGG;IACH,QAAQ,CAAC,kBAAkB,CAAC,EAAE,aAAa,CAAC,kBAAkB,CAAC,CAAC;IAEhE;;;OAGG;IACH,QAAQ,CAAC,SAAS,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,iBAAiB,CAAA;KAAE,CAAC;CACnE;AAED,wBAAgB,iBAAiB,CAAC,aAAa,EAAE,aAAa,GAAG,OAAO,CAavE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js new file mode 100644 index 0000000000..06b8223d6c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { MapperType } from "./serializer"; +export function isStreamOperation(operationSpec) { + var result = false; + for (var statusCode in operationSpec.responses) { + var operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result = true; + break; + } + } + return result; +} +//# sourceMappingURL=operationSpec.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map new file mode 100644 index 0000000000..667db1ad87 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/operationSpec.js.map @@ -0,0 +1 @@ +{"version":3,"file":"operationSpec.js","sourceRoot":"","sources":["../../lib/operationSpec.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAQ/F,OAAO,EAAE,UAAU,EAAc,MAAM,cAAc,CAAC;AA2EtD,MAAM,UAAU,iBAAiB,CAAC,aAA4B;IAC5D,IAAI,MAAM,GAAG,KAAK,CAAC;IACnB,KAAK,IAAM,UAAU,IAAI,aAAa,CAAC,SAAS,EAAE;QAChD,IAAM,iBAAiB,GAAsB,aAAa,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;QACjF,IACE,iBAAiB,CAAC,UAAU;YAC5B,iBAAiB,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,MAAM,EAC5D;YACA,MAAM,GAAG,IAAI,CAAC;YACd,MAAM;SACP;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts new file mode 100644 index 0000000000..7893df0c5d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts @@ -0,0 +1,10 @@ +import { AgentSettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory; +export declare class AgentPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=agentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map new file mode 100644 index 0000000000..eefa217f42 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB;IAKjE,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js new file mode 100644 index 0000000000..ac61fd5a69 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +var agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); +export function agentPolicy(_agentSettings) { + return { + create: function (_nextPolicy, _options) { + throw agentNotSupportedInBrowser; + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw agentNotSupportedInBrowser; + return _this; + } + AgentPolicy.prototype.sendRequest = function (_request) { + throw agentNotSupportedInBrowser; + }; + return AgentPolicy; +}(BaseRequestPolicy)); +export { AgentPolicy }; +//# sourceMappingURL=agentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map new file mode 100644 index 0000000000..eec280a1d1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAChD,qBAAY,UAAyB,EAAE,OAAiC;QAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,MAAM,0BAA0B,CAAC;;IACnC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;IACH,kBAAC;AAAD,CAAC,AATD,CAAiC,iBAAiB,GASjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts new file mode 100644 index 0000000000..84d197e029 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts @@ -0,0 +1,11 @@ +import { AgentSettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory; +export declare class AgentPolicy extends BaseRequestPolicy { + agentSettings: AgentSettings; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, agentSettings: AgentSettings); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=agentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map new file mode 100644 index 0000000000..429e0a5249 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"agentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD,wBAAgB,WAAW,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAM/E;AAED,qBAAa,WAAY,SAAQ,iBAAiB;IAChD,aAAa,EAAE,aAAa,CAAC;gBAG3B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,aAAa,EAAE,aAAa;IAMvB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js new file mode 100644 index 0000000000..4cdfb5acbe --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function agentPolicy(agentSettings) { + return { + create: function (nextPolicy, options) { + return new AgentPolicy(nextPolicy, options, agentSettings); + }, + }; +} +var AgentPolicy = /** @class */ (function (_super) { + __extends(AgentPolicy, _super); + function AgentPolicy(nextPolicy, options, agentSettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.agentSettings = agentSettings; + return _this; + } + AgentPolicy.prototype.sendRequest = function (request) { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + }; + return AgentPolicy; +}(BaseRequestPolicy)); +export { AgentPolicy }; +//# sourceMappingURL=agentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map new file mode 100644 index 0000000000..15d82ec233 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/agentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/agentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,MAAM,UAAU,WAAW,CAAC,aAA6B;IACvD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;IACrC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;YAC1B,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,kBAAC;AAAD,CAAC,AAlBD,CAAiC,iBAAiB,GAkBjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts new file mode 100644 index 0000000000..de2ef2c408 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts @@ -0,0 +1,38 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from 
"./requestPolicy"; +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export declare function deserializationPolicy(deserializationContentTypes?: DeserializationContentTypes): RequestPolicyFactory; +export declare const defaultJsonContentTypes: string[]; +export declare const defaultXmlContentTypes: string[]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +export declare class DeserializationPolicy extends BaseRequestPolicy { + readonly jsonContentTypes: string[]; + readonly xmlContentTypes: string[]; + constructor(nextPolicy: RequestPolicy, deserializationContentTypes: DeserializationContentTypes | undefined, options: RequestPolicyOptionsLike); + sendRequest(request: WebResourceLike): Promise; +} +export declare function deserializeResponseBody(jsonContentTypes: string[], xmlContentTypes: string[], response: HttpOperationResponse): Promise; +//# sourceMappingURL=deserializationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map new file mode 100644 index 0000000000..f5771b5b9c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"deserializationPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAOjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB;;;GAGG;AACH,MAAM,WAAW,2BAA2B;IAC1C;;;OAGG;IACH,IAAI,CAAC,EAAE,MAAM,EAAE,CAAC;IAEhB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,EAAE,CAAC;CAChB;AAED;;;GAGG;AACH,wBAAgB,qBAAqB,CACnC,2BAA2B,CAAC,EAAE,2BAA2B,GACxD,oBAAoB,CAMtB;AAED,eAAO,MAAM,uBAAuB,UAAoC,CAAC;AACzE,eAAO,MAAM,sBAAsB,UAA8C,CAAC;AAElF;;;GAGG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,SAAgB,gBAAgB,EAAE,MAAM,EAAE,CAAC;IAC3C,SAAgB,eAAe,EAAE,MAAM,EAAE,CAAC;gBAGxC,UAAU,EAAE,aAAa,EACzB,2BAA2B,EAAE,2BAA2B,GAAG,SAAS,EACpE,OAAO,EAAE,wBAAwB;IAUtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAOnF;AAsCD,wBAAgB,uBAAuB,CACrC,gBAAgB,EAAE,MAAM,EAAE,EAC1B,eAAe,EAAE,MAAM,EAAE,EACzB,QAAQ,EAAE,qBAAqB,GAC9B,OAAO,CAAC,qBAAqB,CAAC,CA4HhC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js new file mode 100644 index 0000000000..833bb48e2e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __awaiter, __extends, __generator } from "tslib"; +import { isStreamOperation } from "../operationSpec"; +import { RestError } from "../restError"; +import { MapperType } from "../serializer"; +import * as utils from "../util/utils"; +import { parseXML } from "../util/xml"; +import { BaseRequestPolicy, } from "./requestPolicy"; +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy(deserializationContentTypes) { + return { + create: function (nextPolicy, options) { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} +export var defaultJsonContentTypes = ["application/json", "text/json"]; +export var defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. + */ +var DeserializationPolicy = /** @class */ (function (_super) { + __extends(DeserializationPolicy, _super); + function DeserializationPolicy(nextPolicy, deserializationContentTypes, options) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + _this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + return _this; + } + DeserializationPolicy.prototype.sendRequest = function (request) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy + .sendRequest(request) + .then(function (response) { + return deserializeResponseBody(_this.jsonContentTypes, _this.xmlContentTypes, response); + })]; + }); + }); + }; + return DeserializationPolicy; +}(BaseRequestPolicy)); +export { DeserializationPolicy }; +function getOperationResponse(parsedResponse) { + var result; + var request = parsedResponse.request; + var operationSpec = request.operationSpec; + if (operationSpec) { + var operationResponseGetter = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + var shouldDeserialize = parsedResponse.request.shouldDeserialize; + var result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +export function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response) { + return parse(jsonContentTypes, xmlContentTypes, response).then(function (parsedResponse) { + var shouldDeserialize = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + var operationSpec = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + var statusCode = parsedResponse.status; + var expectedStatusCodes = Object.keys(operationSpec.responses); + var hasNoExpectedStatusCodes = expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + var responseSpec = getOperationResponse(parsedResponse); + var isExpectedStatusCode = 
hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + var defaultResponseSpec = operationSpec.responses.default; + if (defaultResponseSpec) { + var initialErrorMessage = isStreamOperation(operationSpec) + ? "Unexpected status code: " + statusCode + : parsedResponse.bodyAsText; + var error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = utils.stripRequest(parsedResponse.request); + error.response = utils.stripResponse(parsedResponse); + var parsedErrorResponse = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + var defaultResponseBodyMapper = defaultResponseSpec.bodyMapper; + if (defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError") { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } + else { + var internalError = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + if (defaultResponseBodyMapper) { + var valueToDeserialize = parsedErrorResponse; + if (operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName] + : []; + } + error.body = operationSpec.serializer.deserialize(defaultResponseBodyMapper, valueToDeserialize, "error.body"); + } + } + } + catch (defaultError) { + error.message = "Error \"" + defaultError.message + "\" occurred in deserializing the responseBody - \"" + parsedResponse.bodyAsText + "\" for the default response."; + } + return Promise.reject(error); + } + } + else if (responseSpec) { + if (responseSpec.bodyMapper) { + var valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody"); + } + catch (error) { + var restError = new RestError("Error " + error + " occurred in deserializing the responseBody - " + parsedResponse.bodyAsText); + restError.request = utils.stripRequest(parsedResponse.request); + restError.response = utils.stripResponse(parsedResponse); + return Promise.reject(restError); + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.rawHeaders(), "operationRes.parsedHeaders"); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} +function parse(jsonContentTypes, xmlContentTypes, operationResponse) { + var errorHandler = function (err) { + var msg = "Error \"" + err + "\" occurred while parsing the response body - " + operationResponse.bodyAsText + "."; + var errCode = err.code || RestError.PARSE_ERROR; + var e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse, operationResponse.bodyAsText); + return Promise.reject(e); + }; + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + var text_1 = operationResponse.bodyAsText; + var contentType = operationResponse.headers.get("Content-Type") || ""; + var contentComponents = !contentType + ? [] + : contentType.split(";").map(function (component) { return component.toLowerCase(); }); + if (contentComponents.length === 0 || + contentComponents.some(function (component) { return jsonContentTypes.indexOf(component) !== -1; })) { + return new Promise(function (resolve) { + operationResponse.parsedBody = JSON.parse(text_1); + resolve(operationResponse); + }).catch(errorHandler); + } + else if (contentComponents.some(function (component) { return xmlContentTypes.indexOf(component) !== -1; })) { + return parseXML(text_1) + .then(function (body) { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + return Promise.resolve(operationResponse); +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map new file mode 100644 index 0000000000..44123c3c27 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/deserializationPolicy.js.map @@ -0,0 +1 @@ 
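The parse() helper in deserializationPolicy.js above decides between JSON.parse and parseXML purely from the response's Content-Type header, and falls back to JSON parsing when that header is missing. A minimal standalone sketch of that dispatch, reusing the same default content-type lists the file exports; the names classifyBody and BodyFormat are illustrative only and are not part of the vendored package.

const jsonContentTypes = ["application/json", "text/json"];
const xmlContentTypes = ["application/xml", "application/atom+xml"];

type BodyFormat = "json" | "xml" | "raw";

function classifyBody(contentTypeHeader: string | undefined): BodyFormat {
  // "application/json; charset=utf-8" -> ["application/json", "charset=utf-8"]
  const components = (contentTypeHeader ?? "")
    .split(";")
    .map((c) => c.trim().toLowerCase());
  // A missing Content-Type falls back to JSON parsing, as the policy above does.
  if (!contentTypeHeader || components.some((c) => jsonContentTypes.includes(c))) {
    return "json";
  }
  if (components.some((c) => xmlContentTypes.includes(c))) {
    return "xml";
  }
  return "raw";
}

// classifyBody("application/json; charset=utf-8") -> "json"
// classifyBody("application/atom+xml")            -> "xml"
// classifyBody("text/plain")                      -> "raw"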
+{"version":3,"file":"deserializationPolicy.js","sourceRoot":"","sources":["../../../lib/policies/deserializationPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAI/F,OAAO,EAAiB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AACpE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAU,UAAU,EAAE,MAAM,eAAe,CAAC;AACnD,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AACvC,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAoBzB;;;GAGG;AACH,MAAM,UAAU,qBAAqB,CACnC,2BAAyD;IAEzD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,2BAA2B,EAAE,OAAO,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,IAAM,uBAAuB,GAAG,CAAC,kBAAkB,EAAE,WAAW,CAAC,CAAC;AACzE,MAAM,CAAC,IAAM,sBAAsB,GAAG,CAAC,iBAAiB,EAAE,sBAAsB,CAAC,CAAC;AAElF;;;GAGG;AACH;IAA2C,yCAAiB;IAI1D,+BACE,UAAyB,EACzB,2BAAoE,EACpE,OAAiC;QAHnC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAM3B;QAJC,KAAI,CAAC,gBAAgB;YACnB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,IAAI,CAAC,IAAI,uBAAuB,CAAC;QAC/F,KAAI,CAAC,eAAe;YAClB,CAAC,2BAA2B,IAAI,2BAA2B,CAAC,GAAG,CAAC,IAAI,sBAAsB,CAAC;;IAC/F,CAAC;IAEY,2CAAW,GAAxB,UAAyB,OAAwB;;;;gBAC/C,sBAAO,IAAI,CAAC,WAAW;yBACpB,WAAW,CAAC,OAAO,CAAC;yBACpB,IAAI,CAAC,UAAC,QAA+B;wBACpC,OAAA,uBAAuB,CAAC,KAAI,CAAC,gBAAgB,EAAE,KAAI,CAAC,eAAe,EAAE,QAAQ,CAAC;oBAA9E,CAA8E,CAC/E,EAAC;;;KACL;IACH,4BAAC;AAAD,CAAC,AAxBD,CAA2C,iBAAiB,GAwB3D;;AAED,SAAS,oBAAoB,CAC3B,cAAqC;IAErC,IAAI,MAAqC,CAAC;IAC1C,IAAM,OAAO,GAAoB,cAAc,CAAC,OAAO,CAAC;IACxD,IAAM,aAAa,GAA8B,OAAO,CAAC,aAAa,CAAC;IACvE,IAAI,aAAa,EAAE;QACjB,IAAM,uBAAuB,GAKa,OAAO,CAAC,uBAAuB,CAAC;QAC1E,IAAI,CAAC,uBAAuB,EAAE;YAC5B,MAAM,GAAG,aAAa,CAAC,SAAS,CAAC,cAAc,CAAC,MAAM,CAAC,CAAC;SACzD;aAAM;YACL,MAAM,GAAG,uBAAuB,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;SACjE;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,yBAAyB,CAAC,cAAqC;IACtE,IAAM,iBAAiB,GACrB,cAAc,CAAC,OAAO,CAAC,iBAAiB,CAAC;IAC3C,IAAI,MAAe,CAAC;IACpB,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,MAAM,GAAG,IAAI,CAAC;KACf;SAAM,IAAI,OAAO,iBAAiB,KAAK,SAAS,EAAE;QACjD,MAAM,GAAG,iBAAiB,CAAC;KAC5B;SAAM;QACL,MAAM,GAAG,iBAAiB,CAAC,cAAc,CAAC,CAAC;KAC5C;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,uBAAuB,CACrC,gBAA0B,EAC1B,eAAyB,EACzB,QAA+B;IAE/B,OAAO,KAAK,CAAC,gBAAgB,EAAE,eAAe,EAAE,QAAQ,CAAC,CAAC,IAAI,CAAC,UAAC,cAAc;QAC5E,IAAM,iBAAiB,GAAY,yBAAyB,CAAC,cAAc,CAAC,CAAC;QAC7E,IAAI,iBAAiB,EAAE;YACrB,IAAM,aAAa,GAA8B,cAAc,CAAC,OAAO,CAAC,aAAa,CAAC;YACtF,IAAI,aAAa,IAAI,aAAa,CAAC,SAAS,EAAE;gBAC5C,IAAM,UAAU,GAAW,cAAc,CAAC,MAAM,CAAC;gBAEjD,IAAM,mBAAmB,GAAa,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC;gBAE3E,IAAM,wBAAwB,GAC5B,mBAAmB,CAAC,MAAM,KAAK,CAAC;oBAChC,CAAC,mBAAmB,CAAC,MAAM,KAAK,CAAC,IAAI,mBAAmB,CAAC,CAAC,CAAC,KAAK,SAAS,CAAC,CAAC;gBAE7E,IAAM,YAAY,GAAkC,oBAAoB,CAAC,cAAc,CAAC,CAAC;gBAEzF,IAAM,oBAAoB,GAAY,wBAAwB;oBAC5D,CAAC,CAAC,GAAG,IAAI,UAAU,IAAI,UAAU,GAAG,GAAG;oBACvC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;gBACnB,IAAI,CAAC,oBAAoB,EAAE;oBACzB,IAAM,mBAAmB,GAAsB,aAAa,CAAC,SAAS,CAAC,OAAO,CAAC;oBAC/E,IAAI,mBAAmB,EAAE;wBACvB,IAAM,mBAAmB,GAAW,iBAAiB,CAAC,aAAa,CAAC;4BAClE,CAAC,CAAC,6BAA2B,UAAY;4BACzC,CAAC,CAAE,cAAc,CAAC,UAAqB,CAAC;wBAE1C,IAAM,KAAK,GAAG,IAAI,SAAS,CAAC,mBAAmB,CAAC,CAAC;wBACjD,KAAK,CAAC,UAAU,GAAG,UAAU,CAAC;wBAC9B,KAAK,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;wBAC3D,KAAK,CAAC,QAAQ,GAAG,KAAK,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC;wBAErD,IAAI,mBAAmB,GAA2B,cAAc,CAAC,UAAU,CAAC;wBAC5E,IAAI;4BACF,IAAI,mBAAmB,EAAE;gCACvB,IAAM,yBAAyB,GAC7B,mBAAmB,CAAC,UAAU,CAAC;gCACjC,IACE,yBAAyB;oCACzB,yBAAyB,CAAC,cAAc,KAAK,YAAY,EACzD;oCACA,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,mBAAmB,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCACjD;oCACD,IAAI,mBAAmB,CAAC,IAAI,EAAE;wCAC5B,KAAK,CAAC,IAAI,GAAG,mBAAmB,CAAC,IAAI,CAAC;qCAC
vC;oCACD,IAAI,mBAAmB,CAAC,OAAO,EAAE;wCAC/B,KAAK,CAAC,OAAO,GAAG,mBAAmB,CAAC,OAAO,CAAC;qCAC7C;iCACF;qCAAM;oCACL,IAAI,aAAa,GAAQ,mBAAmB,CAAC;oCAC7C,IAAI,mBAAmB,CAAC,KAAK,EAAE;wCAC7B,aAAa,GAAG,mBAAmB,CAAC,KAAK,CAAC;qCAC3C;oCAED,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,IAAI,CAAC;oCAChC,IAAI,aAAa,CAAC,OAAO,EAAE;wCACzB,KAAK,CAAC,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC;qCACvC;iCACF;gCAED,IAAI,yBAAyB,EAAE;oCAC7B,IAAI,kBAAkB,GAAQ,mBAAmB,CAAC;oCAClD,IACE,aAAa,CAAC,KAAK;wCACnB,yBAAyB,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAC3D;wCACA,kBAAkB;4CAChB,OAAO,mBAAmB,KAAK,QAAQ;gDACrC,CAAC,CAAC,mBAAmB,CAAC,yBAAyB,CAAC,cAAe,CAAC;gDAChE,CAAC,CAAC,EAAE,CAAC;qCACV;oCACD,KAAK,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC/C,yBAAyB,EACzB,kBAAkB,EAClB,YAAY,CACb,CAAC;iCACH;6BACF;yBACF;wBAAC,OAAO,YAAY,EAAE;4BACrB,KAAK,CAAC,OAAO,GAAG,aAAW,YAAY,CAAC,OAAO,0DAAqD,cAAc,CAAC,UAAU,iCAA8B,CAAC;yBAC7J;wBACD,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;qBAC9B;iBACF;qBAAM,IAAI,YAAY,EAAE;oBACvB,IAAI,YAAY,CAAC,UAAU,EAAE;wBAC3B,IAAI,kBAAkB,GAAQ,cAAc,CAAC,UAAU,CAAC;wBACxD,IAAI,aAAa,CAAC,KAAK,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,CAAC,QAAQ,EAAE;4BACpF,kBAAkB;gCAChB,OAAO,kBAAkB,KAAK,QAAQ;oCACpC,CAAC,CAAC,kBAAkB,CAAC,YAAY,CAAC,UAAU,CAAC,cAAe,CAAC;oCAC7D,CAAC,CAAC,EAAE,CAAC;yBACV;wBACD,IAAI;4BACF,cAAc,CAAC,UAAU,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CAC9D,YAAY,CAAC,UAAU,EACvB,kBAAkB,EAClB,yBAAyB,CAC1B,CAAC;yBACH;wBAAC,OAAO,KAAK,EAAE;4BACd,IAAM,SAAS,GAAG,IAAI,SAAS,CAC7B,WAAS,KAAK,sDAAiD,cAAc,CAAC,UAAY,CAC3F,CAAC;4BACF,SAAS,CAAC,OAAO,GAAG,KAAK,CAAC,YAAY,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC;4BAC/D,SAAS,CAAC,QAAQ,GAAG,KAAK,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC;4BACzD,OAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;yBAClC;qBACF;yBAAM,IAAI,aAAa,CAAC,UAAU,KAAK,MAAM,EAAE;wBAC9C,uGAAuG;wBACvG,cAAc,CAAC,UAAU,GAAG,QAAQ,CAAC,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,MAAM,GAAG,GAAG,CAAC;qBAC7E;oBAED,IAAI,YAAY,CAAC,aAAa,EAAE;wBAC9B,cAAc,CAAC,aAAa,GAAG,aAAa,CAAC,UAAU,CAAC,WAAW,CACjE,YAAY,CAAC,aAAa,EAC1B,cAAc,CAAC,OAAO,CAAC,UAAU,EAAE,EACnC,4BAA4B,CAC7B,CAAC;qBACH;iBACF;aACF;SACF;QACD,OAAO,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;IACzC,CAAC,CAAC,CAAC;AACL,CAAC;AAED,SAAS,KAAK,CACZ,gBAA0B,EAC1B,eAAyB,EACzB,iBAAwC;IAExC,IAAM,YAAY,GAAG,UAAC,GAA6B;QACjD,IAAM,GAAG,GAAG,aAAU,GAAG,sDAAgD,iBAAiB,CAAC,UAAU,MAAG,CAAC;QACzG,IAAM,OAAO,GAAG,GAAG,CAAC,IAAI,IAAI,SAAS,CAAC,WAAW,CAAC;QAClD,IAAM,CAAC,GAAG,IAAI,SAAS,CACrB,GAAG,EACH,OAAO,EACP,iBAAiB,CAAC,MAAM,EACxB,iBAAiB,CAAC,OAAO,EACzB,iBAAiB,EACjB,iBAAiB,CAAC,UAAU,CAC7B,CAAC;QACF,OAAO,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC3B,CAAC,CAAC;IAEF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,kBAAkB,IAAI,iBAAiB,CAAC,UAAU,EAAE;QACjF,IAAM,MAAI,GAAG,iBAAiB,CAAC,UAAU,CAAC;QAC1C,IAAM,WAAW,GAAW,iBAAiB,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC;QAChF,IAAM,iBAAiB,GAAa,CAAC,WAAW;YAC9C,CAAC,CAAC,EAAE;YACJ,CAAC,CAAC,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,UAAC,SAAS,IAAK,OAAA,SAAS,CAAC,WAAW,EAAE,EAAvB,CAAuB,CAAC,CAAC;QACvE,IACE,iBAAiB,CAAC,MAAM,KAAK,CAAC;YAC9B,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAA1C,CAA0C,CAAC,EACjF;YACA,OAAO,IAAI,OAAO,CAAwB,UAAC,OAAO;gBAChD,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,MAAI,CAAC,CAAC;gBAChD,OAAO,CAAC,iBAAiB,CAAC,CAAC;YAC7B,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,UAAC,SAAS,IAAK,OAAA,eAAe,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAzC,CAAyC,CAAC,EAAE;YAC3F,OAAO,QAAQ,CAAC,MAAI,CAAC;iBAClB,IAAI,CAAC,UAAC,IAAI;gBACT,iBAAiB,CAAC,UAAU,GAAG,IAAI,CAAC;gBACpC,OAAO,iBAAiB,CAAC;YAC3B,CAAC,CAAC;iBACD,KAAK,CAAC,YAAY,CAAC,CAAC;SACxB;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,iBAAiB,CAAC,CAAC;AAC5C,CAAC"} \ No 
newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts new file mode 100644 index 0000000000..3442911a03 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts @@ -0,0 +1,48 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +export declare function exponentialRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export declare class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The minimum retry interval in milliseconds. + */ + minRetryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. 
+ */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=exponentialRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map new file mode 100644 index 0000000000..a972a4c112 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"exponentialRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAGzB,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAOD;;;GAGG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D;;OAEG;IACH,UAAU,EAAE,MAAM,CAAC;IACnB;;OAEG;IACH,aAAa,EAAE,MAAM,CAAC;IACtB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IACzB;;OAEG;IACH,gBAAgB,EAAE,MAAM,CAAC;IAEzB;;;;;;;;OAQG;gBAED,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAgBpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js new file mode 100644 index 0000000000..82029bf35c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js @@ -0,0 +1,132 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { RestError } from "../restError"; +export function exponentialRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +var DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +var DEFAULT_CLIENT_RETRY_COUNT = 3; +var DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +var DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +var ExponentialRetryPolicy = /** @class */ (function (_super) { + __extends(ExponentialRetryPolicy, _super); + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. 
+ * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + function ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + function isNumber(n) { + return typeof n === "number"; + } + _this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + ExponentialRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return retry(_this, request, response); }) + .catch(function (error) { return retry(_this, request, error.response, undefined, error); }); + }; + return ExponentialRetryPolicy; +}(BaseRequestPolicy)); +export { ExponentialRetryPolicy }; +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, statusCode, retryData) { + if (statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505) { + return false; + } + var currentCount; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, response, retryData, requestError) { + retryData = updateRetryData(policy, retryData, requestError); + var isAborted = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return utils + .delay(retryData.retryInterval) + .then(function () { return policy._nextPolicy.sendRequest(request.clone()); }) + .then(function (res) { return retry(policy, request, res, retryData, undefined); }) + .catch(function (err) { return retry(policy, request, response, retryData, err); }); + } + else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + var err = retryData.error || + new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); + return Promise.reject(err); + } + else { + return Promise.resolve(response); + } +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map new file mode 100644 index 0000000000..f424d43415 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/exponentialRetryPolicy.js.map @@ -0,0 +1 @@ 
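The updateRetryData() function above grows the wait time exponentially, adds jitter around the base interval, and clamps the result to the configured ceiling. A standalone sketch of just that arithmetic, using the same default constants declared in this file (30s base interval, 3s minimum, 90s cap); nextRetryDelayMs is an illustrative name, not an export of the package.

const RETRY_INTERVAL_MS = 30 * 1000;
const MIN_RETRY_INTERVAL_MS = 3 * 1000;
const MAX_RETRY_INTERVAL_MS = 90 * 1000;

function nextRetryDelayMs(retryCount: number): number {
  // Exponential growth: 1, 3, 7, 15, ... for attempts 1, 2, 3, 4, ...
  let incrementDelta = Math.pow(2, retryCount) - 1;
  // Jitter: a random multiplier between 0.8x and 1.2x of the base interval.
  const boundedRandDelta =
    RETRY_INTERVAL_MS * 0.8 +
    Math.floor(Math.random() * (RETRY_INTERVAL_MS * 1.2 - RETRY_INTERVAL_MS * 0.8));
  incrementDelta *= boundedRandDelta;
  // Clamp to the ceiling so repeated failures never wait longer than the cap.
  return Math.min(MIN_RETRY_INTERVAL_MS + incrementDelta, MAX_RETRY_INTERVAL_MS);
}

// With these defaults the first retry waits roughly 27-39 seconds and later
// retries hit the 90-second cap.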
+{"version":3,"file":"exponentialRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/exponentialRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAczC,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED,IAAM,6BAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;AAChD,IAAM,0BAA0B,GAAG,CAAC,CAAC;AACrC,IAAM,iCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;AACpD,IAAM,iCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;AAEnD;;;GAGG;AACH;IAA4C,0CAAiB;IAkB3D;;;;;;;;OAQG;IACH,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAXC,SAAS,QAAQ,CAAC,CAAM;YACtB,OAAO,OAAO,CAAC,KAAK,QAAQ,CAAC;QAC/B,CAAC;QACD,KAAI,CAAC,UAAU,GAAG,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,0BAA0B,CAAC;QACjF,KAAI,CAAC,aAAa,GAAG,QAAQ,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,6BAA6B,CAAC;QAC7F,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;QACtC,KAAI,CAAC,gBAAgB,GAAG,QAAQ,CAAC,gBAAgB,CAAC;YAChD,CAAC,CAAC,gBAAgB;YAClB,CAAC,CAAC,iCAAiC,CAAC;;IACxC,CAAC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAKC;QAJC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAA9B,CAA8B,CAAC;aAClD,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,SAAS,EAAE,KAAK,CAAC,EAAtD,CAAsD,CAAC,CAAC;IAC9E,CAAC;IACH,6BAAC;AAAD,CAAC,AAvDD,CAA4C,iBAAiB,GAuD5D;;AAED;;;;;;;GAOG;AACH,SAAS,WAAW,CAClB,MAA8B,EAC9B,UAA8B,EAC9B,SAAoB;IAEpB,IACE,UAAU,IAAI,SAAS;QACvB,CAAC,UAAU,GAAG,GAAG,IAAI,UAAU,KAAK,GAAG,CAAC;QACxC,UAAU,KAAK,GAAG;QAClB,UAAU,KAAK,GAAG,EAClB;QACA,OAAO,KAAK,CAAC;KACd;IAED,IAAI,YAAoB,CAAC;IACzB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IAED,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;;GAMG;AACH,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG;QAC1B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,KAAK,CACZ,MAA8B,EAC9B,OAAwB,EACxB,QAAgC,EAChC,SAAqB,EACrB,YAAyB;IAEzB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,YAAY,CAAC,CAAC;IAC7D,IAAM,SAAS,GAAwB,OAAO,CAAC,WAAW,IAAI,OAAO,CAAC,WAAW,CAAC,OAAO,CAAC;IAC1F,IAAI,CAAC,SAAS,IAAI,WAAW,CAAC,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,EAAE;QAC7E,OAAO,KAAK;aACT,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC;aAC9B,IAAI,CAAC,cAAM,OAAA,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAA/C,CAA+C,CAAC;aAC3D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,GAAG,EAAE,SAAS,EAAE,SAAS,CAAC,EAAjD,CAAiD,CAAC;aAChE,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,KAAK,CAAC
,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,CAAC,EAAhD,CAAgD,CAAC,CAAC;KACrE;SAAM,IAAI,SAAS,IAAI,YAAY,IAAI,CAAC,QAAQ,EAAE;QACjD,qFAAqF;QACrF,IAAM,GAAG,GACP,SAAS,CAAC,KAAK;YACf,IAAI,SAAS,CACX,6BAA6B,EAC7B,SAAS,CAAC,kBAAkB,EAC5B,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAC3B,QAAQ,IAAI,QAAQ,CAAC,OAAO,EAC5B,QAAQ,CACT,CAAC;QACJ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;SAAM;QACL,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;KAClC;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts new file mode 100644 index 0000000000..d08160d26b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function generateClientRequestIdPolicy(requestIdHeaderName?: string): RequestPolicyFactory; +export declare class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + private _requestIdHeaderName; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, _requestIdHeaderName: string); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=generateClientRequestIdPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map new file mode 100644 index 0000000000..fb5c7a1a06 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,6BAA6B,CAC3C,mBAAmB,SAA2B,GAC7C,oBAAoB,CAMtB;AAED,qBAAa,6BAA8B,SAAQ,iBAAiB;IAIhE,OAAO,CAAC,oBAAoB;gBAF5B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACzB,oBAAoB,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js new file mode 100644 index 0000000000..d7c311812d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function generateClientRequestIdPolicy(requestIdHeaderName) { + if (requestIdHeaderName === void 0) { requestIdHeaderName = "x-ms-client-request-id"; } + return { + create: function (nextPolicy, options) { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} +var GenerateClientRequestIdPolicy = /** @class */ (function (_super) { + __extends(GenerateClientRequestIdPolicy, _super); + function GenerateClientRequestIdPolicy(nextPolicy, options, _requestIdHeaderName) { + var _this = _super.call(this, nextPolicy, options) || this; + _this._requestIdHeaderName = _requestIdHeaderName; + return _this; + } + GenerateClientRequestIdPolicy.prototype.sendRequest = function (request) { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, utils.generateUuid()); + } + return this._nextPolicy.sendRequest(request); + }; + return GenerateClientRequestIdPolicy; +}(BaseRequestPolicy)); +export { GenerateClientRequestIdPolicy }; +//# sourceMappingURL=generateClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map new file mode 100644 index 0000000000..56227faeaa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/generateClientRequestIdPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generateClientRequestIdPolicy.js","sourceRoot":"","sources":["../../../lib/policies/generateClientRequestIdPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,6BAA6B,CAC3C,mBAA8C;IAA9C,oCAAA,EAAA,8CAA8C;IAE9C,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,6BAA6B,CAAC,UAAU,EAAE,OAAO,EAAE,mBAAmB,CAAC,CAAC;QACrF,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAmD,iDAAiB;IAClE,uCACE,UAAyB,EACzB,OAAiC,EACzB,oBAA4B;QAHtC,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHS,0BAAoB,GAApB,oBAAoB,CAAQ;;IAGtC,CAAC;IAEM,mDAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,EAAE;YACxD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,oBAAoB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;SACtE;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,oCAAC;AAAD,CAAC,AAfD,CAAmD,iBAAiB,GAenE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts new file mode 100644 index 0000000000..62399673aa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function logPolicy(logger?: any): RequestPolicyFactory; +export declare class LogPolicy extends BaseRequestPolicy { + logger?: any; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, logger?: any); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=logPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map 
b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map new file mode 100644 index 0000000000..4108f58f1f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"logPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/logPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,SAAS,CAAC,MAAM,GAAE,GAAiB,GAAG,oBAAoB,CAMzE;AAED,qBAAa,SAAU,SAAQ,iBAAiB;IAC9C,MAAM,CAAC,EAAE,GAAG,CAAC;gBAGX,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,MAAM,GAAE,GAAiB;IAMpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js new file mode 100644 index 0000000000..cfa3e24d04 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function logPolicy(logger) { + if (logger === void 0) { logger = console.log; } + return { + create: function (nextPolicy, options) { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} +var LogPolicy = /** @class */ (function (_super) { + __extends(LogPolicy, _super); + function LogPolicy(nextPolicy, options, logger) { + if (logger === void 0) { logger = console.log; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.logger = logger; + return _this; + } + LogPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy.sendRequest(request).then(function (response) { return logResponse(_this, response); }); + }; + return LogPolicy; +}(BaseRequestPolicy)); +export { LogPolicy }; +function logResponse(policy, response) { + policy.logger(">> Request: " + JSON.stringify(response.request, undefined, 2)); + policy.logger(">> Response status code: " + response.status); + var responseBody = response.bodyAsText; + policy.logger(">> Body: " + responseBody); + return Promise.resolve(response); +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map new file mode 100644 index 0000000000..ef5ba965ad --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/logPolicy.js.map @@ -0,0 +1 @@ 
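generateClientRequestIdPolicy and logPolicy above both follow the same shape: a factory whose create(nextPolicy, options) call returns a policy that does its own work inside sendRequest() and then delegates to the next policy in the pipeline. A simplified sketch of that pattern with stand-in types; SketchRequest, SketchResponse, SketchPolicy, and timingPolicy are illustrative and do not mirror the vendored type declarations.

interface SketchRequest { url: string; }
interface SketchResponse { status: number; bodyAsText?: string; }
interface SketchPolicy { sendRequest(request: SketchRequest): Promise<SketchResponse>; }

function timingPolicy(log: (msg: string) => void = console.log) {
  return {
    create(nextPolicy: SketchPolicy): SketchPolicy {
      return {
        async sendRequest(request: SketchRequest): Promise<SketchResponse> {
          const started = Date.now();
          // Delegate to the rest of the pipeline, then log once the call completes.
          const response = await nextPolicy.sendRequest(request);
          log(`${request.url} -> ${response.status} in ${Date.now() - started}ms`);
          return response;
        },
      };
    },
  };
}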
+{"version":3,"file":"logPolicy.js","sourceRoot":"","sources":["../../../lib/policies/logPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAI/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,SAAS,CAAC,MAAyB;IAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;IACjD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,SAAS,CAAC,UAAU,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;QACpD,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAA+B,6BAAiB;IAG9C,mBACE,UAAyB,EACzB,OAAiC,EACjC,MAAyB;QAAzB,uBAAA,EAAA,SAAc,OAAO,CAAC,GAAG;QAH3B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,MAAM,GAAG,MAAM,CAAC;;IACvB,CAAC;IAEM,+BAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAEC;QADC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,WAAW,CAAC,KAAI,EAAE,QAAQ,CAAC,EAA3B,CAA2B,CAAC,CAAC;IAC/F,CAAC;IACH,gBAAC;AAAD,CAAC,AAfD,CAA+B,iBAAiB,GAe/C;;AAED,SAAS,WAAW,CAClB,MAAiB,EACjB,QAA+B;IAE/B,MAAM,CAAC,MAAM,CAAC,iBAAe,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,SAAS,EAAE,CAAC,CAAG,CAAC,CAAC;IAC/E,MAAM,CAAC,MAAM,CAAC,8BAA4B,QAAQ,CAAC,MAAQ,CAAC,CAAC;IAC7D,IAAM,YAAY,GAAG,QAAQ,CAAC,UAAU,CAAC;IACzC,MAAM,CAAC,MAAM,CAAC,cAAY,YAAc,CAAC,CAAC;IAC1C,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts new file mode 100644 index 0000000000..3ff8522d3a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map new file mode 100644 index 0000000000..39d10a81e8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAQA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAOlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAQzD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js new file mode 100644 index 0000000000..2fe6588317 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+export function getDefaultUserAgentKey() { + return "x-ms-command-name"; +} +export function getPlatformSpecificData() { + var navigator = self.navigator; + var osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + return [osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map new file mode 100644 index 0000000000..d96a097c50 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAc/F,MAAM,UAAU,sBAAsB;IACpC,OAAO,mBAAmB,CAAC;AAC7B,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,IAAM,SAAS,GAAG,IAAI,CAAC,SAAwB,CAAC;IAChD,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,CAAC,SAAS,CAAC,KAAK,IAAI,SAAS,CAAC,QAAQ,CAAC,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,CAAC;KAChE,CAAC;IAEF,OAAO,CAAC,MAAM,CAAC,CAAC;AAClB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts new file mode 100644 index 0000000000..1fac88842b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts @@ -0,0 +1,4 @@ +import { TelemetryInfo } from "./userAgentPolicy"; +export declare function getDefaultUserAgentKey(): string; +export declare function getPlatformSpecificData(): TelemetryInfo[]; +//# sourceMappingURL=msRestUserAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map new file mode 100644 index 0000000000..9ad4183525 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,MAAM,mBAAmB,CAAC;AAGlD,wBAAgB,sBAAsB,IAAI,MAAM,CAE/C;AAED,wBAAgB,uBAAuB,IAAI,aAAa,EAAE,CAYzD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js new file mode 100644 index 0000000000..40622431fc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import * as os from "os"; +import { Constants } from "../util/constants"; +export function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +export function getPlatformSpecificData() { + var runtimeInfo = { + key: "Node", + value: process.version, + }; + var osInfo = { + key: "OS", + value: "(" + os.arch() + "-" + os.type() + "-" + os.release() + ")", + }; + return [runtimeInfo, osInfo]; +} +//# sourceMappingURL=msRestUserAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map new file mode 100644 index 0000000000..8db5ec536b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/msRestUserAgentPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"msRestUserAgentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/msRestUserAgentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,MAAM,UAAU,sBAAsB;IACpC,OAAO,SAAS,CAAC,eAAe,CAAC,UAAU,CAAC;AAC9C,CAAC;AAED,MAAM,UAAU,uBAAuB;IACrC,IAAM,WAAW,GAAG;QAClB,GAAG,EAAE,MAAM;QACX,KAAK,EAAE,OAAO,CAAC,OAAO;KACvB,CAAC;IAEF,IAAM,MAAM,GAAG;QACb,GAAG,EAAE,IAAI;QACT,KAAK,EAAE,MAAI,EAAE,CAAC,IAAI,EAAE,SAAI,EAAE,CAAC,IAAI,EAAE,SAAI,EAAE,CAAC,OAAO,EAAE,MAAG;KACrD,CAAC;IAEF,OAAO,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC;AAC/B,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts new file mode 100644 index 0000000000..5c2b57402e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts @@ -0,0 +1,11 @@ +import { ProxySettings } from "../serviceClient"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +export declare function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike); + sendRequest(_request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map new file mode 100644 index 0000000000..ad17d1ce96 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.d.ts","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD,wBAAgB,uBAAuB,CAAC,SAAS,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAErF;AAED,wBAAgB,WAAW,CAAC,cAAc,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAMhF;AAED,qBAAa,WAAY,SAAQ,iBAAiB;gBACpC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB;IAKjE,WAAW,CAAC,QAAQ,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAG9E"} \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js new file mode 100644 index 0000000000..c8144dee5c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +var proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); +export function getDefaultProxySettings(_proxyUrl) { + return undefined; +} +export function proxyPolicy(_proxySettings) { + return { + create: function (_nextPolicy, _options) { + throw proxyNotSupportedInBrowser; + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options) { + var _this = _super.call(this, nextPolicy, options) || this; + throw proxyNotSupportedInBrowser; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (_request) { + throw proxyNotSupportedInBrowser; + }; + return ProxyPolicy; +}(BaseRequestPolicy)); +export { ProxyPolicy }; +//# sourceMappingURL=proxyPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map new file mode 100644 index 0000000000..7fe9c5374c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.browser.js","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,IAAM,0BAA0B,GAAG,IAAI,KAAK,CAAC,qDAAqD,CAAC,CAAC;AAEpG,MAAM,UAAU,uBAAuB,CAAC,SAAkB;IACxD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,cAA8B;IACxD,OAAO;QACL,MAAM,EAAE,UAAC,WAA0B,EAAE,QAAkC;YACrE,MAAM,0BAA0B,CAAC;QACnC,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAChD,qBAAY,UAAyB,EAAE,OAAiC;QAAxE,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,MAAM,0BAA0B,CAAC;;IACnC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,QAAyB;QAC1C,MAAM,0BAA0B,CAAC;IACnC,CAAC;IACH,kBAAC;AAAD,CAAC,AATD,CAAiC,iBAAiB,GASjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts new file mode 100644 index 0000000000..565fce8ff8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts @@ -0,0 +1,24 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +/** + * @internal + */ +export declare const noProxyList: string[]; +/** + * @internal + */ +export declare function getEnvironmentValue(name: string): string | undefined; +/** + * @internal + */ +export declare function loadNoProxy(): string[]; +export declare function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined; +export declare function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory; +export declare class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + 
constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, proxySettings: ProxySettings); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=proxyPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map new file mode 100644 index 0000000000..ab11973620 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAIjD;;GAEG;AACH,eAAO,MAAM,WAAW,EAAE,MAAM,EAAkB,CAAC;AAGnD;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,CAOpE;AA+CD;;GAEG;AACH,wBAAgB,WAAW,IAAI,MAAM,EAAE,CAUtC;AA4BD,wBAAgB,uBAAuB,CAAC,QAAQ,CAAC,EAAE,MAAM,GAAG,aAAa,GAAG,SAAS,CAiBpF;AAED,wBAAgB,WAAW,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAS/E;AAED,qBAAa,WAAY,SAAQ,iBAAiB;IAChD,aAAa,EAAE,aAAa,CAAC;gBAG3B,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,aAAa,EAAE,aAAa;IAMvB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAM7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js new file mode 100644 index 0000000000..2f0ef8629b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; +/** + * @internal + */ +export var noProxyList = loadNoProxy(); +var byPassedList = new Map(); +/** + * @internal + */ +export function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + var httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + var allProxy = getEnvironmentValue(Constants.ALL_PROXY); + var httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. 
+// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri) { + if (noProxyList.length === 0) { + return false; + } + var host = URLBuilder.parse(uri).getHost(); + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + var isBypassedFlag = false; + for (var _i = 0, noProxyList_1 = noProxyList; _i < noProxyList_1.length; _i++) { + var pattern = noProxyList_1[_i]; + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +export function loadNoProxy() { + var noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map(function (item) { return item.trim(); }) + .filter(function (item) { return item.length; }); + } + return []; +} +/** + * @internal + */ +function extractAuthFromUrl(url) { + var atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + var schemeIndex = url.indexOf("://"); + var authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + var auth = url.substring(authStart, atIndex); + var colonIndex = auth.indexOf(":"); + var hasPassword = colonIndex !== -1; + var username = hasPassword ? auth.substring(0, colonIndex) : auth; + var password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + var urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username: username, + password: password, + urlWithoutAuth: urlWithoutAuth, + }; +} +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + var _a = extractAuthFromUrl(proxyUrl), username = _a.username, password = _a.password, urlWithoutAuth = _a.urlWithoutAuth; + var parsedUrl = URLBuilder.parse(urlWithoutAuth); + var schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username: username, + password: password, + }; +} +export function proxyPolicy(proxySettings) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: function (nextPolicy, options) { + return new ProxyPolicy(nextPolicy, options, proxySettings); + }, + }; +} +var ProxyPolicy = /** @class */ (function (_super) { + __extends(ProxyPolicy, _super); + function ProxyPolicy(nextPolicy, options, proxySettings) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.proxySettings = proxySettings; + return _this; + } + ProxyPolicy.prototype.sendRequest = function (request) { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + }; + return ProxyPolicy; +}(BaseRequestPolicy)); +export { ProxyPolicy }; +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map new file mode 100644 index 0000000000..86f8ba63e2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/proxyPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyPolicy.js","sourceRoot":"","sources":["../../../lib/policies/proxyPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC;;GAEG;AACH,MAAM,CAAC,IAAM,WAAW,GAAa,WAAW,EAAE,CAAC;AACnD,IAAM,YAAY,GAAyB,IAAI,GAAG,EAAE,CAAC;AAErD;;GAEG;AACH,MAAM,UAAU,mBAAmB,CAAC,IAAY;IAC9C,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QACrB,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC1B;SAAM,IAAI,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE;QAC1C,OAAO,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;KACxC;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,yBAAyB;IAChC,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,SAAS,CAAC;KAClB;IAED,IAAM,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAC9D,IAAM,QAAQ,GAAG,mBAAmB,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC1D,IAAM,SAAS,GAAG,mBAAmB,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC;IAE5D,OAAO,UAAU,IAAI,QAAQ,IAAI,SAAS,CAAC;AAC7C,CAAC;AAED,iEAAiE;AACjE,2FAA2F;AAC3F,mMAAmM;AACnM,SAAS,UAAU,CAAC,GAAW;IAC7B,IAAI,WAAW,CAAC,MAAM,KAAK,CAAC,EAAE;QAC5B,OAAO,KAAK,CAAC;KACd;IACD,IAAM,IAAI,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,EAAG,CAAC;IAC9C,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE;QAC1B,OAAO,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;KAC/B;IACD,IAAI,cAAc,GAAG,KAAK,CAAC;IAC3B,KAAsB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA9B,IAAM,OAAO,oBAAA;QAChB,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;YACtB,mEAAmE;YACnE,mDAAmD;YACnD,IAAI,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,EAAE;gBAC1B,cAAc,GAAG,IAAI,CAAC;aACvB;iBAAM;gBACL,IAAI,IAAI,CAAC,MAAM,KAAK,OAAO,CAAC,MAAM,GAAG,CAAC,IAAI,IAAI,KAAK,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;oBACnE,cAAc,GAAG,IAAI,CAAC;iBACvB;aACF;SACF;aAAM;YACL,IAAI,IAAI,KAAK,OAAO,EAAE;gBACpB,cAAc,GAAG,IAAI,CAAC;aACvB;SACF;KACF;IACD,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACvC,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,WAAW;IACzB,IAAM,OAAO,GAAG,mBAAmB,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IACxD,IAAI,OAAO,EAAE;QACX,OAAO,OAAO;aACX,KAAK,CAAC,GAAG,CAAC;aACV,GAAG,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,IAAI,EAAE,EAAX,CAAW,CAAC;aAC1B,MAAM,CAAC,UAAC,IAAI,IAAK,OAAA,IAAI,CAAC,MAAM,EAAX,CAAW,CAAC,CAAC;KAClC;IAED,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;GAEG;
AACH,SAAS,kBAAkB,CACzB,GAAW;IAEX,IAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACjC,IAAI,OAAO,KAAK,CAAC,CAAC,EAAE;QAClB,OAAO,EAAE,cAAc,EAAE,GAAG,EAAE,CAAC;KAChC;IAED,IAAM,WAAW,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IACvC,IAAM,SAAS,GAAG,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC3D,IAAM,IAAI,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;IAC/C,IAAM,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;IACrC,IAAM,WAAW,GAAG,UAAU,KAAK,CAAC,CAAC,CAAC;IACtC,IAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACpE,IAAM,QAAQ,GAAG,WAAW,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1E,IAAM,cAAc,GAAG,GAAG,CAAC,SAAS,CAAC,CAAC,EAAE,SAAS,CAAC,GAAG,GAAG,CAAC,SAAS,CAAC,OAAO,GAAG,CAAC,CAAC,CAAC;IAChF,OAAO;QACL,QAAQ,UAAA;QACR,QAAQ,UAAA;QACR,cAAc,gBAAA;KACf,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,uBAAuB,CAAC,QAAiB;IACvD,IAAI,CAAC,QAAQ,EAAE;QACb,QAAQ,GAAG,yBAAyB,EAAE,CAAC;QACvC,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,SAAS,CAAC;SAClB;KACF;IAEK,IAAA,KAAyC,kBAAkB,CAAC,QAAQ,CAAC,EAAnE,QAAQ,cAAA,EAAE,QAAQ,cAAA,EAAE,cAAc,oBAAiC,CAAC;IAC5E,IAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,CAAC;IACnD,IAAM,MAAM,GAAG,SAAS,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC,SAAS,EAAE,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC;IAC1E,OAAO;QACL,IAAI,EAAE,MAAM,GAAG,SAAS,CAAC,OAAO,EAAE;QAClC,IAAI,EAAE,MAAM,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,EAAE,IAAI,IAAI,CAAC;QAClD,QAAQ,UAAA;QACR,QAAQ,UAAA;KACT,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,aAA6B;IACvD,IAAI,CAAC,aAAa,EAAE;QAClB,aAAa,GAAG,uBAAuB,EAAE,CAAC;KAC3C;IACD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,WAAW,CAAC,UAAU,EAAE,OAAO,EAAE,aAAc,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAiC,+BAAiB;IAGhD,qBACE,UAAyB,EACzB,OAAiC,EACjC,aAA4B;QAH9B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,aAAa,GAAG,aAAa,CAAC;;IACrC,CAAC;IAEM,iCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,aAAa,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;YACtD,OAAO,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC5C;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IACH,kBAAC;AAAD,CAAC,AAlBD,CAAiC,iBAAiB,GAkBjD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts new file mode 100644 index 0000000000..308149b78b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts @@ -0,0 +1,18 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +/** + * Options for how redirect responses are handled. 
+ */ +export interface RedirectOptions { + handleRedirects: boolean; + maxRetries?: number; +} +export declare const DefaultRedirectOptions: RedirectOptions; +export declare function redirectPolicy(maximumRetries?: number): RequestPolicyFactory; +export declare class RedirectPolicy extends BaseRequestPolicy { + readonly maxRetries: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, maxRetries?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=redirectPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map new file mode 100644 index 0000000000..94f16bbc35 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"redirectPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/redirectPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB;;GAEG;AACH,MAAM,WAAW,eAAe;IAI9B,eAAe,EAAE,OAAO,CAAC;IAMzB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,eAAO,MAAM,sBAAsB,EAAE,eAGpC,CAAC;AAEF,wBAAgB,cAAc,CAAC,cAAc,SAAK,GAAG,oBAAoB,CAMxE;AAED,qBAAa,cAAe,SAAQ,iBAAiB;IAIjD,QAAQ,CAAC,UAAU;gBAFnB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACxB,UAAU,SAAK;IAKnB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js new file mode 100644 index 0000000000..f6eda4bbff --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __extends } from "tslib"; +import { URLBuilder } from "../url"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export var DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +export function redirectPolicy(maximumRetries) { + if (maximumRetries === void 0) { maximumRetries = 20; } + return { + create: function (nextPolicy, options) { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +var RedirectPolicy = /** @class */ (function (_super) { + __extends(RedirectPolicy, _super); + function RedirectPolicy(nextPolicy, options, maxRetries) { + if (maxRetries === void 0) { maxRetries = 20; } + var _this = _super.call(this, nextPolicy, options) || this; + _this.maxRetries = maxRetries; + return _this; + } + RedirectPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request) + .then(function (response) { return handleRedirect(_this, response, 0); }); + }; + return RedirectPolicy; +}(BaseRequestPolicy)); +export { RedirectPolicy }; +function handleRedirect(policy, response, currentRetries) { + var request = response.request, status = response.status; + var locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries))) { + var builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then(function (res) { return handleRedirect(policy, res, currentRetries + 1); }) + .then(function (res) { return recordRedirect(res, request.url); }); + } + return Promise.resolve(response); +} +function recordRedirect(response, redirect) { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map new file mode 100644 index 0000000000..af97d7c38f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/redirectPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"redirectPolicy.js","sourceRoot":"","sources":["../../../lib/policies/redirectPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEpC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAkBzB,MAAM,CAAC,IAAM,sBAAsB,GAAoB;IACrD,eAAe,EAAE,IAAI;IACrB,UAAU,EAAE,EAAE;CACf,CAAC;AAEF,MAAM,UAAU,cAAc,CAAC,cAAmB;IAAnB,+BAAA,EAAA,mBAAmB;IAChD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,cAAc,CAAC,UAAU,EAAE,OAAO,EAAE,cAAc,CAAC,CAAC;QACjE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAoC,kCAAiB;IACnD,wBACE,UAAyB,EACzB,OAAiC,EACxB,UAAe;QAAf,2BAAA,EAAA,eAAe;QAH1B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,gBAAU,GAAV,UAAU,CAAK;;IAG1B,CAAC;IAEM,oCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,cAAc,CAAC,KAAI,EAAE,QAAQ,EAAE,CAAC,CAAC,EAAjC,CAAiC,CAAC,CAAC;IAC3D,CAAC;IACH,qBAAC;AAAD,CAAC,AAdD,CAAoC,iBAAiB,GAcpD;;AAED,SAAS,cAAc,CACrB,MAAsB,EACtB,QAA+B,EAC/B,cAAsB;IAEd,IAAA,OAAO,GAAa,QAAQ,QAArB,EAAE,MAAM,GAAK,QAAQ,OAAb,CAAc;IACrC,IAAM,cAAc,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxD,IACE,cAAc;QACd,CAAC,MAAM,KAAK,GAAG;YACb,CAAC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YAC5D,CAAC,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC;YACpE,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,OAAO,CAAC,MAAM,CAAC;YAC7C,MAAM,KAAK,GAAG,CAAC;QACjB,CAAC,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,OAAO,CAAC,aAAa,CAAC;YAC9E,CAAC,OAAO,CAAC,aAAa,KAAK,SAAS,IAAI,cAAc,GAAG,MAAM,CAAC,UAAU,CAAC,CAAC,EAC9E;QACA,IAAM,OAAO,GAAG,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC9C,OAAO,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;QAChC,OAAO,CAAC,GAAG,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;QAEjC,uEAAuE;QACvE,+EAA+E;QAC/E,+GAA+G;QAC/G,IAAI,CAAC,MAAM,KAAK,GAAG,IAAI,MAAM,KAAK,GAAG,CAAC,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,EAAE;YACnE,OAAO,CAAC,MAAM,GAAG,KAAK,CAAC;YACvB,OAAO,OAAO,CAAC,IAAI,CAAC;SACrB;QAED,OAAO,MAAM,CAAC,WAAW;aACtB,WAAW,CAAC,OAAO,CAAC;aACpB,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,MAAM,EAAE,GAAG,EAAE,cAAc,GAAG,CAAC,CAAC,EAA/C,CAA+C,CAAC;aAC9D,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,cAAc,CAAC,GAAG,EAAE,OAAO,CAAC,GAAG,CAAC,EAAhC,CAAgC,CAAC,CAAC;KACpD;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED,SAAS,cAAc,CAAC,QAA+B,EAAE,QAAgB;IACvE,oEAAoE;IACpE,wCAAwC;IACxC,IAAI,CAAC,QAAQ,CAAC,UAAU,EAAE;QACxB,QAAQ,CAAC,UAAU,GAAG,IAAI,CAAC;QAC3B,QAAQ,CAAC,GAAG,GAAG,QAAQ,CAAC;KACzB;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts new file mode 100644 index 0000000000..ff3e61d168 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts @@ -0,0 +1,71 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { WebResourceLike } from "../webResource"; +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. 
+ */ +export declare type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; +export interface RequestPolicy { + sendRequest(httpRequest: WebResourceLike): Promise; +} +export declare abstract class BaseRequestPolicy implements RequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptionsLike; + protected constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike); + abstract sendRequest(webResource: WebResourceLike): Promise; + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export declare class RequestPolicyOptions implements RequestPolicyOptionsLike { + private _logger?; + constructor(_logger?: HttpPipelineLogger | undefined); + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. 
+ */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} +//# sourceMappingURL=requestPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map new file mode 100644 index 0000000000..41e5568efc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/requestPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAC/D,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAEjD;;GAEG;AACH,oBAAY,oBAAoB,GAAG;IACjC,MAAM,CAAC,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,GAAG,aAAa,CAAC;CACrF,CAAC;AAEF,MAAM,WAAW,aAAa;IAC5B,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC,CAAC;CAC3E;AAED,8BAAsB,iBAAkB,YAAW,aAAa;IAE5D,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAF7C,SAAS,aACE,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,wBAAwB;aAG7B,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAEzF;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAIzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAGlE;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC;;;;OAIG;IACH,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO,CAAC;IAEnD;;;;;OAKG;IACH,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI,CAAC;CAC5D;AAED;;GAEG;AACH,qBAAa,oBAAqB,YAAW,wBAAwB;IACvD,OAAO,CAAC,OAAO,CAAC;gBAAR,OAAO,CAAC,gCAAoB;IAEhD;;;;OAIG;IACI,SAAS,CAAC,QAAQ,EAAE,oBAAoB,GAAG,OAAO;IAQzD;;;;;OAKG;IACI,GAAG,CAAC,QAAQ,EAAE,oBAAoB,EAAE,OAAO,EAAE,MAAM,GAAG,IAAI;CAKlE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js new file mode 100644 index 0000000000..16fb9b300d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +var BaseRequestPolicy = /** @class */ (function () { + function BaseRequestPolicy(_nextPolicy, _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + BaseRequestPolicy.prototype.shouldLog = function (logLevel) { + return this._options.shouldLog(logLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + BaseRequestPolicy.prototype.log = function (logLevel, message) { + this._options.log(logLevel, message); + }; + return BaseRequestPolicy; +}()); +export { BaseRequestPolicy }; +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +var RequestPolicyOptions = /** @class */ (function () { + function RequestPolicyOptions(_logger) { + this._logger = _logger; + } + /** + * Get whether or not a log with the provided log level should be logged. 
+ * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + RequestPolicyOptions.prototype.shouldLog = function (logLevel) { + return (!!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel); + }; + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + RequestPolicyOptions.prototype.log = function (logLevel, message) { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + }; + return RequestPolicyOptions; +}()); +export { RequestPolicyOptions }; +//# sourceMappingURL=requestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map new file mode 100644 index 0000000000..506b5bb736 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/requestPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"requestPolicy.js","sourceRoot":"","sources":["../../../lib/policies/requestPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAI/F,OAAO,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAC;AAc/D;IACE,2BACW,WAA0B,EAC1B,QAAkC;QADlC,gBAAW,GAAX,WAAW,CAAe;QAC1B,aAAQ,GAAR,QAAQ,CAA0B;IAC1C,CAAC;IAIJ;;;;OAIG;IACI,qCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACI,+BAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;IACvC,CAAC;IACH,wBAAC;AAAD,CAAC,AA1BD,IA0BC;;AAsBD;;GAEG;AACH;IACE,8BAAoB,OAA4B;QAA5B,YAAO,GAAP,OAAO,CAAqB;IAAG,CAAC;IAEpD;;;;OAIG;IACI,wCAAS,GAAhB,UAAiB,QAA8B;QAC7C,OAAO,CACL,CAAC,CAAC,IAAI,CAAC,OAAO;YACd,QAAQ,KAAK,oBAAoB,CAAC,GAAG;YACrC,QAAQ,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,CACzC,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,kCAAG,GAAV,UAAW,QAA8B,EAAE,OAAe;QACxD,IAAI,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;SACrC;IACH,CAAC;IACH,2BAAC;AAAD,CAAC,AA3BD,IA2BC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts new file mode 100644 index 0000000000..305f617583 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts @@ -0,0 +1,10 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function rpRegistrationPolicy(retryTimeout?: number): RequestPolicyFactory; +export declare class RPRegistrationPolicy extends BaseRequestPolicy { + readonly _retryTimeout: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, _retryTimeout?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=rpRegistrationPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map new file mode 100644 index 0000000000..51bbc8ae66 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,oBAAoB,CAAC,YAAY,SAAK,GAAG,oBAAoB,CAM5E;AAED,qBAAa,oBAAqB,SAAQ,iBAAiB;IAIvD,QAAQ,CAAC,aAAa;gBAFtB,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACxB,aAAa,SAAK;IAKtB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js new file mode 100644 index 0000000000..ad7107fffd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js @@ -0,0 +1,163 @@ +import { __extends } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function rpRegistrationPolicy(retryTimeout) { + if (retryTimeout === void 0) { retryTimeout = 30; } + return { + create: function (nextPolicy, options) { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} +var RPRegistrationPolicy = /** @class */ (function (_super) { + __extends(RPRegistrationPolicy, _super); + function RPRegistrationPolicy(nextPolicy, options, _retryTimeout) { + if (_retryTimeout === void 0) { _retryTimeout = 30; } + var _this = _super.call(this, nextPolicy, options) || this; + _this._retryTimeout = _retryTimeout; + return _this; + } + RPRegistrationPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .then(function (response) { return registerIfNeeded(_this, request, response); }); + }; + return RPRegistrationPolicy; +}(BaseRequestPolicy)); +export { RPRegistrationPolicy }; +function registerIfNeeded(policy, request, response) { + if (response.status === 409) { + var rpName = checkRPNotRegisteredError(response.bodyAsText); + if (rpName) { + var urlPrefix = extractSubscriptionUrl(request.url); + return (registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(function () { return false; }) + .then(function (registrationStatus) { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + })); + } + } + return Promise.resolve(response); +} +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. 
+ */ +function getRequestEssentials(originalRequest, reuseUrlToo) { + if (reuseUrlToo === void 0) { reuseUrlToo = false; } + var reqOptions = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + return reqOptions; +} +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. + */ +function checkRPNotRegisteredError(body) { + var result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } + catch (err) { + // do nothing; + } + if (responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration") { + var matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url) { + var result; + var matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } + else { + throw new Error("Unable to extract subscriptionId from the given url - " + url + "."); + } + return result; +} +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP(policy, urlPrefix, provider, originalRequest) { + var postUrl = urlPrefix + "providers/" + provider + "/register?api-version=2016-02-01"; + var getUrl = urlPrefix + "providers/" + provider + "?api-version=2016-02-01"; + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + return policy._nextPolicy.sendRequest(reqOptions).then(function (response) { + if (response.status !== 200) { + throw new Error("Autoregistration of " + provider + " failed. Please try registering manually."); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. 
+ * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. + */ +function getRegistrationStatus(policy, url, originalRequest) { + var reqOptions = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + return policy._nextPolicy.sendRequest(reqOptions).then(function (res) { + var obj = res.parsedBody; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } + else { + return utils + .delay(policy._retryTimeout * 1000) + .then(function () { return getRegistrationStatus(policy, url, originalRequest); }); + } + }); +} +//# sourceMappingURL=rpRegistrationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map new file mode 100644 index 0000000000..848ce2fad1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/rpRegistrationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rpRegistrationPolicy.js","sourceRoot":"","sources":["../../../lib/policies/rpRegistrationPolicy.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,oBAAoB,CAAC,YAAiB;IAAjB,6BAAA,EAAA,iBAAiB;IACpD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACrE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAA0C,wCAAiB;IACzD,8BACE,UAAyB,EACzB,OAAiC,EACxB,aAAkB;QAAlB,8BAAA,EAAA,kBAAkB;QAH7B,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHU,mBAAa,GAAb,aAAa,CAAK;;IAG7B,CAAC;IAEM,0CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,IAAI,CAAC,UAAC,QAAQ,IAAK,OAAA,gBAAgB,CAAC,KAAI,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAzC,CAAyC,CAAC,CAAC;IACnE,CAAC;IACH,2BAAC;AAAD,CAAC,AAdD,CAA0C,iBAAiB,GAc1D;;AAED,SAAS,gBAAgB,CACvB,MAA4B,EAC5B,OAAwB,EACxB,QAA+B;IAE/B,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;QAC3B,IAAM,MAAM,GAAG,yBAAyB,CAAC,QAAQ,CAAC,UAAoB,CAAC,CAAC;QACxE,IAAI,MAAM,EAAE;YACV,IAAM,SAAS,GAAG,sBAAsB,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;YACtD,OAAO,CACL,UAAU,CAAC,MAAM,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,CAAC;gBAC5C,wFAAwF;gBACxF,kFAAkF;gBAClF,uFAAuF;iBACtF,KAAK,CAAC,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;iBAClB,IAAI,CAAC,UAAC,kBAAkB;gBACvB,IAAI,kBAAkB,EAAE;oBACtB,2EAA2E;oBAC3E,0EAA0E;oBAC1E,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;oBACpE,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,CAAC;iBACxD;gBACD,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC,CACL,CAAC;SACH;KACF;IAED,OAAO,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC;AACnC,CAAC;AAED;;;;;GAKG;AACH,SAAS,oBAAoB,CAC3B,eAAgC,EAChC,WAAmB;IAAnB,4BAAA,EAAA,mBAAmB;IAEnB,IAAM,UAAU,GAAoB,eAAe,CAAC,KAAK,EAAE,CAAC;IAC5D,IAAI,WAAW,EAAE;QACf,UAAU,CAAC,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;KACtC;IAED,wEAAwE;IACxE,iDAAiD;IACjD,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,KAAK,CAAC,YAAY,EAAE,CAAC,CAAC;IAEvE,uCAAuC;IACvC,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;IAE1E,OAAO,UAAU,CAAC;AACpB,CAAC;AAED;;;;;GAKG;AACH,SAAS,yBAAyB,CAAC,IAAY;IAC7C,IAAI,MAAM,EAAE,YAAY,CAAC;IACzB,IAAI,IAAI,EAAE;QACR,IAAI;YACF,YAAY,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;SACjC;QAAC,OAAO,GAAG,EAAE;YACZ,cAAc;SACf;QACD,IACE,YAAY;YACZ,YAAY,CAAC,KAAK;YAClB,YAAY,CAAC,KAAK,CAAC,OAAO;YAC1B,YAAY,CAAC,KAAK,CAAC,IAAI;YACvB,YAAY,CAAC,KAAK,CA
AC,IAAI,KAAK,iCAAiC,EAC7D;YACA,IAAM,QAAQ,GAAG,YAAY,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC/D,IAAI,QAAQ,EAAE;gBACZ,MAAM,GAAG,QAAQ,CAAC,GAAG,EAAE,CAAC;aACzB;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,SAAS,sBAAsB,CAAC,GAAW;IACzC,IAAI,MAAM,CAAC;IACX,IAAM,QAAQ,GAAG,GAAG,CAAC,KAAK,CAAC,mCAAmC,CAAC,CAAC;IAChE,IAAI,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAAC,EAAE;QAC3B,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;KACtB;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,2DAAyD,GAAG,MAAG,CAAC,CAAC;KAClF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,UAAU,CACjB,MAA4B,EAC5B,SAAiB,EACjB,QAAgB,EAChB,eAAgC;IAEhC,IAAM,OAAO,GAAM,SAAS,kBAAa,QAAQ,qCAAkC,CAAC;IACpF,IAAM,MAAM,GAAM,SAAS,kBAAa,QAAQ,4BAAyB,CAAC;IAC1E,IAAM,UAAU,GAAG,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACzD,UAAU,CAAC,MAAM,GAAG,MAAM,CAAC;IAC3B,UAAU,CAAC,GAAG,GAAG,OAAO,CAAC;IAEzB,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;QAC9D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,yBAAuB,QAAQ,8CAA2C,CAAC,CAAC;SAC7F;QACD,OAAO,qBAAqB,CAAC,MAAM,EAAE,MAAM,EAAE,eAAe,CAAC,CAAC;IAChE,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,SAAS,qBAAqB,CAC5B,MAA4B,EAC5B,GAAW,EACX,eAAgC;IAEhC,IAAM,UAAU,GAAQ,oBAAoB,CAAC,eAAe,CAAC,CAAC;IAC9D,UAAU,CAAC,GAAG,GAAG,GAAG,CAAC;IACrB,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC;IAE1B,OAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;QACzD,IAAM,GAAG,GAAG,GAAG,CAAC,UAAiB,CAAC;QAClC,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,iBAAiB,IAAI,GAAG,CAAC,iBAAiB,KAAK,YAAY,EAAE;YACrF,OAAO,IAAI,CAAC;SACb;aAAM;YACL,OAAO,KAAK;iBACT,KAAK,CAAC,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;iBAClC,IAAI,CAAC,cAAM,OAAA,qBAAqB,CAAC,MAAM,EAAE,GAAG,EAAE,eAAe,CAAC,EAAnD,CAAmD,CAAC,CAAC;SACpE;IACH,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts new file mode 100644 index 0000000000..b4b2f2df44 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts @@ -0,0 +1,12 @@ +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicyFactory, RequestPolicy, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare function signingPolicy(authenticationProvider: ServiceClientCredentials): RequestPolicyFactory; +export declare class SigningPolicy extends BaseRequestPolicy { + authenticationProvider: ServiceClientCredentials; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, authenticationProvider: ServiceClientCredentials); + signRequest(request: WebResourceLike): Promise; + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=signingPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map new file mode 100644 index 0000000000..91ae8e81f2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"signingPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/signingPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,wBAAwB,EAAE,MAAM,yCAAyC,CAAC;AACnF,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,aAAa,EACb,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,wBAAgB,aAAa,CAC3B,sBAAsB,EAAE,wBAAwB,GAC/C,oBAAoB,CAMtB;AAED,qBAAa,aAAc,SAAQ,iBAAiB;IAIzC,sBAAsB,EAAE,wBAAwB;gBAFvD,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EAC1B,sBAAsB,EAAE,wBAAwB;IAKzD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;IAIxD,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js new file mode 100644 index 0000000000..6a3be13bd0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function signingPolicy(authenticationProvider) { + return { + create: function (nextPolicy, options) { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} +var SigningPolicy = /** @class */ (function (_super) { + __extends(SigningPolicy, _super); + function SigningPolicy(nextPolicy, options, authenticationProvider) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.authenticationProvider = authenticationProvider; + return _this; + } + SigningPolicy.prototype.signRequest = function (request) { + return this.authenticationProvider.signRequest(request); + }; + SigningPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this.signRequest(request).then(function (nextRequest) { + return _this._nextPolicy.sendRequest(nextRequest); + }); + }; + return SigningPolicy; +}(BaseRequestPolicy)); +export { SigningPolicy }; +//# sourceMappingURL=signingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map new file mode 100644 index 0000000000..1cc0247b24 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/signingPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"signingPolicy.js","sourceRoot":"","sources":["../../../lib/policies/signingPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAK/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,UAAU,aAAa,CAC3B,sBAAgD;IAEhD,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,aAAa,CAAC,UAAU,EAAE,OAAO,EAAE,sBAAsB,CAAC,CAAC;QACxE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAmC,iCAAiB;IAClD,uBACE,UAAyB,EACzB,OAAiC,EAC1B,sBAAgD;QAHzD,YAKE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAC3B;QAHQ,4BAAsB,GAAtB,sBAAsB,CAA0B;;IAGzD,CAAC;IAED,mCAAW,GAAX,UAAY,OAAwB;QAClC,OAAO,IAAI,CAAC,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC1D,CAAC;IAEM,mCAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,UAAC,WAAW;YAChD,OAAA,KAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC;QAAzC,CAAyC,CAC1C,CAAC;IACJ,CAAC;IACH,oBAAC;AAAD,CAAC,AAlBD,CAAmC,iBAAiB,GAkBnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts 
b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts new file mode 100644 index 0000000000..153e5604c5 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts @@ -0,0 +1,37 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} +export declare function systemErrorRetryPolicy(retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number): RequestPolicyFactory; +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +export declare class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + DEFAULT_CLIENT_RETRY_INTERVAL: number; + DEFAULT_CLIENT_RETRY_COUNT: number; + DEFAULT_CLIENT_MAX_RETRY_INTERVAL: number; + DEFAULT_CLIENT_MIN_RETRY_INTERVAL: number; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryCount?: number, retryInterval?: number, minRetryInterval?: number, maxRetryInterval?: number); + sendRequest(request: WebResourceLike): Promise; +} +//# sourceMappingURL=systemErrorRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map new file mode 100644 index 0000000000..c38e714f10 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"systemErrorRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,MAAM,WAAW,SAAS;IACxB,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,UAAU,CAAC;CACpB;AAED,MAAM,WAAW,UAAW,SAAQ,KAAK;IACvC,OAAO,EAAE,MAAM,CAAC;IAChB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED,wBAAgB,sBAAsB,CACpC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM,GACxB,oBAAoB,CAatB;AAED;;;;;;;;;GASG;AACH,qBAAa,sBAAuB,SAAQ,iBAAiB;IAC3D,UAAU,EAAE,MAAM,CAAC;IACnB,aAAa,EAAE,MAAM,CAAC;IACtB,gBAAgB,EAAE,MAAM,CAAC;IACzB,gBAAgB,EAAE,MAAM,CAAC;IACzB,6BAA6B,SAAa;IAC1C,0BAA0B,SAAK;IAC/B,iCAAiC,SAAa;IAC9C,iCAAiC,SAAY;gBAG3C,UAAU,EAAE,aAAa,EACzB,OAAO,EAAE,wBAAwB,EACjC,UAAU,CAAC,EAAE,MAAM,EACnB,aAAa,CAAC,EAAE,MAAM,EACtB,gBAAgB,CAAC,EAAE,MAAM,EACzB,gBAAgB,CAAC,EAAE,MAAM;IAgBpB,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAK7E"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js 
b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js new file mode 100644 index 0000000000..06b7c64fd4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __awaiter, __extends, __generator } from "tslib"; +import * as utils from "../util/utils"; +import { BaseRequestPolicy, } from "./requestPolicy"; +export function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + return { + create: function (nextPolicy, options) { + return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); + }, + }; +} +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +var SystemErrorRetryPolicy = /** @class */ (function (_super) { + __extends(SystemErrorRetryPolicy, _super); + function SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + _this.DEFAULT_CLIENT_RETRY_COUNT = 3; + _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + _this.retryCount = typeof retryCount === "number" ? retryCount : _this.DEFAULT_CLIENT_RETRY_COUNT; + _this.retryInterval = + typeof retryInterval === "number" ? retryInterval : _this.DEFAULT_CLIENT_RETRY_INTERVAL; + _this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : _this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + _this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : _this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + return _this; + } + SystemErrorRetryPolicy.prototype.sendRequest = function (request) { + var _this = this; + return this._nextPolicy + .sendRequest(request.clone()) + .catch(function (error) { return retry(_this, request, error.response, error); }); + }; + return SystemErrorRetryPolicy; +}(BaseRequestPolicy)); +export { SystemErrorRetryPolicy }; +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy, retryData) { + var currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } + else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData(policy, retryData, err) { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + retryData.error = err; + } + // Adjust retry count + retryData.retryCount++; + // Adjust retry interval + var incrementDelta = Math.pow(2, retryData.retryCount) - 1; + var boundedRandDelta = policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + retryData.retryInterval = Math.min(policy.minRetryInterval + incrementDelta, policy.maxRetryInterval); + return retryData; +} +function retry(policy, request, operationResponse, err, retryData) { + return __awaiter(this, void 0, void 0, function () { + var error_1; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + retryData = updateRetryData(policy, retryData, err); + if (!(err && + err.code && + shouldRetry(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT"))) return [3 /*break*/, 5]; + _a.label = 1; + case 1: + _a.trys.push([1, 3, , 4]); + return [4 /*yield*/, utils.delay(retryData.retryInterval)]; + case 2: + _a.sent(); + return [2 /*return*/, policy._nextPolicy.sendRequest(request.clone())]; + case 3: + error_1 = _a.sent(); + return [2 /*return*/, retry(policy, request, operationResponse, error_1, retryData)]; + case 4: return [3 /*break*/, 6]; + case 5: + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return [2 /*return*/, Promise.reject(retryData.error)]; + } + return [2 /*return*/, operationResponse]; + case 6: return [2 /*return*/]; + } + }); + }); +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map new file mode 100644 index 0000000000..3225fab138 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/systemErrorRetryPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"systemErrorRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/systemErrorRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAG/F,OAAO,KAAK,KAAK,MAAM,eAAe,CAAC;AAEvC,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAczB,MAAM,UAAU,sBAAsB,CACpC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;IAEzB,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,sBAAsB,CAC/B,UAAU,EACV,OAAO,EACP,UAAU,EACV,aAAa,EACb,gBAAgB,EAChB,gBAAgB,CACjB,CAAC;QACJ,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;;;;;GASG;AACH;IAA4C,0CAAiB;IAU3D,gCACE,UAAyB,EACzB,OAAiC,EACjC,UAAmB,EACnB,aAAsB,EACtB,gBAAyB,EACzB,gBAAyB;QAN3B,YAQE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAY3B;QAzBD,mCAA6B,GAAG,IAAI,GAAG,EAAE,CAAC;QAC1C,gCAA0B,GAAG,CAAC,CAAC;QAC/B,uCAAiC,GAAG,IAAI,GAAG,EAAE,CAAC;QAC9C,uCAAiC,GAAG,IAAI,GAAG,CAAC,CAAC;QAW3C,KAAI,CAAC,UAAU,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,KAAI,CAAC,0BAA0B,CAAC;QAChG,KAAI,CAAC,aAAa;YAChB,OAAO,aAAa,KAAK,QAAQ,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,KAAI,CAAC,6BAA6B,CAAC;QACzF,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;gBAClC,CAAC,CAAC,gBAAgB;gBAClB,CAAC,CAAC,KAAI,CAAC,iCAAiC,CAAC;QAC7C,KAAI,CAAC,gBAAgB;YACnB,OAAO,gBAAgB,KAAK,QAAQ;gBAClC,CAAC,CAAC,gBAAgB;gBAClB,CAAC,CAAC,KAAI,CAAC,iCAAiC,CAAC;;IAC/C,CAAC;IAEM,4CAAW,GAAlB,UAAmB,OAAwB;QAA3C,iBAIC;QAHC,OAAO,IAAI,CAAC,WAAW;aACpB,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC;aAC5B,KAAK,CAAC,UAAC,KAAK,IAAK,OAAA,KAAK,CAAC,KAAI,EAAE,OAAO,EAAE,KAAK,CAAC,QAAQ,EAAE,KAAK,CAAC,EAA3C,CAA2C,CAAC,CAAC;IACnE,CAAC;IACH,6BAAC;AAAD,CAAC,AArCD,CAA4C,iBAAiB,GAqC5D;;AAED;;;;;;GAMG;AACH,SAAS,WAAW,CAAC,MAA8B,EAAE,SAAoB;IACvE,IAAI,YAAY,CAAC;IACjB,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CAAC,gEAAgE,CAAC,CAAC;KACnF;SAAM;QACL,YAAY,GAAG,SAAS,IAAI,SAAS,CAAC,UAAU,CAAC;KAClD;IACD,OAAO,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC;AAC1C,CAAC;AAED;;;;;GAKG;AACH,SAAS,eAAe,CACtB,MAA8B,EAC9B,SAAqB,EACrB,GAAgB;IAEhB,IAAI,CAAC,SAAS,EAAE;QACd,SAAS,GAAG;YACV,UAAU,EAAE,CAAC;YACb,aAAa,EAAE,CAAC;SACjB,CAAC;KACH;IAED,IAAI,GAAG,EAAE;QACP,IAAI,SAAS,CAAC,KAAK,EAAE;YACnB,GAAG,CAAC,UAAU,GAAG,SAAS,CAAC,KAAK,CAAC;SAClC;QAED,SAAS,CAAC,KAAK,GAAG,GAAG,CAAC;KACvB;IAED,qBAAqB;IACrB,SAAS,CAAC,UAAU,EAAE,CAAC;IAEvB,wBAAwB;IACxB,IAAI,cAAc,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC3D,IAAM,gBAAgB,GACpB,MAAM,CAAC,aAAa,GAAG,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,CAAC,MAAM,CAAC,aAAa,GAAG,GAAG,CAAC,CAAC,CAAC;IACxF,cAAc,IAAI,gBAAgB,CAAC;IAEnC,SAAS,CAAC,aAAa,GAAG,IAAI,CAAC,GAAG,CAChC,MAAM,CAAC,gBAAgB,GAAG,cAAc,EACxC,MAAM,CAAC,gBAAgB,CACxB,CAAC;IAEF,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAe,KAAK,CAClB,MAA8B,EAC9B,OAAwB,EACxB,iBAAwC,EACxC,GAAgB,EAChB,SAAqB;;;;;;oBAErB,SAAS,GAAG,eAAe,CAAC,MAAM,EAAE,SAAS,EAAE,GAAG,CAAC,CAAC;yBAElD,CAAA,GAAG;wBACH,GAAG,CAAC,IAAI;wBACR,WAAW,CAAC,MAAM,EAAE,SAAS,CAAC;wBAC9B,CAAC,GAAG,CAAC,IAAI,KAAK,WAAW;4BACvB,GAAG,CAAC,IAAI,KAAK,iBAAiB;4BAC9B,GAAG,CAAC,IAAI,KAAK,cAAc;4BAC3B,GAAG,CAAC,IAAI,KAAK,YAAY;4BACzB,GAAG,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAA,EAPxB,wBAOwB;;;;oBAItB,qBAAM,KAAK,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,EAAA;;oBAA1C,SAA0C,CAAC;oBAC3C,sBAAO,MAAM,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,KAAK,EAAE,CAAC,EAAC;;;oBAEvD,sBAAO,KAAK,CAAC,MAAM,EAAE,OAAO,EAAE,iBAAiB,EAAE,OAAK,EAAE,SAAS,CAAC,EAAC;;;oBAGrE,IAAI,GAAG,EAAE;wBACP,qFAAqF;wBACrF,sBAAO,OAAO,CAAC,MAAM,CAAC,SAAS,CAAC,KAAK,CAAC,EAAC;qBACxC;oBACD,sBAAO,iBAAiB,EAAC;;;;;CAE5B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts new file mode 100644 index 
0000000000..41ea640941 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts @@ -0,0 +1,28 @@ +import { BaseRequestPolicy, RequestPolicy, RequestPolicyOptionsLike, RequestPolicyFactory } from "./requestPolicy"; +import { WebResourceLike } from "../webResource"; +import { HttpOperationResponse } from "../httpOperationResponse"; +/** + * Options that control how to retry on response status code 429. + */ +export interface ThrottlingRetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. + */ + maxRetries?: number; +} +export declare function throttlingRetryPolicy(maxRetries?: number): RequestPolicyFactory; +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export declare class ThrottlingRetryPolicy extends BaseRequestPolicy { + private retryLimit; + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number); + sendRequest(httpRequest: WebResourceLike): Promise; + private retry; + static parseRetryAfterHeader(headerValue: string): number | undefined; + static parseDateRetryAfterHeader(headerValue: string): number | undefined; +} +//# sourceMappingURL=throttlingRetryPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map new file mode 100644 index 0000000000..124aa8ea77 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAGA,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,wBAAwB,EACxB,oBAAoB,EACrB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAOjE;;GAEG;AACH,MAAM,WAAW,sBAAsB;IACrC;;OAEG;IACH,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,wBAAgB,qBAAqB,CACnC,UAAU,GAAE,MAA4B,GACvC,oBAAoB,CAMtB;AAED;;;;;GAKG;AACH,qBAAa,qBAAsB,SAAQ,iBAAiB;IAC1D,OAAO,CAAC,UAAU,CAAS;gBAEf,UAAU,EAAE,aAAa,EAAE,OAAO,EAAE,wBAAwB,EAAE,UAAU,EAAE,MAAM;IAK/E,WAAW,CAAC,WAAW,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;YAMxE,KAAK;WA2BL,qBAAqB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;WAS9D,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS;CAWjF"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js new file mode 100644 index 0000000000..9768e0509a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import { __awaiter, __extends, __generator } from "tslib"; +import { BaseRequestPolicy, } from "./requestPolicy"; +import { Constants } from "../util/constants"; +import { delay } from "../util/utils"; +var StatusCodes = Constants.HttpConstants.StatusCodes; +var DEFAULT_RETRY_COUNT = 3; +export function throttlingRetryPolicy(maxRetries) { + if (maxRetries === void 0) { maxRetries = DEFAULT_RETRY_COUNT; } + return { + create: function (nextPolicy, options) { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +var ThrottlingRetryPolicy = /** @class */ (function (_super) { + __extends(ThrottlingRetryPolicy, _super); + function ThrottlingRetryPolicy(nextPolicy, options, retryLimit) { + var _this = _super.call(this, nextPolicy, options) || this; + _this.retryLimit = retryLimit; + return _this; + } + ThrottlingRetryPolicy.prototype.sendRequest = function (httpRequest) { + return __awaiter(this, void 0, void 0, function () { + var _this = this; + return __generator(this, function (_a) { + return [2 /*return*/, this._nextPolicy.sendRequest(httpRequest.clone()).then(function (response) { + return _this.retry(httpRequest, response, 0); + })]; + }); + }); + }; + ThrottlingRetryPolicy.prototype.retry = function (httpRequest, httpResponse, retryCount) { + return __awaiter(this, void 0, void 0, function () { + var retryAfterHeader, delayInMs, res; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return [2 /*return*/, httpResponse]; + } + retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); + if (!(retryAfterHeader && retryCount < this.retryLimit)) return [3 /*break*/, 3]; + delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); + if (!delayInMs) return [3 /*break*/, 3]; + return [4 /*yield*/, delay(delayInMs)]; + case 1: + _a.sent(); + return [4 /*yield*/, this._nextPolicy.sendRequest(httpRequest)]; + case 2: + res = _a.sent(); + return [2 /*return*/, this.retry(httpRequest, res, retryCount + 1)]; + case 3: return [2 /*return*/, httpResponse]; + } + }); + }); + }; + ThrottlingRetryPolicy.parseRetryAfterHeader = function (headerValue) { + var retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } + else { + return retryAfterInSeconds * 1000; + } + }; + ThrottlingRetryPolicy.parseDateRetryAfterHeader = function (headerValue) { + try { + var now = Date.now(); + var date = Date.parse(headerValue); + var diff = date - now; + return Number.isNaN(diff) ? 
undefined : diff; + } + catch (error) { + return undefined; + } + }; + return ThrottlingRetryPolicy; +}(BaseRequestPolicy)); +export { ThrottlingRetryPolicy }; +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map new file mode 100644 index 0000000000..91fa3691ba --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/throttlingRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"throttlingRetryPolicy.js","sourceRoot":"","sources":["../../../lib/policies/throttlingRetryPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAGzB,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AAEtC,IAAM,WAAW,GAAG,SAAS,CAAC,aAAa,CAAC,WAAW,CAAC;AACxD,IAAM,mBAAmB,GAAG,CAAC,CAAC;AAY9B,MAAM,UAAU,qBAAqB,CACnC,UAAwC;IAAxC,2BAAA,EAAA,gCAAwC;IAExC,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,qBAAqB,CAAC,UAAU,EAAE,OAAO,EAAE,UAAU,CAAC,CAAC;QACpE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH;IAA2C,yCAAiB;IAG1D,+BAAY,UAAyB,EAAE,OAAiC,EAAE,UAAkB;QAA5F,YACE,kBAAM,UAAU,EAAE,OAAO,CAAC,SAE3B;QADC,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;;IAC/B,CAAC;IAEY,2CAAW,GAAxB,UAAyB,WAA4B;;;;gBACnD,sBAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,UAAC,QAAQ;wBACrE,OAAO,KAAI,CAAC,KAAK,CAAC,WAAW,EAAE,QAAQ,EAAE,CAAC,CAAC,CAAC;oBAC9C,CAAC,CAAC,EAAC;;;KACJ;IAEa,qCAAK,GAAnB,UACE,WAA4B,EAC5B,YAAmC,EACnC,UAAkB;;;;;;wBAElB,IAAI,YAAY,CAAC,MAAM,KAAK,WAAW,CAAC,eAAe,EAAE;4BACvD,sBAAO,YAAY,EAAC;yBACrB;wBAEK,gBAAgB,GAAuB,YAAY,CAAC,OAAO,CAAC,GAAG,CACnE,SAAS,CAAC,eAAe,CAAC,WAAW,CACtC,CAAC;6BAEE,CAAA,gBAAgB,IAAI,UAAU,GAAG,IAAI,CAAC,UAAU,CAAA,EAAhD,wBAAgD;wBAC5C,SAAS,GAAuB,qBAAqB,CAAC,qBAAqB,CAC/E,gBAAgB,CACjB,CAAC;6BACE,SAAS,EAAT,wBAAS;wBACX,qBAAM,KAAK,CAAC,SAAS,CAAC,EAAA;;wBAAtB,SAAsB,CAAC;wBACX,qBAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,EAAA;;wBAArD,GAAG,GAAG,SAA+C;wBAC3D,sBAAO,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,UAAU,GAAG,CAAC,CAAC,EAAC;4BAIxD,sBAAO,YAAY,EAAC;;;;KACrB;IAEa,2CAAqB,GAAnC,UAAoC,WAAmB;QACrD,IAAM,mBAAmB,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC;QAChD,IAAI,MAAM,CAAC,KAAK,CAAC,mBAAmB,CAAC,EAAE;YACrC,OAAO,qBAAqB,CAAC,yBAAyB,CAAC,WAAW,CAAC,CAAC;SACrE;aAAM;YACL,OAAO,mBAAmB,GAAG,IAAI,CAAC;SACnC;IACH,CAAC;IAEa,+CAAyB,GAAvC,UAAwC,WAAmB;QACzD,IAAI;YACF,IAAM,GAAG,GAAW,IAAI,CAAC,GAAG,EAAE,CAAC;YAC/B,IAAM,IAAI,GAAW,IAAI,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;YAC7C,IAAM,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC;YAExB,OAAO,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC;SAC9C;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,SAAS,CAAC;SAClB;IACH,CAAC;IACH,4BAAC;AAAD,CAAC,AA7DD,CAA2C,iBAAiB,GA6D3D"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts new file mode 100644 index 0000000000..2ba12d403d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts @@ -0,0 +1,21 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { getDefaultUserAgentKey } from "./msRestUserAgentPolicy"; +import { BaseRequestPolicy, RequestPolicy, RequestPolicyFactory, RequestPolicyOptionsLike } from "./requestPolicy"; +export declare type TelemetryInfo = { + key?: string; + value?: string; +}; +export declare const getDefaultUserAgentHeaderName: typeof 
getDefaultUserAgentKey; +export declare function getDefaultUserAgentValue(): string; +export declare function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory; +export declare class UserAgentPolicy extends BaseRequestPolicy { + readonly _nextPolicy: RequestPolicy; + readonly _options: RequestPolicyOptionsLike; + protected headerKey: string; + protected headerValue: string; + constructor(_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike, headerKey: string, headerValue: string); + sendRequest(request: WebResourceLike): Promise; + addUserAgentHeader(request: WebResourceLike): void; +} +//# sourceMappingURL=userAgentPolicy.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map new file mode 100644 index 0000000000..4a3c4c1ce4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"userAgentPolicy.d.ts","sourceRoot":"","sources":["../../../lib/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AAEjE,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AACjD,OAAO,EAAE,sBAAsB,EAA2B,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EACL,iBAAiB,EACjB,aAAa,EACb,oBAAoB,EACpB,wBAAwB,EACzB,MAAM,iBAAiB,CAAC;AAEzB,oBAAY,aAAa,GAAG;IAAE,GAAG,CAAC,EAAE,MAAM,CAAC;IAAC,KAAK,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC;AAwB7D,eAAO,MAAM,6BAA6B,+BAAyB,CAAC;AAEpE,wBAAgB,wBAAwB,IAAI,MAAM,CAKjD;AAED,wBAAgB,eAAe,CAAC,aAAa,CAAC,EAAE,aAAa,GAAG,oBAAoB,CAanF;AAED,qBAAa,eAAgB,SAAQ,iBAAiB;IAElD,QAAQ,CAAC,WAAW,EAAE,aAAa;IACnC,QAAQ,CAAC,QAAQ,EAAE,wBAAwB;IAC3C,SAAS,CAAC,SAAS,EAAE,MAAM;IAC3B,SAAS,CAAC,WAAW,EAAE,MAAM;gBAHpB,WAAW,EAAE,aAAa,EAC1B,QAAQ,EAAE,wBAAwB,EACjC,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,MAAM;IAK/B,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IAKrE,kBAAkB,CAAC,OAAO,EAAE,eAAe,GAAG,IAAI;CASnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js new file mode 100644 index 0000000000..21a439140c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js @@ -0,0 +1,68 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { BaseRequestPolicy, } from "./requestPolicy"; +function getRuntimeInfo() { + var msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator, valueSeparator) { + if (keySeparator === void 0) { keySeparator = " "; } + if (valueSeparator === void 0) { valueSeparator = "/"; } + return telemetryInfo + .map(function (info) { + var value = info.value ? 
"" + valueSeparator + info.value : ""; + return "" + info.key + value; + }) + .join(keySeparator); +} +export var getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +export function getDefaultUserAgentValue() { + var runtimeInfo = getRuntimeInfo(); + var platformSpecificData = getPlatformSpecificData(); + var userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +export function userAgentPolicy(userAgentData) { + var key = !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + var value = !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: function (nextPolicy, options) { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +var UserAgentPolicy = /** @class */ (function (_super) { + __extends(UserAgentPolicy, _super); + function UserAgentPolicy(_nextPolicy, _options, headerKey, headerValue) { + var _this = _super.call(this, _nextPolicy, _options) || this; + _this._nextPolicy = _nextPolicy; + _this._options = _options; + _this.headerKey = headerKey; + _this.headerValue = headerValue; + return _this; + } + UserAgentPolicy.prototype.sendRequest = function (request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + }; + UserAgentPolicy.prototype.addUserAgentHeader = function (request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + }; + return UserAgentPolicy; +}(BaseRequestPolicy)); +export { UserAgentPolicy }; +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map new file mode 100644 index 0000000000..3e2fb85019 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/policies/userAgentPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"userAgentPolicy.js","sourceRoot":"","sources":["../../../lib/policies/userAgentPolicy.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAE,WAAW,EAAE,MAAM,gBAAgB,CAAC;AAE7C,OAAO,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AAE9C,OAAO,EAAE,sBAAsB,EAAE,uBAAuB,EAAE,MAAM,yBAAyB,CAAC;AAC1F,OAAO,EACL,iBAAiB,GAIlB,MAAM,iBAAiB,CAAC;AAIzB,SAAS,cAAc;IACrB,IAAM,aAAa,GAAG;QACpB,GAAG,EAAE,YAAY;QACjB,KAAK,EAAE,SAAS,CAAC,aAAa;KAC/B,CAAC;IAEF,OAAO,CAAC,aAAa,CAAC,CAAC;AACzB,CAAC;AAED,SAAS,kBAAkB,CACzB,aAA8B,EAC9B,YAAkB,EAClB,cAAoB;IADpB,6BAAA,EAAA,kBAAkB;IAClB,+BAAA,EAAA,oBAAoB;IAEpB,OAAO,aAAa;SACjB,GAAG,CAAC,UAAC,IAAI;QACR,IAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,KAAG,cAAc,GAAG,IAAI,CAAC,KAAO,CAAC,CAAC,CAAC,EAAE,CAAC;QACjE,OAAO,KAAG,IAAI,CAAC,GAAG,GAAG,KAAO,CAAC;IAC/B,CAAC,CAAC;SACD,IAAI,CAAC,YAAY,CAAC,CAAC;AACxB,CAAC;AAED,MAAM,CAAC,IAAM,6BAA6B,GAAG,sBAAsB,CAAC;AAEpE,MAAM,UAAU,wBAAwB;IACtC,IAAM,WAAW,GAAG,cAAc,EAAE,CAAC;IACrC,IAAM,oBAAoB,GAAG,uBAAuB,EAAE,CAAC;IACvD,IAAM,SAAS,GAAG,kBAAkB,CAAC,WAAW,CAAC,MAAM,CAAC,oBAAoB,CAAC,CAAC,CAAC;IAC/E,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,eAAe,CAAC,aAA6B;IAC3D,IAAM,GAAG,GACP,CAAC,aAAa,IAAI,aAAa,CAAC,GAAG,IAAI,SAAS,CAAC,CAAC,CAAC,sBAAsB,EAAE,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC;IAClG,IAAM,KAAK,GACT,CAAC,aAAa,IAAI,aAAa,CAAC,KAAK,IAAI,SAAS;QAChD,CAAC,CAAC,wBAAwB,EAAE;QAC5B,CAAC,CAAC,aAAa,CAAC,KAAK,CAAC;IAE1B,OAAO;QACL,MAAM,EAAE,UAAC,UAAyB,EAAE,OAAiC;YACnE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;QAC9D,CAAC;KACF,CAAC;AACJ,CAAC;AAED;IAAqC,mCAAiB;IACpD,yBACW,WAA0B,EAC1B,QAAkC,EACjC,SAAiB,EACjB,WAAmB;QAJ/B,YAME,kBAAM,WAAW,EAAE,QAAQ,CAAC,SAC7B;QANU,iBAAW,GAAX,WAAW,CAAe;QAC1B,cAAQ,GAAR,QAAQ,CAA0B;QACjC,eAAS,GAAT,SAAS,CAAQ;QACjB,iBAAW,GAAX,WAAW,CAAQ;;IAG/B,CAAC;IAED,qCAAW,GAAX,UAAY,OAAwB;QAClC,IAAI,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;QACjC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;IAED,4CAAkB,GAAlB,UAAmB,OAAwB;QACzC,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;YACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;SACrC;QAED,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,IAAI,CAAC,WAAW,EAAE;YAC5D,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;SACvD;IACH,CAAC;IACH,sBAAC;AAAD,CAAC,AAxBD,CAAqC,iBAAiB,GAwBrD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts new file mode 100644 index 0000000000..3ac840579d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts @@ -0,0 +1,33 @@ +/// +import * as http from "http"; +import * as https from "https"; +import { ProxySettings } from "./serviceClient"; +import { HttpHeadersLike } from "./httpHeaders"; +export declare type ProxyAgent = { + isHttps: boolean; + agent: http.Agent | https.Agent; +}; +export declare function createProxyAgent(requestUrl: string, proxySettings: ProxySettings, headers?: HttpHeadersLike): ProxyAgent; +export interface HttpsProxyOptions { + host: string; + port: number; + localAddress?: string; + proxyAuth?: string; + headers?: { + [key: string]: any; + }; + ca?: Buffer[]; + servername?: string; + key?: Buffer; + cert?: Buffer; +} +interface HttpsOverHttpsOptions { + maxSockets?: number; + ca?: Buffer[]; + key?: Buffer; + cert?: Buffer; + proxy?: HttpsProxyOptions; +} +export declare function createTunnel(isRequestHttps: boolean, isProxyHttps: boolean, tunnelOptions: HttpsOverHttpsOptions): http.Agent | https.Agent; +export {}; +//# sourceMappingURL=proxyAgent.d.ts.map \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map new file mode 100644 index 0000000000..13222d6865 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.d.ts","sourceRoot":"","sources":["../../lib/proxyAgent.ts"],"names":[],"mappings":";AAGA,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAG/B,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAEhD,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,oBAAY,UAAU,GAAG;IAAE,OAAO,EAAE,OAAO,CAAC;IAAC,KAAK,EAAE,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAAA;CAAE,CAAC;AAC/E,wBAAgB,gBAAgB,CAC9B,UAAU,EAAE,MAAM,EAClB,aAAa,EAAE,aAAa,EAC5B,OAAO,CAAC,EAAE,eAAe,GACxB,UAAU,CA0BZ;AAID,MAAM,WAAW,iBAAiB;IAChC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,UAAU,qBAAqB;IAC7B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC;IACd,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,KAAK,CAAC,EAAE,iBAAiB,CAAC;CAC3B;AAED,wBAAgB,YAAY,CAC1B,cAAc,EAAE,OAAO,EACvB,YAAY,EAAE,OAAO,EACrB,aAAa,EAAE,qBAAqB,GACnC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,CAU1B"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js new file mode 100644 index 0000000000..c832234429 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js @@ -0,0 +1,43 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+import * as tunnel from "tunnel"; +import { URLBuilder } from "./url"; +export function createProxyAgent(requestUrl, proxySettings, headers) { + var tunnelOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost(), + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = proxySettings.username + ":" + proxySettings.password; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = "" + proxySettings.username; + } + var requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + var isRequestHttps = requestScheme.toLowerCase() === "https"; + var proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + var isProxyHttps = proxyScheme.toLowerCase() === "https"; + var proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +export function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } + else { + return tunnel.httpOverHttp(tunnelOptions); + } +} +//# sourceMappingURL=proxyAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map new file mode 100644 index 0000000000..fe48db5c53 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/proxyAgent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"proxyAgent.js","sourceRoot":"","sources":["../../lib/proxyAgent.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAI/F,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAGjC,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AAInC,MAAM,UAAU,gBAAgB,CAC9B,UAAkB,EAClB,aAA4B,EAC5B,OAAyB;IAEzB,IAAM,aAAa,GAAiC;QAClD,KAAK,EAAE;YACL,IAAI,EAAE,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,OAAO,EAAY;YAC9D,IAAI,EAAE,aAAa,CAAC,IAAI;YACxB,OAAO,EAAE,CAAC,OAAO,IAAI,OAAO,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE;SACjD;KACF,CAAC;IAEF,IAAI,aAAa,CAAC,QAAQ,IAAI,aAAa,CAAC,QAAQ,EAAE;QACpD,aAAa,CAAC,KAAM,CAAC,SAAS,GAAM,aAAa,CAAC,QAAQ,SAAI,aAAa,CAAC,QAAU,CAAC;KACxF;SAAM,IAAI,aAAa,CAAC,QAAQ,EAAE;QACjC,aAAa,CAAC,KAAM,CAAC,SAAS,GAAG,KAAG,aAAa,CAAC,QAAU,CAAC;KAC9D;IAED,IAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IACrE,IAAM,cAAc,GAAG,aAAa,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAC/D,IAAM,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,IAAI,EAAE,CAAC;IAC3E,IAAM,YAAY,GAAG,WAAW,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC;IAE3D,IAAM,UAAU,GAAG;QACjB,OAAO,EAAE,cAAc;QACvB,KAAK,EAAE,YAAY,CAAC,cAAc,EAAE,YAAY,EAAE,aAAa,CAAC;KACjE,CAAC;IAEF,OAAO,UAAU,CAAC;AACpB,CAAC;AAwBD,MAAM,UAAU,YAAY,CAC1B,cAAuB,EACvB,YAAqB,EACrB,aAAoC;IAEpC,IAAI,cAAc,IAAI,YAAY,EAAE;QAClC,OAAO,MAAM,CAAC,cAAc,CAAC,aAAa,CAAC,CAAC;KAC7C;SAAM,IAAI,cAAc,IAAI,CAAC,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM,IAAI,CAAC,cAAc,IAAI,YAAY,EAAE;QAC1C,OAAO,MAAM,CAAC,aAAa,CAAC,aAAa,CAAC,CAAC;KAC5C;SAAM;QACL,OAAO,MAAM,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC;KAC3C;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts new file mode 100644 index 0000000000..a235f85631 --- /dev/null +++ 
b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts @@ -0,0 +1,11 @@ +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export declare enum QueryCollectionFormat { + Csv = ",", + Ssv = " ", + Tsv = "\t", + Pipes = "|", + Multi = "Multi" +} +//# sourceMappingURL=queryCollectionFormat.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map new file mode 100644 index 0000000000..7ad8dc8499 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.d.ts","sourceRoot":"","sources":["../../lib/queryCollectionFormat.ts"],"names":[],"mappings":"AAGA;;GAEG;AACH,oBAAY,qBAAqB;IAC/B,GAAG,MAAM;IACT,GAAG,MAAM;IACT,GAAG,OAAO;IACV,KAAK,MAAM;IACX,KAAK,UAAU;CAChB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js new file mode 100644 index 0000000000..6128d013da --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export var QueryCollectionFormat; +(function (QueryCollectionFormat) { + QueryCollectionFormat["Csv"] = ","; + QueryCollectionFormat["Ssv"] = " "; + QueryCollectionFormat["Tsv"] = "\t"; + QueryCollectionFormat["Pipes"] = "|"; + QueryCollectionFormat["Multi"] = "Multi"; +})(QueryCollectionFormat || (QueryCollectionFormat = {})); +//# sourceMappingURL=queryCollectionFormat.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map new file mode 100644 index 0000000000..29c806c191 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/queryCollectionFormat.js.map @@ -0,0 +1 @@ +{"version":3,"file":"queryCollectionFormat.js","sourceRoot":"","sources":["../../lib/queryCollectionFormat.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;GAEG;AACH,MAAM,CAAN,IAAY,qBAMX;AAND,WAAY,qBAAqB;IAC/B,kCAAS,CAAA;IACT,kCAAS,CAAA;IACT,mCAAU,CAAA;IACV,oCAAW,CAAA;IACX,wCAAe,CAAA;AACjB,CAAC,EANW,qBAAqB,KAArB,qBAAqB,QAMhC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts new file mode 100644 index 0000000000..7b889c4c98 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts @@ -0,0 +1,14 @@ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +export declare class RestError extends Error { + static readonly REQUEST_SEND_ERROR: string; + static readonly REQUEST_ABORTED_ERROR: string; + static readonly PARSE_ERROR: string; + code?: string; + statusCode?: number; + request?: WebResourceLike; + response?: HttpOperationResponse; + body?: any; + constructor(message: string, code?: string, statusCode?: number, request?: WebResourceLike, response?: HttpOperationResponse, body?: any); +} +//# sourceMappingURL=restError.d.ts.map \ No newline at end of file diff --git 
a/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map new file mode 100644 index 0000000000..d058e5a5d0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.d.ts","sourceRoot":"","sources":["../../lib/restError.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,eAAe,EAAE,MAAM,eAAe,CAAC;AAEhD,qBAAa,SAAU,SAAQ,KAAK;IAClC,MAAM,CAAC,QAAQ,CAAC,kBAAkB,EAAE,MAAM,CAAwB;IAClE,MAAM,CAAC,QAAQ,CAAC,qBAAqB,EAAE,MAAM,CAA2B;IACxE,MAAM,CAAC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAiB;IAEpD,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,eAAe,CAAC;IAC1B,QAAQ,CAAC,EAAE,qBAAqB,CAAC;IACjC,IAAI,CAAC,EAAE,GAAG,CAAC;gBAET,OAAO,EAAE,MAAM,EACf,IAAI,CAAC,EAAE,MAAM,EACb,UAAU,CAAC,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,EAChC,IAAI,CAAC,EAAE,GAAG;CAWb"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.js b/node_modules/@azure/ms-rest-js/es/lib/restError.js new file mode 100644 index 0000000000..9062520324 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __extends } from "tslib"; +var RestError = /** @class */ (function (_super) { + __extends(RestError, _super); + function RestError(message, code, statusCode, request, response, body) { + var _this = _super.call(this, message) || this; + _this.code = code; + _this.statusCode = statusCode; + _this.request = request; + _this.response = response; + _this.body = body; + Object.setPrototypeOf(_this, RestError.prototype); + return _this; + } + RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; + RestError.REQUEST_ABORTED_ERROR = "REQUEST_ABORTED_ERROR"; + RestError.PARSE_ERROR = "PARSE_ERROR"; + return RestError; +}(Error)); +export { RestError }; +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/restError.js.map b/node_modules/@azure/ms-rest-js/es/lib/restError.js.map new file mode 100644 index 0000000000..5772499db0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/restError.js.map @@ -0,0 +1 @@ +{"version":3,"file":"restError.js","sourceRoot":"","sources":["../../lib/restError.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAK/F;IAA+B,6BAAK;IAUlC,mBACE,OAAe,EACf,IAAa,EACb,UAAmB,EACnB,OAAyB,EACzB,QAAgC,EAChC,IAAU;QANZ,YAQE,kBAAM,OAAO,CAAC,SAQf;QAPC,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,KAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,KAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,KAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,KAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QAEjB,MAAM,CAAC,cAAc,CAAC,KAAI,EAAE,SAAS,CAAC,SAAS,CAAC,CAAC;;IACnD,CAAC;IAzBe,4BAAkB,GAAW,oBAAoB,CAAC;IAClD,+BAAqB,GAAW,uBAAuB,CAAC;IACxD,qBAAW,GAAW,aAAa,CAAC;IAwBtD,gBAAC;CAAA,AA3BD,CAA+B,KAAK,GA2BnC;SA3BY,SAAS"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts new file mode 100644 index 0000000000..2fb084f0bf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts @@ -0,0 +1,130 @@ +export declare class Serializer { + readonly modelMappers: { + [key: string]: any; + }; + readonly isXML?: boolean | undefined; + constructor(modelMappers?: { + [key: string]: any; + }, isXML?: boolean | undefined); + 
validateConstraints(mapper: Mapper, value: any, objectName: string): void; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + serialize(mapper: Mapper, object: any, objectName?: string): any; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + deserialize(mapper: Mapper, responseBody: any, objectName: string): any; +} +export interface MapperConstraints { + InclusiveMaximum?: number; + ExclusiveMaximum?: number; + InclusiveMinimum?: number; + ExclusiveMinimum?: number; + MaxLength?: number; + MinLength?: number; + Pattern?: RegExp; + MaxItems?: number; + MinItems?: number; + UniqueItems?: true; + MultipleOf?: number; +} +export declare type MapperType = SimpleMapperType | CompositeMapperType | SequenceMapperType | DictionaryMapperType | EnumMapperType; +export interface SimpleMapperType { + name: "Base64Url" | "Boolean" | "ByteArray" | "Date" | "DateTime" | "DateTimeRfc1123" | "Object" | "Stream" | "String" | "TimeSpan" | "UnixTime" | "Uuid" | "Number" | "any"; +} +export interface CompositeMapperType { + name: "Composite"; + className?: string; + modelProperties?: { + [propertyName: string]: Mapper; + }; + additionalProperties?: Mapper; + uberParent?: string; + polymorphicDiscriminator?: PolymorphicDiscriminator; +} +export interface SequenceMapperType { + name: "Sequence"; + element: Mapper; +} +export interface DictionaryMapperType { + name: "Dictionary"; + value: Mapper; +} +export interface EnumMapperType { + name: "Enum"; + allowedValues: any[]; +} +export interface BaseMapper { + xmlName?: string; + xmlIsAttribute?: boolean; + xmlElementName?: string; + xmlIsWrapped?: boolean; + readOnly?: boolean; + isConstant?: boolean; + required?: boolean; + nullable?: boolean; + serializedName?: string; + type: MapperType; + defaultValue?: any; + constraints?: MapperConstraints; +} +export declare type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; +export interface PolymorphicDiscriminator { + serializedName: string; + clientName: string; + [key: string]: string; +} +export interface CompositeMapper extends BaseMapper { + type: CompositeMapperType; +} +export interface SequenceMapper extends BaseMapper { + type: SequenceMapperType; +} +export interface DictionaryMapper extends BaseMapper { + type: DictionaryMapperType; + headerCollectionPrefix?: string; +} +export interface EnumMapper extends BaseMapper { + type: EnumMapperType; +} +export interface UrlParameterValue { + value: string; + skipUrlEncoding: boolean; +} +export declare function serializeObject(toSerialize: any): any; +export declare const MapperType: { + Base64Url: "Base64Url"; + Boolean: "Boolean"; + ByteArray: "ByteArray"; + Date: "Date"; + DateTime: "DateTime"; 
+ DateTimeRfc1123: "DateTimeRfc1123"; + Object: "Object"; + Stream: "Stream"; + String: "String"; + TimeSpan: "TimeSpan"; + UnixTime: "UnixTime"; + Number: "Number"; + Composite: "Composite"; + Sequence: "Sequence"; + Dictionary: "Dictionary"; + Enum: "Enum"; +}; +//# sourceMappingURL=serializer.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map new file mode 100644 index 0000000000..2516371c28 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.d.ts","sourceRoot":"","sources":["../../lib/serializer.ts"],"names":[],"mappings":"AAMA,qBAAa,UAAU;aAEH,YAAY,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE;aACpC,KAAK,CAAC;gBADN,YAAY,GAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAO,EACzC,KAAK,CAAC,qBAAS;IAGjC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG,EAAE,UAAU,EAAE,MAAM,GAAG,IAAI;IA8DzE;;;;;;;;;;OAUG;IACH,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,GAAG,EAAE,UAAU,CAAC,EAAE,MAAM,GAAG,GAAG;IAmEhE;;;;;;;;;;OAUG;IACH,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,YAAY,EAAE,GAAG,EAAE,UAAU,EAAE,MAAM,GAAG,GAAG;CA4ExE;AAipBD,MAAM,WAAW,iBAAiB;IAChC,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,IAAI,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,oBAAY,UAAU,GAClB,gBAAgB,GAChB,mBAAmB,GACnB,kBAAkB,GAClB,oBAAoB,GACpB,cAAc,CAAC;AAEnB,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EACA,WAAW,GACX,SAAS,GACT,WAAW,GACX,MAAM,GACN,UAAU,GACV,iBAAiB,GACjB,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,UAAU,GACV,UAAU,GACV,MAAM,GACN,QAAQ,GACR,KAAK,CAAC;CACX;AAED,MAAM,WAAW,mBAAmB;IAClC,IAAI,EAAE,WAAW,CAAC;IAKlB,SAAS,CAAC,EAAE,MAAM,CAAC;IAEnB,eAAe,CAAC,EAAE;QAAE,CAAC,YAAY,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IACrD,oBAAoB,CAAC,EAAE,MAAM,CAAC;IAE9B,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,wBAAwB,CAAC,EAAE,wBAAwB,CAAC;CACrD;AAED,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,UAAU,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,YAAY,CAAC;IACnB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,aAAa,EAAE,GAAG,EAAE,CAAC;CACtB;AAED,MAAM,WAAW,UAAU;IACzB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,IAAI,EAAE,UAAU,CAAC;IACjB,YAAY,CAAC,EAAE,GAAG,CAAC;IACnB,WAAW,CAAC,EAAE,iBAAiB,CAAC;CACjC;AAED,oBAAY,MAAM,GAAG,UAAU,GAAG,eAAe,GAAG,cAAc,GAAG,gBAAgB,GAAG,UAAU,CAAC;AAEnG,MAAM,WAAW,wBAAwB;IACvC,cAAc,EAAE,MAAM,CAAC;IACvB,UAAU,EAAE,MAAM,CAAC;IACnB,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAC;CACvB;AAED,MAAM,WAAW,eAAgB,SAAQ,UAAU;IACjD,IAAI,EAAE,mBAAmB,CAAC;CAC3B;AAED,MAAM,WAAW,cAAe,SAAQ,UAAU;IAChD,IAAI,EAAE,kBAAkB,CAAC;CAC1B;AAED,MAAM,WAAW,gBAAiB,SAAQ,UAAU;IAClD,IAAI,EAAE,oBAAoB,CAAC;IAC3B,sBAAsB,CAAC,EAAE,MAAM,CAAC;CACjC;AAED,MAAM,WAAW,UAAW,SAAQ,UAAU;IAC5C,IAAI,EAAE,cAAc,CAAC;CACtB;AAED,MAAM,WAAW,iBAAiB;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,eAAe,EAAE,OAAO,CAAC;CAC1B;AAGD,wBAAgB,eAAe,CAAC,WAAW,EAAE,GAAG,GAAG,GAAG,CAqBrD;AAaD,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;CAiBrB,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.js b/node_modules/@azure/ms-rest-js/es/lib/serializer.js new 
file mode 100644 index 0000000000..9e60d917c6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.js @@ -0,0 +1,789 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; +var Serializer = /** @class */ (function () { + function Serializer(modelMappers, isXML) { + if (modelMappers === void 0) { modelMappers = {}; } + this.modelMappers = modelMappers; + this.isXML = isXML; + } + Serializer.prototype.validateConstraints = function (mapper, value, objectName) { + var failValidation = function (constraintName, constraintValue) { + throw new Error("\"" + objectName + "\" with value \"" + value + "\" should satisfy the constraint \"" + constraintName + "\": " + constraintValue + "."); + }; + if (mapper.constraints && value != undefined) { + var _a = mapper.constraints, ExclusiveMaximum = _a.ExclusiveMaximum, ExclusiveMinimum = _a.ExclusiveMinimum, InclusiveMaximum = _a.InclusiveMaximum, InclusiveMinimum = _a.InclusiveMinimum, MaxItems = _a.MaxItems, MaxLength = _a.MaxLength, MinItems = _a.MinItems, MinLength = _a.MinLength, MultipleOf = _a.MultipleOf, Pattern = _a.Pattern, UniqueItems = _a.UniqueItems; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + var pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some(function (item, i, ar) { return ar.indexOf(item) !== i; })) { + failValidation("UniqueItems", UniqueItems); + } + } + }; + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + Serializer.prototype.serialize = function (mapper, object, objectName) { + var payload = {}; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + var required = mapper.required, nullable = mapper.nullable; + if (required && nullable && object === undefined) { + throw new Error(objectName + " cannot be undefined."); + } + if (required && !nullable && object == undefined) { + throw new Error(objectName + " cannot be null or undefined."); + } + if (!required && nullable === false && object === null) { + throw new Error(objectName + " cannot be null."); + } + if (object == undefined) { + payload = object; + } + else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/gi) !== null) { + var enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName); + } + else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName); + } + } + return payload; + }; + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} responseBody A valid Javascript entity to be deserialized + * + * @param {string} 
objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + Serializer.prototype.deserialize = function (mapper, responseBody, objectName) { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + var payload; + var mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName); + } + else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper, responseBody, objectName); + } + else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + }; + return Serializer; +}()); +export { Serializer }; +function trimEnd(str, ch) { + var len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error("Please provide an input of type Uint8Array for converting to Base64Url."); + } + // Uint8Array to Base64. + var str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. 
+ str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. + return base64.decodeString(str); +} +function splitSerializeName(prop) { + var classes = []; + var partialclass = ""; + if (prop) { + var subwords = prop.split("."); + for (var _i = 0, subwords_1 = subwords; _i < subwords_1.length; _i++) { + var item = subwords_1[_i]; + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(objectName + " with value " + value + " must be of type number."); + } + } + else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(objectName + " with value \"" + value + "\" must be of type string."); + } + } + else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error(objectName + " with value \"" + value + "\" must be of type string and a valid uuid."); + } + } + else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(objectName + " with value " + value + " must be of type boolean."); + } + } + else if (typeName.match(/^Stream$/gi) !== null) { + var objectType = typeof value; + if (objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob)) { + throw new Error(objectName + " must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream."); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error("Please provide a set of allowedValues to validate " + objectName + " as an Enum Type."); + } + var isPresent = allowedValues.some(function (item) { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(value + " is not a valid value for " + objectName + ". 
The valid values are: " + JSON.stringify(allowedValues) + "."); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(objectName + " must be of type Uint8Array."); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in ISO8601 format."); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123 format."); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/gi) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(objectName + " must be an instanceof Date or a string in RFC-1123/ISO8601 format " + + "for it to be serialized in UnixTime/Epoch format."); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!utils.isDuration(value)) { + throw new Error(objectName + " must be a string in ISO 8601 format. 
Instead was \"" + value + "\"."); + } + value = value; + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName) { + if (!Array.isArray(object)) { + throw new Error(objectName + " must be of type Array."); + } + var elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempArray = []; + for (var i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName) { + if (typeof object !== "object") { + throw new Error(objectName + " must be of type object."); + } + var valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName + ".")); + } + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(object); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} +/** + * Resolves a composite mapper's modelProperties. + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + var modelProps = mapper.type.modelProperties; + if (!modelProps) { + var className = mapper.type.className; + if (!className) { + throw new Error("Class name for model \"" + objectName + "\" is not provided in the mapper \"" + JSON.stringify(mapper, undefined, 2) + "\"."); + } + var modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error("mapper() cannot be null or undefined for model \"" + className + "\"."); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error("modelProperties cannot be null or undefined in the " + + ("mapper \"" + JSON.stringify(modelMapper) + "\" of type \"" + className + "\" for object \"" + objectName + "\".")); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName) { + var _a; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object != undefined) { + var payload = {}; + var modelProps = resolveModelProperties(serializer, mapper, objectName); + for (var _i = 0, _b = Object.keys(modelProps); _i < _b.length; _i++) { + var key = _b[_i]; + var propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + var propName = void 0; + var parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + var paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (var _c = 0, paths_1 = paths; _c < paths_1.length; _c++) { + var pathName = paths_1[_c]; + var childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + if (parentObject != undefined) { + var 
propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + var toSerialize = object[key]; + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined) { + toSerialize = mapper.serializedName; + } + var serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } + else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = (_a = {}, _a[propertyMapper.xmlElementName] = serializedValue, _a); + } + else { + parentObject[propName] = serializedValue; + } + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var propNames = Object.keys(modelProps); + var _loop_1 = function (clientPropName) { + var isAdditionalProperty = propNames.every(function (pn) { return pn !== clientPropName; }); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]'); + } + }; + for (var clientPropName in object) { + _loop_1(clientPropName); + } + } + return payload; + } + return object; +} +function isSpecialXmlProperty(propertyName) { + return ["$", "_"].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + var modelProps = resolveModelProperties(serializer, mapper, objectName); + var instance = {}; + var handledPropertyNames = []; + for (var _i = 0, _a = Object.keys(modelProps); _i < _a.length; _i++) { + var key = _a[_i]; + var propertyMapper = modelProps[key]; + var paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + var serializedName = propertyMapper.serializedName, xmlName = propertyMapper.xmlName, xmlElementName = propertyMapper.xmlElementName; + var propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + var headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + var dictionary = {}; + for (var _b = 0, _c = Object.keys(responseBody); _b < _c.length; _b++) { + var headerKey = _c[_b]; + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize(propertyMapper, responseBody.$[xmlName], propertyObjectName); + } + else { + var propertyName = xmlElementName || xmlName || serializedName; + var unwrappedProperty = responseBody[propertyName]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName]; + var isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize(propertyMapper, unwrappedProperty, propertyObjectName); + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + var propertyInstance = void 0; + var res = responseBody; + // traversing the object step by step. + for (var _d = 0, paths_2 = paths; _d < paths_2.length; _d++) { + var item = paths_2[_d]; + if (!res) + break; + res = res[item]; + } + propertyInstance = res; + var polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined) { + propertyInstance = mapper.serializedName; + } + var serializedValue = void 0; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + var arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (var _e = 0, _f = Object.entries(instance); _e < _f.length; _e++) { + var _g = _f[_e], key_1 = _g[0], value = _g[1]; + if (!arrayInstance.hasOwnProperty(key_1)) { + arrayInstance[key_1] = value; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName); + instance[key] = serializedValue; + } + } + } + var additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + var isAdditionalProperty = function (responsePropName) { + for (var clientPropName in modelProps) { + var paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (var responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]'); + } + } + } + else if (responseBody) { + for (var _h = 0, _j = Object.keys(responseBody); _h < _j.length; _h++) { + var key = _j[_h]; + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error("\"value\" metadata for a Dictionary must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + var tempDictionary = {}; + for (var _i = 0, _a = Object.keys(responseBody); _i < _a.length; _i++) { + var key = _a[_i]; + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName) { + /*jshint validthis: true */ + var element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error("element\" metadata for an Array must be defined in the " + + ("mapper and it must of type \"object\" in " + objectName)); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + var tempArray = []; + for (var i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], objectName + "[" + i + "]"); + } + return tempArray; + } + return responseBody; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); 
+ if (polymorphicDiscriminator) { + var discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + var discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + var typeName = mapper.type.uberParent || mapper.type.className; + var indexDiscriminator = discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + var polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +// TODO: why is this here? +export function serializeObject(toSerialize) { + if (toSerialize == undefined) + return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } + else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } + else if (Array.isArray(toSerialize)) { + var array = []; + for (var i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } + else if (typeof toSerialize === "object") { + var dictionary = {}; + for (var property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o) { + var result = {}; + for (var _i = 0, o_1 = o; _i < o_1.length; _i++) { + var key = o_1[_i]; + result[key] = key; + } + return result; +} +export var MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map b/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map new file mode 100644 index 0000000000..cfeb735e59 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serializer.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../../lib/serializer.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,MAAM,MAAM,eAAe,CAAC;AACxC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AAEtC;IACE,oBACkB,YAAyC,EACzC,KAAe;QADf,6BAAA,EAAA,iBAAyC;QAAzC,iBAAY,GAAZ,YAAY,CAA6B;QACzC,UAAK,GAAL,KAAK,CAAU;IAC9B,CAAC;IAEJ,wCAAmB,GAAnB,UAAoB,MAAc,EAAE,KAAU,EAAE,UAAkB;QAChE,IAAM,cAAc,GAAG,UAAC,cAAuC,EAAE,eAAoB;YACnF,MAAM,IAAI,KAAK,CACb,OAAI,UAAU,wBAAiB,KAAK,2CAAoC,cAAc,YAAM,eAAe,MAAG,CAC/G,CAAC;QACJ,CAAC,CAAC;QACF,IAAI,MAAM,CAAC,WAAW,IAAI,KAAK,IAAI,SAAS,EAAE;YACtC,IAAA,KAYF,MAAM,CAAC,WAAW,EAXpB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,gBAAgB,sBAAA,EAChB,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,QAAQ,cAAA,EACR,SAAS,eAAA,EACT,UAAU,gBAAA,EACV,OAAO,aAAA,EACP,WAAW,iBACS,CAAC;YACvB,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,IAAI,gBAAgB,EAAE;gBAC9D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,gBAAgB,IAAI,SAAS,IAAI,KAAK,GAAG,gBAAgB,EAAE;gBAC7D,cAAc,CAAC,kBAAkB,EAAE,gBAAgB,CAAC,CAAC;aACtD;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,QAAQ,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,QAAQ,EAAE;gBACpD,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;aACtC;YACD,IAAI,SAAS,IAAI,SAAS,IAAI,KAAK,CAAC,MAAM,GAAG,SAAS,EAAE;gBACtD,cAAc,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;aACxC;YACD,IAAI,UAAU,IAAI,SAAS,IAAI,KAAK,GAAG,UAAU,KAAK,CAAC,EAAE;gBACvD,cAAc,CAAC,YAAY,EAAE,UAAU,CAAC,CAAC;aAC1C;YACD,IAAI,OAAO,EAAE;gBACX,IAAM,OAAO,GAAW,OAAO,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC;gBACpF,IAAI,OAAO,KAAK,KAAK,QAAQ,IAAI,KAAK,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,IAAI,EAAE;oBAC9D,cAAc,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;iBACpC;aACF;YACD,IACE,WAAW;gBACX,KAAK,CAAC,IAAI,CAAC,UAAC,IAAS,EAAE,CAAS,EAAE,EAAc,IAAK,OAAA,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,EAAtB,CAAsB,CAAC,EAC5E;gBACA,cAAc,CAAC,aAAa,EAAE,WAAW,CAAC,CAAC;aAC5C;SACF;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACH,8BAAS,GAAT,UAAU,MAAc,EAAE,MAAW,EAAE,UAAmB;QACxD,IAAI,OAAO,GAAQ,EAAE,CAAC;QACtB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAc,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QACD,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAC7C,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC;SAC9B;QAED,mDAAmD;QACnD,sDAAsD;QACtD,mDAAmD;QACnD,wBAAwB;QACxB,iCAAiC;QACjC,0CAA0C;QAC1C,0CAA0C;QAC1C,qCAAqC;QACrC,0CAA0C;QAElC,IAAA,QAAQ,GAAe,MAAM,SAArB,EAAE,QAAQ,GAAK,MAAM,SAAX,CAAY;QAEtC,IAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM,KAAK,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,0BAAuB,CAAC,CAAC;SACvD;QACD,IAAI,QAAQ,IAAI,CAAC,QAAQ,IAAI,MAAM,IAAI,SAAS,EAAE;YAChD,MAAM,IAAI,KAAK,CAAI,UAAU,kCAA+B,CAAC,CAAC;SAC/D;QACD,IAAI,CAAC,QAAQ,IAAI,QAAQ,KAAK,KAAK,IAAI,MAAM,KAAK,IAAI,EAAE;YACtD,MAAM,IAAI,KAAK,CAAI,UAAU,qBAAkB,CAAC,CAAC;SAClD;QAED,IAAI,MAAM,IAAI,SAAS,EAAE;YACvB,OAAO,GAAG,MAAM,CAAC;SAClB;aAAM;YACL,8BAA8B;YAC9B,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;YACrD,IAAI,UAAU,CAAC,KAAK,CAAC,SAAS,CAAC,KAAK,IAAI,EAAE;gBACxC,OAAO,GAAG,MAAM,CAAC;aAClB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gDAAgD,CAAC,KAAK,IAAI,EAAE;gBACtF,OAAO,GAAG,mBAAmB,CAAC,UAAU,EAAE,UAAU,EAAE,MAAM,CAAC,CAAC;aAC/D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;gBAChD,IAAM,UAAU,GAAe,MAA
oB,CAAC;gBACpD,OAAO,GAAG,iBAAiB,CAAC,UAAU,EAAE,UAAU,CAAC,IAAI,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;aAChF;iBAAM,IACL,UAAU,CAAC,KAAK,CAAC,uDAAuD,CAAC,KAAK,IAAI,EAClF;gBACA,OAAO,GAAG,kBAAkB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aAC9D;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;aACtD;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,qBAAqB,CAAC,IAAI,EAAE,MAAwB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACrF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAA0B,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACzF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,sBAAsB,CAAC,IAAI,EAAE,MAAyB,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;aACvF;SACF;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;;;;;OAUG;IACH,gCAAW,GAAX,UAAY,MAAc,EAAE,YAAiB,EAAE,UAAkB;QAC/D,IAAI,YAAY,IAAI,SAAS,EAAE;YAC7B,IAAI,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,KAAK,UAAU,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;gBACzE,sEAAsE;gBACtE,qDAAqD;gBACrD,qEAAqE;gBACrE,YAAY,GAAG,EAAE,CAAC;aACnB;YACD,+FAA+F;YAC/F,IAAI,MAAM,CAAC,YAAY,KAAK,SAAS,EAAE;gBACrC,YAAY,GAAG,MAAM,CAAC,YAAY,CAAC;aACpC;YACD,OAAO,YAAY,CAAC;SACrB;QAED,IAAI,OAAY,CAAC;QACjB,IAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC;QACpC,IAAI,CAAC,UAAU,EAAE;YACf,UAAU,GAAG,MAAM,CAAC,cAAe,CAAC;SACrC;QAED,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;YAC9C,OAAO,GAAG,wBAAwB,CAAC,IAAI,EAAE,MAAyB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;SAC/F;aAAM;YACL,IAAI,IAAI,CAAC,KAAK,EAAE;gBACd;;;;mBAIG;gBACH,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,IAAI,YAAY,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;oBACpE,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;iBAClC;aACF;YAED,IAAI,UAAU,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;gBAC3C,OAAO,GAAG,UAAU,CAAC,YAAY,CAAC,CAAC;gBACnC,IAAI,KAAK,CAAC,OAAO,CAAC,EAAE;oBAClB,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;gBACnD,IAAI,YAAY,KAAK,MAAM,EAAE;oBAC3B,OAAO,GAAG,IAAI,CAAC;iBAChB;qBAAM,IAAI,YAAY,KAAK,OAAO,EAAE;oBACnC,OAAO,GAAG,KAAK,CAAC;iBACjB;qBAAM;oBACL,OAAO,GAAG,YAAY,CAAC;iBACxB;aACF;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,mDAAmD,CAAC,KAAK,IAAI,EAAE;gBACzF,OAAO,GAAG,YAAY,CAAC;aACxB;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,qCAAqC,CAAC,KAAK,IAAI,EAAE;gBAC3E,OAAO,GAAG,IAAI,IAAI,CAAC,YAAY,CAAC,CAAC;aAClC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,cAAc,CAAC,YAAY,CAAC,CAAC;aACxC;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC;aAC7C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,eAAe,CAAC,KAAK,IAAI,EAAE;gBACrD,OAAO,GAAG,oBAAoB,CAAC,YAAY,CAAC,CAAC;aAC9C;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;gBACpD,OAAO,GAAG,uBAAuB,CAAC,IAAI,EAAE,MAAwB,EAAE,YAAY,EAAE,UAAU,CAAC,CAAC;aAC7F;iBAAM,IAAI,UAAU,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,IAAI,EAAE;gBACtD,OAAO,GAAG,yBAAyB,CACjC,IAAI,EACJ,MAA0B,EAC1B,YAAY,EACZ,UAAU,CACX,CAAC;aACH;SACF;QAED,IAAI,MAAM,CAAC,UAAU,EAAE;YACrB,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC;IACjB,CAAC;IACH,iBAAC;AAAD,CAAC,AAzOD,IAyOC;;AAED,SAAS,OAAO,CAAC,GAAW,EAAE,EAAU;IACtC,IAAI,GAAG,GAAG,GAAG,CAAC,MAAM,CAAC;IACrB,OAAO,GAAG,GAAG,CAAC,IAAI,CAAC,IAAI,GAAG,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE;QAC1C,EAAE,GAAG,CAAC;KACP;IACD,OAAO,GAAG,CAAC,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,iBAAiB,CAAC,MAAW;IACpC,IAAI,CAAC,MAAM,EAAE;QACX,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,CAAC,CAAC,MAAM,YAAY,UAAU,CAAC,EAAE;QACnC,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;KAC5F;IACD,wBAAwB;IACxB,IAAM,
GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;IAC3C,uBAAuB;IACvB,OAAO,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;AACnE,CAAC;AAED,SAAS,oBAAoB,CAAC,GAAW;IACvC,IAAI,CAAC,GAAG,EAAE;QACR,OAAO,SAAS,CAAC;KAClB;IACD,IAAI,GAAG,IAAI,OAAO,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QAC5C,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;KACxF;IACD,uBAAuB;IACvB,GAAG,GAAG,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;IAClD,wBAAwB;IACxB,OAAO,MAAM,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,SAAS,kBAAkB,CAAC,IAAwB;IAClD,IAAM,OAAO,GAAa,EAAE,CAAC;IAC7B,IAAI,YAAY,GAAG,EAAE,CAAC;IACtB,IAAI,IAAI,EAAE;QACR,IAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAEjC,KAAmB,UAAQ,EAAR,qBAAQ,EAAR,sBAAQ,EAAR,IAAQ,EAAE;YAAxB,IAAM,IAAI,iBAAA;YACb,IAAI,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,KAAK,IAAI,EAAE;gBACzC,YAAY,IAAI,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;aACvD;iBAAM;gBACL,YAAY,IAAI,IAAI,CAAC;gBACrB,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;gBAC3B,YAAY,GAAG,EAAE,CAAC;aACnB;SACF;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,SAAS,cAAc,CAAC,CAAgB;IACtC,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;QACnC,CAAC,GAAG,IAAI,IAAI,CAAC,CAAW,CAAC,CAAC;KAC3B;IACD,OAAO,IAAI,CAAC,KAAK,CAAE,CAAU,CAAC,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AAClD,CAAC;AAED,SAAS,cAAc,CAAC,CAAS;IAC/B,IAAI,CAAC,CAAC,EAAE;QACN,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,SAAS,mBAAmB,CAAC,QAAgB,EAAE,UAAkB,EAAE,KAAU;IAC3E,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,SAAS,EAAE;QACzC,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YACzC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,6BAA0B,CAAC,CAAC;aAC9E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAI,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CAAI,UAAU,sBAAgB,KAAK,+BAA2B,CAAC,CAAC;aAChF;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YAC9C,IAAI,CAAC,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,KAAK,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBACtE,MAAM,IAAI,KAAK,CACV,UAAU,sBAAgB,KAAK,gDAA4C,CAC/E,CAAC;aACH;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,KAAK,IAAI,EAAE;YACjD,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;gBAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,oBAAe,KAAK,8BAA2B,CAAC,CAAC;aAC/E;SACF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;YAChD,IAAM,UAAU,GAAG,OAAO,KAAK,CAAC;YAChC,IACE,UAAU,KAAK,QAAQ;gBACvB,UAAU,KAAK,UAAU;gBACzB,CAAC,CAAC,KAAK,YAAY,WAAW,CAAC;gBAC/B,CAAC,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC;gBAC1B,CAAC,CAAC,OAAO,IAAI,KAAK,UAAU,IAAI,KAAK,YAAY,IAAI,CAAC,EACtD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,0GAAuG,CACrH,CAAC;aACH;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,iBAAiB,CAAC,UAAkB,EAAE,aAAyB,EAAE,KAAU;IAClF,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,IAAI,KAAK,CACb,uDAAqD,UAAU,sBAAmB,CACnF,CAAC;KACH;IACD,IAAM,SAAS,GAAG,aAAa,CAAC,IAAI,CAAC,UAAC,IAAI;QACxC,IAAI,OAAO,IAAI,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YACtC,OAAO,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,WAAW,EAAE,CAAC;SACnD;QACD,OAAO,IAAI,KAAK,KAAK,CAAC;IACxB,CAAC,CAAC,CAAC;IACH,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,IAAI,KAAK,CACV,KAAK,kCAA6B,UAAU,gCAA2B,IAAI,CAAC,SAAS,CACtF,aAAa,CACd,MAAG,CACL,CAAC;KACH;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,MAAM,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC;KACvC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,sBAAsB,CAAC,UAAkB,EAAE,KAAU;IAC5D,IAAI,KAAK,IAAI,SAAS,EA
AE;QACtB,IAAI,CAAC,CAAC,KAAK,YAAY,UAAU,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAI,UAAU,iCAA8B,CAAC,CAAC;SAC9D;QACD,KAAK,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC;KAClC;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,kBAAkB,CAAC,QAAgB,EAAE,KAAU,EAAE,UAAkB;IAC1E,IAAI,KAAK,IAAI,SAAS,EAAE;QACtB,IAAI,QAAQ,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,IAAI,EAAE;YACvC,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK;gBACH,KAAK,YAAY,IAAI;oBACnB,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC;oBACtC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC,SAAS,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;SACtD;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,+DAA4D,CAAC,CAAC;aAC5F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,qBAAqB,CAAC,KAAK,IAAI,EAAE;YACzD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CAAI,UAAU,gEAA6D,CAAC,CAAC;aAC7F;YACD,KAAK,GAAG,KAAK,YAAY,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;SACrF;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IACE,CAAC,CACC,KAAK,YAAY,IAAI;gBACrB,CAAC,OAAO,KAAK,CAAC,OAAO,EAAE,KAAK,QAAQ,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,EACD;gBACA,MAAM,IAAI,KAAK,CACV,UAAU,wEAAqE;oBAChF,mDAAmD,CACtD,CAAC;aACH;YACD,KAAK,GAAG,cAAc,CAAC,KAAK,CAAC,CAAC;SAC/B;aAAM,IAAI,QAAQ,CAAC,KAAK,CAAC,cAAc,CAAC,KAAK,IAAI,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE;gBAC5B,MAAM,IAAI,KAAK,CACV,UAAU,4DAAsD,KAAK,QAAI,CAC7E,CAAC;aACH;YACD,KAAK,GAAG,KAAK,CAAC;SACf;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED,SAAS,qBAAqB,CAC5B,UAAsB,EACtB,MAAsB,EACtB,MAAW,EACX,UAAkB;IAElB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;QAC1B,MAAM,IAAI,KAAK,CAAI,UAAU,4BAAyB,CAAC,CAAC;KACzD;IACD,IAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACxC,IAAI,CAAC,WAAW,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,SAAS,GAAG,EAAE,CAAC;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,WAAW,EAAE,MAAM,CAAC,CAAC,CAAC,EAAE,UAAU,CAAC,CAAC;KACzE;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAwB,EACxB,MAAW,EACX,UAAkB;IAElB,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,MAAM,IAAI,KAAK,CAAI,UAAU,6BAA0B,CAAC,CAAC;KAC1D;IACD,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IACpC,IAAI,CAAC,SAAS,IAAI,OAAO,SAAS,KAAK,QAAQ,EAAE;QAC/C,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAU,MAAG,CAAA,CAC1D,CAAC;KACH;IACD,IAAM,cAAc,GAA2B,EAAE,CAAC;IAClD,KAAkB,UAAmB,EAAnB,KAAA,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,EAAnB,cAAmB,EAAnB,IAAmB,EAAE;QAAlC,IAAM,GAAG,SAAA;QACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC,GAAG,CAAC,EAAE,UAAU,GAAG,GAAG,GAAG,GAAG,CAAC,CAAC;KAC5F;IACD,OAAO,cAAc,CAAC;AACxB,CAAC;AAED;;;;GAIG;AACH,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,UAAkB;IAElB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,eAAe,CAAC;IAC7C,IAAI,CAAC,UAAU,EAAE;QACf,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;QACxC,IAAI,CAAC,SAAS,EAAE;YACd,MAAM,IAAI,KAAK,CACb,4BAAyB,U
AAU,2CAAoC,IAAI,CAAC,SAAS,CACnF,MAAM,EACN,SAAS,EACT,CAAC,CACF,QAAI,CACN,CAAC;SACH;QAED,IAAM,WAAW,GAAG,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;QACvD,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,sDAAmD,SAAS,QAAI,CAAC,CAAC;SACnF;QACD,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC;QAC9C,IAAI,CAAC,UAAU,EAAE;YACf,MAAM,IAAI,KAAK,CACb,qDAAqD;iBACnD,cAAW,IAAI,CAAC,SAAS,CACvB,WAAW,CACZ,qBAAc,SAAS,wBAAiB,UAAU,QAAI,CAAA,CAC1D,CAAC;SACH;KACF;IAED,OAAO,UAAU,CAAC;AACpB,CAAC;AAED,SAAS,sBAAsB,CAC7B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,UAAkB;;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,MAAM,EAAE,YAAY,CAAC,CAAC;KACzE;IAED,IAAI,MAAM,IAAI,SAAS,EAAE;QACvB,IAAM,OAAO,GAAQ,EAAE,CAAC;QACxB,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;QAC1E,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;YAAtC,IAAM,GAAG,SAAA;YACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;YACvC,IAAI,cAAc,CAAC,QAAQ,EAAE;gBAC3B,SAAS;aACV;YAED,IAAI,QAAQ,SAAoB,CAAC;YACjC,IAAI,YAAY,GAAQ,OAAO,CAAC;YAChC,IAAI,UAAU,CAAC,KAAK,EAAE;gBACpB,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC;iBACnC;qBAAM;oBACL,QAAQ,GAAG,cAAc,CAAC,cAAc,IAAI,cAAc,CAAC,OAAO,CAAC;iBACpE;aACF;iBAAM;gBACL,IAAM,KAAK,GAAG,kBAAkB,CAAC,cAAc,CAAC,cAAe,CAAC,CAAC;gBACjE,QAAQ,GAAG,KAAK,CAAC,GAAG,EAAE,CAAC;gBAEvB,KAAuB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;oBAAzB,IAAM,QAAQ,cAAA;oBACjB,IAAM,WAAW,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;oBAC3C,IAAI,WAAW,IAAI,SAAS,IAAI,MAAM,CAAC,GAAG,CAAC,IAAI,SAAS,EAAE;wBACxD,YAAY,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;qBAC7B;oBACD,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;iBACvC;aACF;YAED,IAAI,YAAY,IAAI,SAAS,EAAE;gBAC7B,IAAM,kBAAkB,GACtB,cAAc,CAAC,cAAc,KAAK,EAAE;oBAClC,CAAC,CAAC,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC,cAAc;oBAClD,CAAC,CAAC,UAAU,CAAC;gBAEjB,IAAI,WAAW,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;gBAC9B,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;gBAC5F,IACE,wBAAwB;oBACxB,wBAAwB,CAAC,UAAU,KAAK,GAAG;oBAC3C,WAAW,IAAI,SAAS,EACxB;oBACA,WAAW,GAAG,MAAM,CAAC,cAAc,CAAC;iBACrC;gBAED,IAAM,eAAe,GAAG,UAAU,CAAC,SAAS,CAC1C,cAAc,EACd,WAAW,EACX,kBAAkB,CACnB,CAAC;gBACF,IAAI,eAAe,KAAK,SAAS,IAAI,QAAQ,IAAI,SAAS,EAAE;oBAC1D,IAAI,cAAc,CAAC,cAAc,EAAE;wBACjC,oDAAoD;wBACpD,2DAA2D;wBAC3D,gCAAgC;wBAChC,YAAY,CAAC,CAAC,GAAG,YAAY,CAAC,CAAC,IAAI,EAAE,CAAC;wBACtC,YAAY,CAAC,CAAC,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC5C;yBAAM,IAAI,cAAc,CAAC,YAAY,EAAE;wBACtC,YAAY,CAAC,QAAQ,CAAC,aAAK,GAAC,cAAc,CAAC,cAAe,IAAG,eAAe,KAAE,CAAC;qBAChF;yBAAM;wBACL,YAAY,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC;qBAC1C;iBACF;aACF;SACF;QAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;QACpE,IAAI,0BAA0B,EAAE;YAC9B,IAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;oCAC/B,cAAc;gBACvB,IAAM,oBAAoB,GAAG,SAAS,CAAC,KAAK,CAAC,UAAC,EAAE,IAAK,OAAA,EAAE,KAAK,cAAc,EAArB,CAAqB,CAAC,CAAC;gBAC5E,IAAI,oBAAoB,EAAE;oBACxB,OAAO,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,SAAS,CAC5C,0BAA0B,EAC1B,MAAM,CAAC,cAAc,CAAC,EACtB,UAAU,GAAG,IAAI,GAAG,cAAc,GAAG,IAAI,CAC1C,CAAC;iBACH;;YARH,KAAK,IAAM,cAAc,IAAI,MAAM;wBAAxB,cAAc;aASxB;SACF;QAED,OAAO,OAAO,CAAC;KAChB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,oBAAoB,CAAC,YAAoB;IAChD,OAAO,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC3C,CAAC;AAED,SAAS,wBAAwB,CAC/B,UAAsB,EACtB,MAAuB,EACvB,YAAiB,EACjB,UAAkB;IAElB,IAAI,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,EAAE;QAC9D,MAAM,GAAG,oBAAoB,CAAC,UAAU,EAAE,MAAM,EAAE,YAAY,EAAE,gBAAgB,CAAC,CAAC;KACnF;IAED,IAAM,UAAU,GAAG,sBAAsB,CAAC,UAAU,EAAE,MAAM,EAAE,UAAU,CAAC,CAAC;IAC1E,IAAI,QAAQ,GAA2B,EAAE,CAAC;IAC1C,IAAM,oBAAoB,GAAa,EAAE,CAAC;IAE1C,KAAkB,UAAuB,EAAvB,KAAA,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,EAAvB,cAAuB,EAAvB,IAAuB,EAAE;QAAtC,IA
AM,GAAG,SAAA;QACZ,IAAM,cAAc,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,cAAe,CAAC,CAAC;QAClE,oBAAoB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5B,IAAA,cAAc,GAA8B,cAAc,eAA5C,EAAE,OAAO,GAAqB,cAAc,QAAnC,EAAE,cAAc,GAAK,cAAc,eAAnB,CAAoB;QACnE,IAAI,kBAAkB,GAAG,UAAU,CAAC;QACpC,IAAI,cAAc,KAAK,EAAE,IAAI,cAAc,KAAK,SAAS,EAAE;YACzD,kBAAkB,GAAG,UAAU,GAAG,GAAG,GAAG,cAAc,CAAC;SACxD;QAED,IAAM,sBAAsB,GAAI,cAAmC,CAAC,sBAAsB,CAAC;QAC3F,IAAI,sBAAsB,EAAE;YAC1B,IAAM,UAAU,GAAQ,EAAE,CAAC;YAC3B,KAAwB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;gBAA9C,IAAM,SAAS,SAAA;gBAClB,IAAI,SAAS,CAAC,UAAU,CAAC,sBAAsB,CAAC,EAAE;oBAChD,UAAU,CAAC,SAAS,CAAC,SAAS,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CACpF,cAAmC,CAAC,IAAI,CAAC,KAAK,EAC/C,YAAY,CAAC,SAAS,CAAC,EACvB,kBAAkB,CACnB,CAAC;iBACH;gBAED,oBAAoB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;aACtC;YACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC;SAC5B;aAAM,IAAI,UAAU,CAAC,KAAK,EAAE;YAC3B,IAAI,cAAc,CAAC,cAAc,IAAI,YAAY,CAAC,CAAC,EAAE;gBACnD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,YAAY,CAAC,CAAC,CAAC,OAAQ,CAAC,EACxB,kBAAkB,CACnB,CAAC;aACH;iBAAM;gBACL,IAAM,YAAY,GAAG,cAAc,IAAI,OAAO,IAAI,cAAc,CAAC;gBACjE,IAAI,iBAAiB,GAAG,YAAY,CAAC,YAAa,CAAC,CAAC;gBACpD,IAAI,cAAc,CAAC,YAAY,EAAE;oBAC/B,iBAAiB,GAAG,YAAY,CAAC,OAAQ,CAAC,CAAC;oBAC3C,iBAAiB,GAAG,iBAAiB,IAAI,iBAAiB,CAAC,cAAe,CAAC,CAAC;oBAE5E,IAAM,kBAAkB,GAAG,iBAAiB,KAAK,SAAS,CAAC;oBAC3D,IAAI,kBAAkB,EAAE;wBACtB,iBAAiB,GAAG,EAAE,CAAC;qBACxB;iBACF;gBACD,QAAQ,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CACpC,cAAc,EACd,iBAAiB,EACjB,kBAAkB,CACnB,CAAC;aACH;SACF;aAAM;YACL,kFAAkF;YAClF,IAAI,gBAAgB,SAAA,CAAC;YACrB,IAAI,GAAG,GAAG,YAAY,CAAC;YACvB,sCAAsC;YACtC,KAAmB,UAAK,EAAL,eAAK,EAAL,mBAAK,EAAL,IAAK,EAAE;gBAArB,IAAM,IAAI,cAAA;gBACb,IAAI,CAAC,GAAG;oBAAE,MAAM;gBAChB,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,CAAC;aACjB;YACD,gBAAgB,GAAG,GAAG,CAAC;YACvB,IAAM,wBAAwB,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAwB,CAAC;YACtE,sEAAsE;YACtE,yEAAyE;YACzE,kFAAkF;YAClF,kFAAkF;YAClF,gGAAgG;YAChG,8FAA8F;YAC9F,qFAAqF;YACrF,mFAAmF;YACnF,sFAAsF;YACtF,IACE,wBAAwB;gBACxB,GAAG,KAAK,wBAAwB,CAAC,UAAU;gBAC3C,gBAAgB,IAAI,SAAS,EAC7B;gBACA,gBAAgB,GAAG,MAAM,CAAC,cAAc,CAAC;aAC1C;YAED,IAAI,eAAe,SAAA,CAAC;YACpB,SAAS;YACT,IAAI,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,GAAG,CAAC,CAAC,IAAI,UAAU,CAAC,GAAG,CAAC,CAAC,cAAc,KAAK,EAAE,EAAE;gBAC7E,gBAAgB,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;gBACrC,IAAM,aAAa,GAAG,UAAU,CAAC,WAAW,CAC1C,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,yFAAyF;gBACzF,6CAA6C;gBAC7C,KAA2B,UAAwB,EAAxB,KAAA,MAAM,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAA1C,IAAA,WAAY,EAAX,KAAG,QAAA,EAAE,KAAK,QAAA;oBACpB,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,KAAG,CAAC,EAAE;wBACtC,aAAa,CAAC,KAAG,CAAC,GAAG,KAAK,CAAC;qBAC5B;iBACF;gBACD,QAAQ,GAAG,aAAa,CAAC;aAC1B;iBAAM,IAAI,gBAAgB,KAAK,SAAS,IAAI,cAAc,CAAC,YAAY,KAAK,SAAS,EAAE;gBACtF,eAAe,GAAG,UAAU,CAAC,WAAW,CACtC,cAAc,EACd,gBAAgB,EAChB,kBAAkB,CACnB,CAAC;gBACF,QAAQ,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC;aACjC;SACF;KACF;IAED,IAAM,0BAA0B,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC;IACpE,IAAI,0BAA0B,EAAE;QAC9B,IAAM,oBAAoB,GAAG,UAAC,gBAAwB;YACpD,KAAK,IAAM,cAAc,IAAI,UAAU,EAAE;gBACvC,IAAM,KAAK,GAAG,kBAAkB,CAAC,UAAU,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC;gBAC5E,IAAI,KAAK,CAAC,CAAC,CAAC,KAAK,gBAAgB,EAAE;oBACjC,OAAO,KAAK,CAAC;iBACd;aACF;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC;QAEF,KAAK,IAAM,gBAAgB,IAAI,YAAY,EAAE;YAC3C,IAAI,oBAAoB,CAAC,gBAAgB,CAAC,EAAE;gBAC1C,QAAQ,CAAC,gBAAgB,CAAC,GAAG,UAAU,CAAC,WAAW,CACjD,0BAA0B,EAC1B,YAAY,CAAC,gBAAgB,CAAC,EAC9B,UAAU,GAAG,IAAI,GAAG,gBAAgB,GAAG,IAAI,CAC5C,CAAC;aACH;SACF;KACF;SAAM,IAAI,YAAY,EAAE;QACvB,KAAkB,UAAyB,E
AAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,IACE,QAAQ,CAAC,GAAG,CAAC,KAAK,SAAS;gBAC3B,CAAC,oBAAoB,CAAC,QAAQ,CAAC,GAAG,CAAC;gBACnC,CAAC,oBAAoB,CAAC,GAAG,CAAC,EAC1B;gBACA,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC;aACnC;SACF;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,yBAAyB,CAChC,UAAsB,EACtB,MAAwB,EACxB,YAAiB,EACjB,UAAkB;IAElB,2BAA2B;IAC3B,IAAM,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC;IAChC,IAAI,CAAC,KAAK,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QACvC,MAAM,IAAI,KAAK,CACb,6DAA2D;aACzD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAM,cAAc,GAA2B,EAAE,CAAC;QAClD,KAAkB,UAAyB,EAAzB,KAAA,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;YAAxC,IAAM,GAAG,SAAA;YACZ,cAAc,CAAC,GAAG,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,KAAK,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,UAAU,CAAC,CAAC;SACpF;QACD,OAAO,cAAc,CAAC;KACvB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,uBAAuB,CAC9B,UAAsB,EACtB,MAAsB,EACtB,YAAiB,EACjB,UAAkB;IAElB,2BAA2B;IAC3B,IAAM,OAAO,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC;IACpC,IAAI,CAAC,OAAO,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC3C,MAAM,IAAI,KAAK,CACb,yDAAwD;aACtD,8CAA0C,UAAY,CAAA,CACzD,CAAC;KACH;IACD,IAAI,YAAY,EAAE;QAChB,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,EAAE;YAChC,+FAA+F;YAC/F,YAAY,GAAG,CAAC,YAAY,CAAC,CAAC;SAC/B;QAED,IAAM,SAAS,GAAG,EAAE,CAAC;QACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC5C,SAAS,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC,CAAC,EAAK,UAAU,SAAI,CAAC,MAAG,CAAC,CAAC;SACxF;QACD,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED,SAAS,oBAAoB,CAC3B,UAAsB,EACtB,MAAuB,EACvB,MAAW,EACX,uBAAwD;IAExD,IAAM,wBAAwB,GAAG,sCAAsC,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAC5F,IAAI,wBAAwB,EAAE;QAC5B,IAAM,iBAAiB,GAAG,wBAAwB,CAAC,uBAAuB,CAAC,CAAC;QAC5E,IAAI,iBAAiB,IAAI,SAAS,EAAE;YAClC,IAAM,kBAAkB,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;YACrD,IAAI,kBAAkB,IAAI,SAAS,EAAE;gBACnC,IAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,IAAI,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC;gBACjE,IAAM,kBAAkB,GACtB,kBAAkB,KAAK,QAAQ;oBAC7B,CAAC,CAAC,kBAAkB;oBACpB,CAAC,CAAC,QAAQ,GAAG,GAAG,GAAG,kBAAkB,CAAC;gBAC1C,IAAM,iBAAiB,GAAG,UAAU,CAAC,YAAY,CAAC,cAAc,CAAC,kBAAkB,CAAC,CAAC;gBACrF,IAAI,iBAAiB,EAAE;oBACrB,MAAM,GAAG,iBAAiB,CAAC;iBAC5B;aACF;SACF;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,UAAsB,EACtB,MAAuB;IAEvB,OAAO,CACL,MAAM,CAAC,IAAI,CAAC,wBAAwB;QACpC,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC;QACrE,iCAAiC,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CACrE,CAAC;AACJ,CAAC;AAED,SAAS,iCAAiC,CAAC,UAAsB,EAAE,QAAiB;IAClF,OAAO,CACL,QAAQ;QACR,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;QACjC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,wBAAwB,CAChE,CAAC;AACJ,CAAC;AAoHD,0BAA0B;AAC1B,MAAM,UAAU,eAAe,CAAC,WAAgB;IAC9C,IAAI,WAAW,IAAI,SAAS;QAAE,OAAO,SAAS,CAAC;IAC/C,IAAI,WAAW,YAAY,UAAU,EAAE;QACrC,WAAW,GAAG,MAAM,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC;QAClD,OAAO,WAAW,CAAC;KACpB;SAAM,IAAI,WAAW,YAAY,IAAI,EAAE;QACtC,OAAO,WAAW,CAAC,WAAW,EAAE,CAAC;KAClC;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,EAAE;QACrC,IAAM,KAAK,GAAG,EAAE,CAAC;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC3C,KAAK,CAAC,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SAC7C;QACD,OAAO,KAAK,CAAC;KACd;SAAM,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;QAC1C,IAAM,UAAU,GAA2B,EAAE,CAAC;QAC9C,KAAK,IAAM,QAAQ,IAAI,WAAW,EAAE;YAClC,UAAU,CAAC,QAAQ,CAAC,GAAG,eAAe,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC;SAC/D;QACD,OAAO,UAAU,CAAC;KACnB;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AAED;;GAEG;AACH,SAAS,OAAO,CAAmB,CAAW;IAC5C,IAAM,MAAM,GAAQ,EAAE,CAAC;IACvB,KAAkB,UAAC,EAAD,OAAC,EAAD,eAAC,EAAD,IAAC,EAAE;QAAhB,IAAM,GAAG,UAAA;Q
ACZ,MAAM,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,CAAC,IAAM,UAAU,GAAG,OAAO,CAAC;IAChC,WAAW;IACX,SAAS;IACT,WAAW;IACX,WAAW;IACX,MAAM;IACN,UAAU;IACV,iBAAiB;IACjB,YAAY;IACZ,MAAM;IACN,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,QAAQ;IACR,UAAU;IACV,UAAU;CACX,CAAC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts new file mode 100644 index 0000000000..0a54a863fc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts @@ -0,0 +1,168 @@ +/// +import { TokenCredential } from "@azure/core-auth"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { OperationArguments } from "./operationArguments"; +import { ParameterPath } from "./operationParameter"; +import { OperationSpec } from "./operationSpec"; +import { DeserializationContentTypes } from "./policies/deserializationPolicy"; +import { RedirectOptions } from "./policies/redirectPolicy"; +import { RequestPolicyFactory } from "./policies/requestPolicy"; +import { Mapper, Serializer } from "./serializer"; +import { RequestPrepareOptions, WebResourceLike } from "./webResource"; +import { OperationResponse } from "./operationResponse"; +import { ServiceCallback } from "./util/utils"; +import { Agent } from "http"; +/** + * HTTP proxy settings (Node.js only) + */ +export interface ProxySettings { + host: string; + port: number; + username?: string; + password?: string; +} +/** + * HTTP and HTTPS agents (Node.js only) + */ +export interface AgentSettings { + http: Agent; + https: Agent; +} +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: RequestPolicyFactory[] | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. + */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. 
+ */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-command-name" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only). + */ + agentSettings?: AgentSettings; + /** + * If specified: + * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient. + * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. Otherwise, the scope would default to "https://management.azure.com/.default". + * + * If it is not specified: + * - All OperationSpecs must contain a baseUrl property. + * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be "https://management.azure.com/.default". + */ + baseUri?: string; +} +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +export declare class ServiceClient { + /** + * The base URI against which requests will be made when using this ServiceClient instance. + * + * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient. + * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default "https://management.azure.com/.default" + * + * If it is not specified, all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. + */ + protected requestContentType?: string; + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient; + private readonly _requestPolicyOptions; + private readonly _requestPolicyFactories; + private readonly _withCredentials; + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + constructor(credentials?: ServiceClientCredentials | TokenCredential, options?: ServiceClientOptions); + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise; + /** + * Send an HTTP request that is populated using the provided OperationSpec. 
+ * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. + */ + sendOperationRequest(operationArguments: OperationArguments, operationSpec: OperationSpec, callback?: ServiceCallback): Promise; +} +export declare function serializeRequestBody(serviceClient: ServiceClient, httpRequest: WebResourceLike, operationArguments: OperationArguments, operationSpec: OperationSpec): void; +export declare type PropertyParent = { + [propertyName: string]: any; +}; +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export declare function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent; +export declare function getOperationArgumentValueFromParameterPath(serviceClient: ServiceClient, operationArguments: OperationArguments, parameterPath: ParameterPath, parameterMapper: Mapper, serializer: Serializer): any; +export declare function flattenResponse(_response: HttpOperationResponse, responseSpec: OperationResponse | undefined): RestResponse; +//# sourceMappingURL=serviceClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map new file mode 100644 index 0000000000..2668428681 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.d.ts","sourceRoot":"","sources":["../../lib/serviceClient.ts"],"names":[],"mappings":";AAGA,OAAO,EAAE,eAAe,EAAqB,MAAM,kBAAkB,CAAC;AACtE,OAAO,EAAE,wBAAwB,EAAE,MAAM,wCAAwC,CAAC;AAElF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,qBAAqB,EAAE,YAAY,EAAE,MAAM,yBAAyB,CAAC;AAC9E,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAC1D,OAAO,EAIL,aAAa,EACd,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAqB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AACnE,OAAO,EAEL,2BAA2B,EAC5B,MAAM,kCAAkC,CAAC;AAQ1C,OAAO,EAA0B,eAAe,EAAkB,MAAM,2BAA2B,CAAC;AACpG,OAAO,EAEL,oBAAoB,EAGrB,MAAM,0BAA0B,CAAC;AAKlC,OAAO,EAAqC,MAAM,EAAc,UAAU,EAAE,MAAM,cAAc,CAAC;AAIjG,OAAO,EAEL,qBAAqB,EACrB,eAAe,EAGhB,MAAM,eAAe,CAAC;AACvB,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,eAAe,EAAE,MAAM,cAAc,CAAC;AAI/C,OAAO,EAAE,KAAK,EAAE,MAAM,MAAM,CAAC;AAM7B;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,KAAK,CAAC;IACZ,KAAK,EAAE,KAAK,CAAC;CACd;AAED;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC;;;;OAIG;IACH,sBAAsB,CAAC,EACnB,oBAAoB,EAAE,GACtB,CAAC,CAAC,6BAA6B,EAAE,oBAAoB,EAAE,KAAK,IAAI,GAAG,oBAAoB,EAAE,CAAC,CAAC;IAC/F;;OAEG;IACH,UAAU,CAAC,EAAE,UAAU,CAAC;IACxB;;OAEG;IACH,kBAAkB,CAAC,EAAE,kBAAkB,CAAC;IACxC;;OAEG;IACH,aAAa,CAAC,EAAE,OAAO,CAAC;IACxB;;OAEG;IACH,0BAA0B,CAAC,EAAE,MAAM,CAAC;IACpC;;OAEG;IACH,6BAA6B,CAAC,EAAE,OAAO,CAAC;IACxC;;;OAGG;IACH,eAAe,CAAC,EAAE,OAAO,CAAC;IAC1B;;;OAGG;IACH,yBAAyB,CAAC,EAAE,MAAM,CAAC;IACnC;;OAEG;IACH,2BAA2B,CAAC,EAAE,2BAA2B,CAAC;IAC1D;;;;OAIG;IACH,mBAAmB,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,0BAA0B,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAChF;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,CAAC,CAAC,gBAAgB,EAAE,MAAM,KAAK,MAAM,CAAC,CAAC;IAC5D;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,CAAC,EAAE,eAAe,CAAC;IAClC;;OAEG;IACH,aAAa,CAA
C,EAAE,aAAa,CAAC;IAC9B;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED;;;GAGG;AACH,qBAAa,aAAa;IACxB;;;;;;;OAOG;IACH,SAAS,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAE3B;;;OAGG;IACH,SAAS,CAAC,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAEtC;;OAEG;IACH,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa;IACzC,OAAO,CAAC,QAAQ,CAAC,qBAAqB,CAA2B;IAEjE,OAAO,CAAC,QAAQ,CAAC,uBAAuB,CAAyB;IACjE,OAAO,CAAC,QAAQ,CAAC,gBAAgB,CAAU;IAE3C;;;;;OAKG;gBAED,WAAW,CAAC,EAAE,wBAAwB,GAAG,eAAe,EACxD,OAAO,CAAC,EAAE,oBAAoB;IAiDhC;;OAEG;IACH,WAAW,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;IA8B7F;;;;;OAKG;IACH,oBAAoB,CAClB,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,QAAQ,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,GAC9B,OAAO,CAAC,YAAY,CAAC;CAkMzB;AAED,wBAAgB,oBAAoB,CAClC,aAAa,EAAE,aAAa,EAC5B,WAAW,EAAE,eAAe,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,GAC3B,IAAI,CAsEN;AAkFD,oBAAY,cAAc,GAAG;IAAE,CAAC,YAAY,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,CAAC;AAE7D;;;GAGG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,cAAc,EAAE,YAAY,EAAE,MAAM,EAAE,GAAG,cAAc,CAYhG;AAiBD,wBAAgB,0CAA0C,CACxD,aAAa,EAAE,aAAa,EAC5B,kBAAkB,EAAE,kBAAkB,EACtC,aAAa,EAAE,aAAa,EAC5B,eAAe,EAAE,MAAM,EACvB,UAAU,EAAE,UAAU,GACrB,GAAG,CAkEL;AA6BD,wBAAgB,eAAe,CAC7B,SAAS,EAAE,qBAAqB,EAChC,YAAY,EAAE,iBAAiB,GAAG,SAAS,GAC1C,YAAY,CAqEd"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js new file mode 100644 index 0000000000..89b329bb2c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js @@ -0,0 +1,488 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { __assign, __spreadArrays } from "tslib"; +import { isTokenCredential } from "@azure/core-auth"; +import { DefaultHttpClient } from "./defaultHttpClient"; +import { getPathStringFromParameter, getPathStringFromParameterPath, } from "./operationParameter"; +import { isStreamOperation } from "./operationSpec"; +import { deserializationPolicy, } from "./policies/deserializationPolicy"; +import { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { userAgentPolicy, getDefaultUserAgentHeaderName, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; +import { DefaultRedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { RequestPolicyOptions, } from "./policies/requestPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { MapperType } from "./serializer"; +import { URLBuilder } from "./url"; +import * as utils from "./util/utils"; +import { stringifyXML } from "./util/xml"; +import { isWebResourceLike, WebResource, } from "./webResource"; +import { agentPolicy } from "./policies/agentPolicy"; +import { proxyPolicy, getDefaultProxySettings } from "./policies/proxyPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { AzureIdentityCredentialAdapter, azureResourceManagerEndpoints, } from "./credentials/azureIdentityTokenCredentialAdapter"; +/** + * @class + * Initializes a new instance of the ServiceClient. 
+ */ +var ServiceClient = /** @class */ (function () { + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + function ServiceClient(credentials, options) { + if (!options) { + options = {}; + } + if (options.baseUri) { + this.baseUri = options.baseUri; + } + var serviceClientCredentials; + if (isTokenCredential(credentials)) { + var scope = undefined; + if ((options === null || options === void 0 ? void 0 : options.baseUri) && azureResourceManagerEndpoints.includes(options === null || options === void 0 ? void 0 : options.baseUri)) { + scope = options.baseUri + "/.default"; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } + else { + serviceClientCredentials = credentials; + } + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new DefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + var requestPolicyFactories; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } + else { + requestPolicyFactories = createDefaultRequestPolicyFactories(serviceClientCredentials, options); + if (options.requestPolicyFactories) { + var newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + /** + * Send the provided httpRequest. + */ + ServiceClient.prototype.sendRequest = function (options) { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + var httpRequest; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } + else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } + catch (error) { + return Promise.reject(error); + } + var httpPipeline = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (var i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); + } + } + return httpPipeline.sendRequest(httpRequest); + }; + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. + * @param {ServiceCallback} callback The callback to call when the response is received. 
+ */ + ServiceClient.prototype.sendOperationRequest = function (operationArguments, operationSpec, callback) { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + var httpRequest = new WebResource(); + var result; + try { + var baseUri = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); + } + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + var requestUrl = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (var _i = 0, _a = operationSpec.urlParameters; _i < _a.length; _i++) { + var urlParameter = _a[_i]; + var urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter)); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll("{" + (urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)) + "}", urlParameterValue); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (var _b = 0, _c = operationSpec.queryParameters; _b < _c.length; _b++) { + var queryParameter = _c[_b]; + var queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } + else { + for (var index in queryParameterValue) { + var item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } + else if (queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (var index in queryParameterValue) { + if (queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if (queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + httpRequest.url = requestUrl.toString(); + var contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + if (operationSpec.headerParameters) { + for (var _d = 0, _e = operationSpec.headerParameters; _d < _e.length; _d++) { + var headerParameter = _e[_d]; + var headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + var headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (var _f = 0, _g = Object.keys(headerValue); _f < _g.length; _f++) { + var key = _g[_f]; + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + httpRequest.headers.set(headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + var options = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (var customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + httpRequest.withCredentials = this._withCredentials; + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + result = this.sendRequest(httpRequest).then(function (res) { + return flattenResponse(res, operationSpec.responses[res.status]); + }); + } + catch (error) { + result = Promise.reject(error); + } + var cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then(function (res) { return cb(null, res._response.parsedBody, res._response.request, res._response); }) + .catch(function (err) { return cb(err); }); + } + return 
result; + }; + return ServiceClient; +}()); +export { ServiceClient }; +export function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); + var bodyMapper = operationSpec.requestBody.mapper; + var required = bodyMapper.required, xmlName = bodyMapper.xmlName, xmlElementName = bodyMapper.xmlElementName, serializedName = bodyMapper.serializedName; + var typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + var requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString); + var isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML(utils.prepareXMLRootList(httpRequest.body, xmlElementName || xmlName || serializedName), { rootName: xmlName || serializedName }); + } + else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } + else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } + catch (error) { + throw new Error("Error \"" + error.message + "\" occurred in serializing the payload - " + JSON.stringify(serializedName, undefined, " ") + "."); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (var _i = 0, _a = operationSpec.formDataParameters; _i < _a.length; _i++) { + var formDataParameter = _a[_i]; + var formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); + if (formDataParameterValue != undefined) { + var formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter)); + } + } + } +} +function isRequestPolicyFactory(instance) { + return typeof instance.create === "function"; +} +function getValueOrFunctionResult(value, defaultValueCreator) { + var result; + if (typeof value === "string") { + result = value; + } + else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} +function createDefaultRequestPolicyFactories(credentials, options) { + var factories = []; + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } + else { + factories.push(signingPolicy(credentials)); + } + } + var userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); + var userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + var redirectOptions = __assign(__assign({}, 
DefaultRedirectOptions), options.redirectOptions); + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + factories.push(deserializationPolicy(options.deserializationContentTypes)); + var proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + return factories; +} +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export function getPropertyParent(parent, propertyPath) { + if (parent && propertyPath) { + var propertyPathLength = propertyPath.length; + for (var i = 0; i < propertyPathLength - 1; ++i) { + var propertyName = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} +function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { + return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); +} +export function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { + var value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + var propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + var useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + // Serialize just for validation purposes. + var parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (var propertyName in parameterPath) { + var propertyMapper = parameterMapper.type.modelProperties[propertyName]; + var propertyPath = parameterPath[propertyName]; + var propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); + // Serialize just for validation purposes. 
+ var propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + var result = { propertyFound: false }; + var i = 0; + for (; i < parameterPath.length; ++i) { + var parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +export function flattenResponse(_response, responseSpec) { + var parsedHeaders = _response.parsedHeaders; + var bodyMapper = responseSpec && responseSpec.bodyMapper; + var addOperationResponse = function (obj) { + return Object.defineProperty(obj, "_response", { + value: _response, + }); + }; + if (bodyMapper) { + var typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); + } + var modelProperties_1 = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; + var isPageableResponse = Object.keys(modelProperties_1).some(function (k) { return modelProperties_1[k].serializedName === ""; }); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + var parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : []; + var arrayResponse = __spreadArrays(parsedBody); + for (var _i = 0, _a = Object.keys(modelProperties_1); _i < _a.length; _i++) { + var key = _a[_i]; + if (modelProperties_1[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + if (parsedHeaders) { + for (var _b = 0, _c = Object.keys(parsedHeaders); _b < _c.length; _b++) { + var key = _c[_b]; + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); + } + } + if (bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody)) { + // primitive body types and HEAD booleans + return addOperationResponse(__assign(__assign({}, parsedHeaders), { body: _response.parsedBody })); + } + return addOperationResponse(__assign(__assign({}, parsedHeaders), _response.parsedBody)); +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map new file mode 100644 index 0000000000..fd4afe62bb --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/serviceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serviceClient.js","sourceRoot":"","sources":["../../lib/serviceClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,OAAO,EAAmB,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AAEtE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AAKxD,OAAO,EACL,0BAA0B,EAC1B,8BAA8B,GAG/B,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,iBAAiB,EAAiB,MAAM,iBAAiB,CAAC;AACnE,OAAO,EACL,qBAAqB,GAEtB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,6BAA6B,EAAE,MAAM,0CAA0C,CAAC;AACzF,OAAO,EACL,eAAe,EACf,6BAA6B,EAC7B,wBAAwB,GACzB,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,sBAAsB,EAAmB,cAAc,EAAE,MAAM,2BAA2B,CAAC;AACpG,OAAO,EAGL,oBAAoB,GAErB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,aAAa,EAAE,MAAM,0BAA0B,CAAC;AACzD,OAAO,EAAE,sBAAsB,EAAE,MAAM,mCAAmC,CAAC;AAC3E,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAA6C,UAAU,EAAc,MAAM,cAAc,CAAC;AACjG,OAAO,EAAE,UAAU,EAAE,MAAM,OAAO,CAAC;AACnC,OAAO,KAAK,KAAK,MAAM,cAAc,CAAC;AACtC,OAAO,EAAE,YAAY,EAAE,MAAM,YAAY,CAAC;AAC1C,OAAO,EAIL,iBAAiB,EACjB,WAAW,GACZ,MAAM,eAAe,CAAC;AAGvB,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AACrD,OAAO,EAAE,WAAW,EAAE,uBAAuB,EAAE,MAAM,wBAAwB,CAAC;AAC9E,OAAO,EAAE,qBAAqB,EAAE,MAAM,kCAAkC,CAAC;AAEzE,OAAO,EACL,8BAA8B,EAC9B,6BAA6B,GAC9B,MAAM,mDAAmD,CAAC;AAqG3D;;;GAGG;AACH;IA0BE;;;;;OAKG;IACH,uBACE,WAAwD,EACxD,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;SAChC;QAED,IAAI,wBAA8D,CAAC;QACnE,IAAI,iBAAiB,CAAC,WAAW,CAAC,EAAE;YAClC,IAAI,KAAK,GAAuB,SAAS,CAAC;YAC1C,IAAI,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,KAAI,6BAA6B,CAAC,QAAQ,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC,EAAE;gBAChF,KAAK,GAAM,OAAO,CAAC,OAAO,cAAW,CAAC;aACvC;YACD,wBAAwB,GAAG,IAAI,8BAA8B,CAAC,WAAW,EAAE,KAAK,CAAC,CAAC;SACnF;aAAM;YACL,wBAAwB,GAAG,WAAW,CAAC;SACxC;QAED,IAAI,wBAAwB,IAAI,CAAC,wBAAwB,CAAC,WAAW,EAAE;YACrE,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;QAED,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,eAAe,IAAI,KAAK,CAAC;QACzD,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,UAAU,IAAI,IAAI,iBAAiB,EAAE,CAAC;QACjE,IAAI,CAAC,qBAAqB,GAAG,IAAI,oBAAoB,CAAC,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAElF,IAAI,sBAA8C,CAAC;QACnD,IAAI,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,
sBAAsB,CAAC,EAAE;YACjD,sBAAsB,GAAG,OAAO,CAAC,sBAAsB,CAAC;SACzD;aAAM;YACL,sBAAsB,GAAG,mCAAmC,CAC1D,wBAAwB,EACxB,OAAO,CACR,CAAC;YACF,IAAI,OAAO,CAAC,sBAAsB,EAAE;gBAClC,IAAM,yBAAyB,GAEF,OAAO,CAAC,sBAAsB,CAAC,sBAAsB,CAAC,CAAC;gBACpF,IAAI,yBAAyB,EAAE;oBAC7B,sBAAsB,GAAG,yBAAyB,CAAC;iBACpD;aACF;SACF;QACD,IAAI,CAAC,uBAAuB,GAAG,sBAAsB,CAAC;IACxD,CAAC;IAED;;OAEG;IACH,mCAAW,GAAX,UAAY,OAAgD;QAC1D,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;YAC5E,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,IAAI,WAA4B,CAAC;QACjC,IAAI;YACF,IAAI,iBAAiB,CAAC,OAAO,CAAC,EAAE;gBAC9B,OAAO,CAAC,yBAAyB,EAAE,CAAC;gBACpC,WAAW,GAAG,OAAO,CAAC;aACvB;iBAAM;gBACL,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;gBAChC,WAAW,GAAG,WAAW,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC;aAC5C;SACF;QAAC,OAAO,KAAK,EAAE;YACd,OAAO,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAC9B;QAED,IAAI,YAAY,GAAkB,IAAI,CAAC,WAAW,CAAC;QACnD,IAAI,IAAI,CAAC,uBAAuB,IAAI,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC3E,KAAK,IAAI,CAAC,GAAG,IAAI,CAAC,uBAAuB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,EAAE,CAAC,EAAE;gBACjE,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC,CAAC,CAAC,CAAC,MAAM,CACnD,YAAY,EACZ,IAAI,CAAC,qBAAqB,CAC3B,CAAC;aACH;SACF;QACD,OAAO,YAAY,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,4CAAoB,GAApB,UACE,kBAAsC,EACtC,aAA4B,EAC5B,QAA+B;QAE/B,IAAI,OAAO,kBAAkB,CAAC,OAAO,KAAK,UAAU,EAAE;YACpD,QAAQ,GAAG,kBAAkB,CAAC,OAAO,CAAC;YACtC,kBAAkB,CAAC,OAAO,GAAG,SAAS,CAAC;SACxC;QAED,IAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;QAEtC,IAAI,MAA6B,CAAC;QAClC,IAAI;YACF,IAAM,OAAO,GAAuB,aAAa,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAC1E,IAAI,CAAC,OAAO,EAAE;gBACZ,MAAM,IAAI,KAAK,CACb,0IAA0I,CAC3I,CAAC;aACH;YAED,WAAW,CAAC,MAAM,GAAG,aAAa,CAAC,UAAU,CAAC;YAC9C,WAAW,CAAC,aAAa,GAAG,aAAa,CAAC;YAE1C,IAAM,UAAU,GAAe,UAAU,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACzD,IAAI,aAAa,CAAC,IAAI,EAAE;gBACtB,UAAU,CAAC,UAAU,CAAC,aAAa,CAAC,IAAI,CAAC,CAAC;aAC3C;YACD,IAAI,aAAa,CAAC,aAAa,IAAI,aAAa,CAAC,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;gBACzE,KAA2B,UAA2B,EAA3B,KAAA,aAAa,CAAC,aAAa,EAA3B,cAA2B,EAA3B,IAA2B,EAAE;oBAAnD,IAAM,YAAY,SAAA;oBACrB,IAAI,iBAAiB,GAAW,sCAAsC,CACpE,IAAI,EACJ,kBAAkB,EAClB,YAAY,EACZ,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,iBAAiB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACpD,YAAY,CAAC,MAAM,EACnB,iBAAiB,EACjB,0BAA0B,CAAC,YAAY,CAAC,CACzC,CAAC;oBACF,IAAI,CAAC,YAAY,CAAC,YAAY,EAAE;wBAC9B,iBAAiB,GAAG,kBAAkB,CAAC,iBAAiB,CAAC,CAAC;qBAC3D;oBACD,UAAU,CAAC,UAAU,CACnB,OAAI,YAAY,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,YAAY,CAAC,OAAG,EACrF,iBAAiB,CAClB,CAAC;iBACH;aACF;YACD,IAAI,aAAa,CAAC,eAAe,IAAI,aAAa,CAAC,eAAe,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC7E,KAA6B,UAA6B,EAA7B,KAAA,aAAa,CAAC,eAAe,EAA7B,cAA6B,EAA7B,IAA6B,EAAE;oBAAvD,IAAM,cAAc,SAAA;oBACvB,IAAI,mBAAmB,GAAQ,sCAAsC,CACnE,IAAI,EACJ,kBAAkB,EAClB,cAAc,EACd,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,mBAAmB,IAAI,SAAS,EAAE;wBACpC,mBAAmB,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtD,cAAc,CAAC,MAAM,EACrB,mBAAmB,EACnB,0BAA0B,CAAC,cAAc,CAAC,CAC3C,CAAC;wBACF,IAAI,cAAc,CAAC,gBAAgB,IAAI,SAAS,EAAE;4BAChD,IAAI,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK,EAAE;gCACnE,IAAI,mBAAmB,CAAC,MAAM,KAAK,CAAC,EAAE;oCACpC,mBAAmB,GAAG,EAAE,CAAC;iCAC1B;qCAAM;oCACL,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;wCACvC,IAAM,IAAI,GAAG,mBAAmB,CAAC,KAAK,CAAC,CAAC;wCACxC,mBAAmB,CAAC,KAAK,CAAC,GAAG,IAAI,IAAI,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC;qCACvE;iCACF;6BACF;iCAAM,IACL,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;gCAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;gCACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;6BACjF;yBACF;wBACD,IAAI,CAAC,cAAc,CAAC,YAAY,EAAE;4BAChC,IAAI,KAAK,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;gCACtC,KAAK,IAAM,KAAK,IAAI,mBAAmB,EAAE;oCACv
C,IACE,mBAAmB,CAAC,KAAK,CAAC,KAAK,SAAS;wCACxC,mBAAmB,CAAC,KAAK,CAAC,KAAK,IAAI,EACnC;wCACA,mBAAmB,CAAC,KAAK,CAAC,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,KAAK,CAAC,CAAC,CAAC;qCAC7E;iCACF;6BACF;iCAAM;gCACL,mBAAmB,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,CAAC;6BAC/D;yBACF;wBACD,IACE,cAAc,CAAC,gBAAgB,IAAI,SAAS;4BAC5C,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,KAAK;4BAC/D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG;4BAC7D,cAAc,CAAC,gBAAgB,KAAK,qBAAqB,CAAC,GAAG,EAC7D;4BACA,mBAAmB,GAAG,mBAAmB,CAAC,IAAI,CAAC,cAAc,CAAC,gBAAgB,CAAC,CAAC;yBACjF;wBACD,UAAU,CAAC,iBAAiB,CAC1B,cAAc,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,cAAc,CAAC,EAClF,mBAAmB,CACpB,CAAC;qBACH;iBACF;aACF;YACD,WAAW,CAAC,GAAG,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC;YAExC,IAAM,WAAW,GAAG,aAAa,CAAC,WAAW,IAAI,IAAI,CAAC,kBAAkB,CAAC;YACzE,IAAI,WAAW,EAAE;gBACf,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,WAAW,CAAC,CAAC;aACtD;YAED,IAAI,aAAa,CAAC,gBAAgB,EAAE;gBAClC,KAA8B,UAA8B,EAA9B,KAAA,aAAa,CAAC,gBAAgB,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;oBAAzD,IAAM,eAAe,SAAA;oBACxB,IAAI,WAAW,GAAQ,sCAAsC,CAC3D,IAAI,EACJ,kBAAkB,EAClB,eAAe,EACf,aAAa,CAAC,UAAU,CACzB,CAAC;oBACF,IAAI,WAAW,IAAI,SAAS,EAAE;wBAC5B,WAAW,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CAC9C,eAAe,CAAC,MAAM,EACtB,WAAW,EACX,0BAA0B,CAAC,eAAe,CAAC,CAC5C,CAAC;wBACF,IAAM,sBAAsB,GAAI,eAAe,CAAC,MAA2B;6BACxE,sBAAsB,CAAC;wBAC1B,IAAI,sBAAsB,EAAE;4BAC1B,KAAkB,UAAwB,EAAxB,KAAA,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;gCAAvC,IAAM,GAAG,SAAA;gCACZ,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,GAAG,EAAE,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;6BACzE;yBACF;6BAAM;4BACL,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,eAAe,CAAC,MAAM,CAAC,cAAc;gCACnC,0BAA0B,CAAC,eAAe,CAAC,EAC7C,WAAW,CACZ,CAAC;yBACH;qBACF;iBACF;aACF;YAED,IAAM,OAAO,GAAmC,kBAAkB,CAAC,OAAO,CAAC;YAC3E,IAAI,OAAO,EAAE;gBACX,IAAI,OAAO,CAAC,aAAa,EAAE;oBACzB,KAAK,IAAM,gBAAgB,IAAI,OAAO,CAAC,aAAa,EAAE;wBACpD,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,gBAAgB,EAAE,OAAO,CAAC,aAAa,CAAC,gBAAgB,CAAC,CAAC,CAAC;qBACpF;iBACF;gBAED,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,WAAW,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;iBAC/C;gBAED,IAAI,OAAO,CAAC,OAAO,EAAE;oBACnB,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;iBACvC;gBAED,IAAI,OAAO,CAAC,gBAAgB,EAAE;oBAC5B,WAAW,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;iBACzD;gBAED,IAAI,OAAO,CAAC,kBAAkB,EAAE;oBAC9B,WAAW,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;iBAC7D;aACF;YAED,WAAW,CAAC,eAAe,GAAG,IAAI,CAAC,gBAAgB,CAAC;YAEpD,oBAAoB,CAAC,IAAI,EAAE,WAAW,EAAE,kBAAkB,EAAE,aAAa,CAAC,CAAC;YAE3E,IAAI,WAAW,CAAC,kBAAkB,IAAI,SAAS,EAAE;gBAC/C,WAAW,CAAC,kBAAkB,GAAG,iBAAiB,CAAC,aAAa,CAAC,CAAC;aACnE;YAED,MAAM,GAAG,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,UAAC,GAAG;gBAC9C,OAAA,eAAe,CAAC,GAAG,EAAE,aAAa,CAAC,SAAS,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YAAzD,CAAyD,CAC1D,CAAC;SACH;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;SAChC;QAED,IAAM,EAAE,GAAG,QAAQ,CAAC;QACpB,IAAI,EAAE,EAAE;YACN,MAAM;gBACJ,2CAA2C;iBAC1C,IAAI,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,IAAI,EAAE,GAAG,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,SAAS,CAAC,EAAxE,CAAwE,CAAC;iBACvF,KAAK,CAAC,UAAC,GAAG,IAAK,OAAA,EAAE,CAAC,GAAG,CAAC,EAAP,CAAO,CAAC,CAAC;SAC5B;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,oBAAC;AAAD,CAAC,AAhUD,IAgUC;;AAED,MAAM,UAAU,oBAAoB,CAClC,aAA4B,EAC5B,WAA4B,EAC5B,kBAAsC,EACtC,aAA4B;IAE5B,IAAI,aAAa,CAAC,WAAW,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,EAAE;QACjE,WAAW,CAAC,IAAI,GAAG,sCAAsC,CACvD,aAAa,EACb,kBAAkB,EAClB,aAAa,CAAC,WAAW,EACzB,aAAa,CAAC,UAAU,CACzB,CAAC;QAEF,IAAM,UAAU,GAAG,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC;QAC5C,IAAA,QAAQ,GAA8C,UAAU,SAAxD,EAAE,OAAO,GAAqC,UAAU,QAA/C,EAAE,cAAc,GAAqB,UAAU,eAA/B,EAAE,cAAc,GAAK,UAAU,eAAf,CAAgB;QACzE,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI;YACF,IAA
I,WAAW,CAAC,IAAI,IAAI,SAAS,IAAI,QAAQ,EAAE;gBAC7C,IAAM,8BAA8B,GAAW,0BAA0B,CACvE,aAAa,CAAC,WAAW,CAC1B,CAAC;gBACF,WAAW,CAAC,IAAI,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACnD,UAAU,EACV,WAAW,CAAC,IAAI,EAChB,8BAA8B,CAC/B,CAAC;gBACF,IAAM,QAAQ,GAAG,QAAQ,KAAK,UAAU,CAAC,MAAM,CAAC;gBAChD,IAAI,aAAa,CAAC,KAAK,EAAE;oBACvB,IAAI,QAAQ,KAAK,UAAU,CAAC,QAAQ,EAAE;wBACpC,WAAW,CAAC,IAAI,GAAG,YAAY,CAC7B,KAAK,CAAC,kBAAkB,CACtB,WAAW,CAAC,IAAI,EAChB,cAAc,IAAI,OAAO,IAAI,cAAe,CAC7C,EACD,EAAE,QAAQ,EAAE,OAAO,IAAI,cAAc,EAAE,CACxC,CAAC;qBACH;yBAAM,IAAI,CAAC,QAAQ,EAAE;wBACpB,WAAW,CAAC,IAAI,GAAG,YAAY,CAAC,WAAW,CAAC,IAAI,EAAE;4BAChD,QAAQ,EAAE,OAAO,IAAI,cAAc;yBACpC,CAAC,CAAC;qBACJ;iBACF;qBAAM,IAAI,CAAC,QAAQ,EAAE;oBACpB,WAAW,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;iBACrD;aACF;SACF;QAAC,OAAO,KAAK,EAAE;YACd,MAAM,IAAI,KAAK,CACb,aAAU,KAAK,CAAC,OAAO,iDAA2C,IAAI,CAAC,SAAS,CAC9E,cAAc,EACd,SAAS,EACT,IAAI,CACL,MAAG,CACL,CAAC;SACH;KACF;SAAM,IAAI,aAAa,CAAC,kBAAkB,IAAI,aAAa,CAAC,kBAAkB,CAAC,MAAM,GAAG,CAAC,EAAE;QAC1F,WAAW,CAAC,QAAQ,GAAG,EAAE,CAAC;QAC1B,KAAgC,UAAgC,EAAhC,KAAA,aAAa,CAAC,kBAAkB,EAAhC,cAAgC,EAAhC,IAAgC,EAAE;YAA7D,IAAM,iBAAiB,SAAA;YAC1B,IAAM,sBAAsB,GAAQ,sCAAsC,CACxE,aAAa,EACb,kBAAkB,EAClB,iBAAiB,EACjB,aAAa,CAAC,UAAU,CACzB,CAAC;YACF,IAAI,sBAAsB,IAAI,SAAS,EAAE;gBACvC,IAAM,6BAA6B,GACjC,iBAAiB,CAAC,MAAM,CAAC,cAAc,IAAI,0BAA0B,CAAC,iBAAiB,CAAC,CAAC;gBAC3F,WAAW,CAAC,QAAQ,CAAC,6BAA6B,CAAC,GAAG,aAAa,CAAC,UAAU,CAAC,SAAS,CACtF,iBAAiB,CAAC,MAAM,EACxB,sBAAsB,EACtB,0BAA0B,CAAC,iBAAiB,CAAC,CAC9C,CAAC;aACH;SACF;KACF;AACH,CAAC;AAED,SAAS,sBAAsB,CAAC,QAAa;IAC3C,OAAO,OAAO,QAAQ,CAAC,MAAM,KAAK,UAAU,CAAC;AAC/C,CAAC;AAED,SAAS,wBAAwB,CAC/B,KAA8D,EAC9D,mBAAiC;IAEjC,IAAI,MAAc,CAAC;IACnB,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;QAC7B,MAAM,GAAG,KAAK,CAAC;KAChB;SAAM;QACL,MAAM,GAAG,mBAAmB,EAAE,CAAC;QAC/B,IAAI,OAAO,KAAK,KAAK,UAAU,EAAE;YAC/B,MAAM,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;SACxB;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,mCAAmC,CAC1C,WAAwE,EACxE,OAA6B;IAE7B,IAAM,SAAS,GAA2B,EAAE,CAAC;IAE7C,IAAI,OAAO,CAAC,6BAA6B,EAAE;QACzC,SAAS,CAAC,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC,yBAAyB,CAAC,CAAC,CAAC;KAClF;IAED,IAAI,WAAW,EAAE;QACf,IAAI,sBAAsB,CAAC,WAAW,CAAC,EAAE;YACvC,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC7B;aAAM;YACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC,CAAC;SAC5C;KACF;IAED,IAAM,mBAAmB,GAAW,wBAAwB,CAC1D,OAAO,CAAC,mBAAmB,EAC3B,6BAA6B,CAC9B,CAAC;IACF,IAAM,oBAAoB,GAAW,wBAAwB,CAC3D,OAAO,CAAC,SAAS,EACjB,wBAAwB,CACzB,CAAC;IACF,IAAI,mBAAmB,IAAI,oBAAoB,EAAE;QAC/C,SAAS,CAAC,IAAI,CAAC,eAAe,CAAC,EAAE,GAAG,EAAE,mBAAmB,EAAE,KAAK,EAAE,oBAAoB,EAAE,CAAC,CAAC,CAAC;KAC5F;IAED,IAAM,eAAe,yBAChB,sBAAsB,GACtB,OAAO,CAAC,eAAe,CAC3B,CAAC;IACF,IAAI,eAAe,CAAC,eAAe,EAAE;QACnC,SAAS,CAAC,IAAI,CAAC,cAAc,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,CAAC;KAC5D;IAED,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,0BAA0B,CAAC,CAAC,CAAC;IAEzE,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE;QAC1B,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,sBAAsB,EAAE,CAAC,CAAC;QACzC,SAAS,CAAC,IAAI,CAAC,qBAAqB,EAAE,CAAC,CAAC;KACzC;IAED,SAAS,CAAC,IAAI,CAAC,qBAAqB,CAAC,OAAO,CAAC,2BAA2B,CAAC,CAAC,CAAC;IAE3E,IAAM,aAAa,GAAG,OAAO,CAAC,aAAa,IAAI,uBAAuB,EAAE,CAAC;IACzE,IAAI,aAAa,EAAE;QACjB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,aAAa,CAAC,CAAC,CAAC;KAC5C;IAED,IAAI,OAAO,CAAC,aAAa,EAAE;QACzB,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC;KACpD;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAID;;;GAGG;AACH,MAAM,UAAU,iBAAiB,CAAC,MAAsB,EAAE,YAAsB;IAC9E,IAAI,MAAM,IAAI,YAAY,EAAE;QAC1B,IAAM,kBAAkB,GAAW,YAAY,CAAC,MAAM,CAAC;QACvD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,kBAAkB,GAAG,CAAC,EAAE,EAAE,CAAC,EAAE;YAC/C,IAAM,YAAY,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;YAC7C,IAA
I,CAAC,MAAM,CAAC,YAAY,CAAC,EAAE;gBACzB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC;aAC3B;YACD,MAAM,GAAG,MAAM,CAAC,YAAY,CAAC,CAAC;SAC/B;KACF;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,sCAAsC,CAC7C,aAA4B,EAC5B,kBAAsC,EACtC,SAA6B,EAC7B,UAAsB;IAEtB,OAAO,0CAA0C,CAC/C,aAAa,EACb,kBAAkB,EAClB,SAAS,CAAC,aAAa,EACvB,SAAS,CAAC,MAAM,EAChB,UAAU,CACX,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,aAA4B,EAC5B,kBAAsC,EACtC,aAA4B,EAC5B,eAAuB,EACvB,UAAsB;IAEtB,IAAI,KAAU,CAAC;IACf,IAAI,OAAO,aAAa,KAAK,QAAQ,EAAE;QACrC,aAAa,GAAG,CAAC,aAAa,CAAC,CAAC;KACjC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,aAAa,CAAC,EAAE;QAChC,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE;YAC5B,IAAI,eAAe,CAAC,UAAU,EAAE;gBAC9B,KAAK,GAAG,eAAe,CAAC,YAAY,CAAC;aACtC;iBAAM;gBACL,IAAI,oBAAoB,GAAyB,4BAA4B,CAC3E,kBAAkB,EAClB,aAAa,CACd,CAAC;gBACF,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,oBAAoB,GAAG,4BAA4B,CAAC,aAAa,EAAE,aAAa,CAAC,CAAC;iBACnF;gBAED,IAAI,eAAe,GAAG,KAAK,CAAC;gBAC5B,IAAI,CAAC,oBAAoB,CAAC,aAAa,EAAE;oBACvC,eAAe;wBACb,eAAe,CAAC,QAAQ;4BACxB,CAAC,aAAa,CAAC,CAAC,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC;iBAClE;gBACD,KAAK,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC;aAC7F;YAED,0CAA0C;YAC1C,IAAM,mBAAmB,GAAW,8BAA8B,CAChE,aAAa,EACb,eAAe,CAChB,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,eAAe,EAAE,KAAK,EAAE,mBAAmB,CAAC,CAAC;SACnE;KACF;SAAM;QACL,IAAI,eAAe,CAAC,QAAQ,EAAE;YAC5B,KAAK,GAAG,EAAE,CAAC;SACZ;QAED,KAAK,IAAM,YAAY,IAAI,aAAa,EAAE;YACxC,IAAM,cAAc,GAAY,eAAmC,CAAC,IAAI,CAAC,eAAgB,CACvF,YAAY,CACb,CAAC;YACF,IAAM,YAAY,GAAkB,aAAa,CAAC,YAAY,CAAC,CAAC;YAChE,IAAM,aAAa,GAAQ,0CAA0C,CACnE,aAAa,EACb,kBAAkB,EAClB,YAAY,EACZ,cAAc,EACd,UAAU,CACX,CAAC;YACF,0CAA0C;YAC1C,IAAM,kBAAkB,GAAW,8BAA8B,CAC/D,YAAY,EACZ,cAAc,CACf,CAAC;YACF,UAAU,CAAC,SAAS,CAAC,cAAc,EAAE,aAAa,EAAE,kBAAkB,CAAC,CAAC;YACxE,IAAI,aAAa,KAAK,SAAS,EAAE;gBAC/B,IAAI,CAAC,KAAK,EAAE;oBACV,KAAK,GAAG,EAAE,CAAC;iBACZ;gBACD,KAAK,CAAC,YAAY,CAAC,GAAG,aAAa,CAAC;aACrC;SACF;KACF;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAOD,SAAS,4BAA4B,CACnC,MAAwC,EACxC,aAAuB;IAEvB,IAAM,MAAM,GAAyB,EAAE,aAAa,EAAE,KAAK,EAAE,CAAC;IAC9D,IAAI,CAAC,GAAG,CAAC,CAAC;IACV,OAAO,CAAC,GAAG,aAAa,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACpC,IAAM,iBAAiB,GAAW,aAAa,CAAC,CAAC,CAAC,CAAC;QACnD,8EAA8E;QAC9E,IAAI,MAAM,IAAI,SAAS,IAAI,iBAAiB,IAAI,MAAM,EAAE;YACtD,MAAM,GAAG,MAAM,CAAC,iBAAiB,CAAC,CAAC;SACpC;aAAM;YACL,MAAM;SACP;KACF;IACD,IAAI,CAAC,KAAK,aAAa,CAAC,MAAM,EAAE;QAC9B,MAAM,CAAC,aAAa,GAAG,MAAM,CAAC;QAC9B,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC;KAC7B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,UAAU,eAAe,CAC7B,SAAgC,EAChC,YAA2C;IAE3C,IAAM,aAAa,GAAG,SAAS,CAAC,aAAa,CAAC;IAC9C,IAAM,UAAU,GAAG,YAAY,IAAI,YAAY,CAAC,UAAU,CAAC;IAE3D,IAAM,oBAAoB,GAAG,UAAC,GAAO;QACnC,OAAA,MAAM,CAAC,cAAc,CAAC,GAAG,EAAE,WAAW,EAAE;YACtC,KAAK,EAAE,SAAS;SACjB,CAAC;IAFF,CAEE,CAAC;IAEL,IAAI,UAAU,EAAE;QACd,IAAM,QAAQ,GAAG,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;QACtC,IAAI,QAAQ,KAAK,QAAQ,EAAE;YACzB,OAAO,oBAAoB,uBACtB,aAAa,KAChB,QAAQ,EAAE,SAAS,CAAC,QAAQ,EAC5B,kBAAkB,EAAE,SAAS,CAAC,kBAAkB,IAChD,CAAC;SACJ;QAED,IAAM,iBAAe,GACnB,CAAC,QAAQ,KAAK,WAAW,IAAK,UAA8B,CAAC,IAAI,CAAC,eAAe,CAAC,IAAI,EAAE,CAAC;QAC3F,IAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,CAAC,IAAI,CAC1D,UAAC,CAAC,IAAK,OAAA,iBAAe,CAAC,CAAC,CAAC,CAAC,cAAc,KAAK,EAAE,EAAxC,CAAwC,CAChD,CAAC;QACF,IAAI,QAAQ,KAAK,UAAU,IAAI,kBAAkB,EAAE;YACjD,8EAA8E;YAC9E,mDAAmD;YACnD,IAAM,UAAU,GAAG,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,EAAE,CAAC;YACnF,IAAM,aAAa,GAAG,eAAI,UAAU,CAAyB,CAAC;YAE9D,KAAkB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,iBAAe,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAA3C,IAAM,GAAG,SAAA;gBACZ,IAAI,iBAAe,CAAC,GAAG,CAAC,CAAC,cAAc,EAAE;oBACvC,aAAa,CAAC,GAA
G,CAAC,GAAG,SAAS,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;iBAChD;aACF;YAED,IAAI,aAAa,EAAE;gBACjB,KAAkB,UAA0B,EAA1B,KAAA,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAA1B,cAA0B,EAA1B,IAA0B,EAAE;oBAAzC,IAAM,GAAG,SAAA;oBACZ,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;iBACzC;aACF;YACD,oBAAoB,CAAC,aAAa,CAAC,CAAC;YACpC,OAAO,aAAa,CAAC;SACtB;QAED,IAAI,QAAQ,KAAK,WAAW,IAAI,QAAQ,KAAK,YAAY,EAAE;YACzD,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;SACJ;KACF;IAED,IACE,UAAU;QACV,SAAS,CAAC,OAAO,CAAC,MAAM,KAAK,MAAM;QACnC,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,UAAU,CAAC,EAC3C;QACA,yCAAyC;QACzC,OAAO,oBAAoB,uBACtB,aAAa,KAChB,IAAI,EAAE,SAAS,CAAC,UAAU,IAC1B,CAAC;KACJ;IAED,OAAO,oBAAoB,uBACtB,aAAa,GACb,SAAS,CAAC,UAAU,EACvB,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.d.ts b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts new file mode 100644 index 0000000000..710361f8cf --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts @@ -0,0 +1,149 @@ +/** + * A class that handles the query portion of a URLBuilder. + */ +export declare class URLQuery { + private readonly _rawQuery; + /** + * Get whether or not there any query parameters in this URLQuery. + */ + any(): boolean; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + set(parameterName: string, parameterValue: any): void; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + get(parameterName: string): string | string[] | undefined; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + toString(): string; + /** + * Parse a URLQuery from the provided text. + */ + static parse(text: string): URLQuery; +} +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export declare class URLBuilder { + private _scheme; + private _host; + private _port; + private _path; + private _query; + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + setScheme(scheme: string | undefined): void; + /** + * Get the scheme that has been set in this URL. + */ + getScheme(): string | undefined; + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + setHost(host: string | undefined): void; + /** + * Get the host that has been set in this URL. + */ + getHost(): string | undefined; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + setPort(port: number | string | undefined): void; + /** + * Get the port that has been set in this URL. + */ + getPort(): string | undefined; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + setPath(path: string | undefined): void; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + appendPath(path: string | undefined): void; + /** + * Get the path that has been set in this URL. 
+ */ + getPath(): string | undefined; + /** + * Set the query in this URL. + */ + setQuery(query: string | undefined): void; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + setQueryParameter(queryParameterName: string, queryParameterValue: any): void; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + getQueryParameterValue(queryParameterName: string): string | string[] | undefined; + /** + * Get the query in this URL. + */ + getQuery(): string | undefined; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set; + toString(): string; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + replaceAll(searchValue: string, replaceValue: string): void; + static parse(text: string): URLBuilder; +} +declare type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; +declare type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; +export declare class URLToken { + readonly text: string; + readonly type: URLTokenType; + constructor(text: string, type: URLTokenType); + static scheme(text: string): URLToken; + static host(text: string): URLToken; + static port(text: string): URLToken; + static path(text: string): URLToken; + static query(text: string): URLToken; +} +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export declare function isAlphaNumericCharacter(character: string): boolean; +/** + * A class that tokenizes URL strings. + */ +export declare class URLTokenizer { + readonly _text: string; + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + constructor(_text: string, state?: URLTokenizerState); + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + current(): URLToken | undefined; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. 
+ */ + next(): boolean; +} +export {}; +//# sourceMappingURL=url.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map new file mode 100644 index 0000000000..0c8174a5fd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"url.d.ts","sourceRoot":"","sources":["../../lib/url.ts"],"names":[],"mappings":"AAOA;;GAEG;AACH,qBAAa,QAAQ;IACnB,OAAO,CAAC,QAAQ,CAAC,SAAS,CAA2D;IAErF;;OAEG;IACI,GAAG,IAAI,OAAO;IAIrB;;;;OAIG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,EAAE,cAAc,EAAE,GAAG,GAAG,IAAI;IAW5D;;;OAGG;IACI,GAAG,CAAC,aAAa,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIhE;;OAEG;IACI,QAAQ,IAAI,MAAM;IAoBzB;;OAEG;WACW,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CA0D5C;AAED;;GAEG;AACH,qBAAa,UAAU;IACrB,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,KAAK,CAAqB;IAClC,OAAO,CAAC,MAAM,CAAuB;IAErC;;;OAGG;IACI,SAAS,CAAC,MAAM,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQlD;;OAEG;IACI,SAAS,IAAI,MAAM,GAAG,SAAS;IAItC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQ9C;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,GAAG,SAAS,GAAG,IAAI;IAQvD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;;OAGG;IACI,OAAO,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAgB9C;;;OAGG;IACI,UAAU,CAAC,IAAI,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAkBjD;;OAEG;IACI,OAAO,IAAI,MAAM,GAAG,SAAS;IAIpC;;OAEG;IACI,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,SAAS,GAAG,IAAI;IAQhD;;;;OAIG;IACI,iBAAiB,CAAC,kBAAkB,EAAE,MAAM,EAAE,mBAAmB,EAAE,GAAG,GAAG,IAAI;IASpF;;;OAGG;IACI,sBAAsB,CAAC,kBAAkB,EAAE,MAAM,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,SAAS;IAIxF;;OAEG;IACI,QAAQ,IAAI,MAAM,GAAG,SAAS;IAIrC;;OAEG;IACH,OAAO,CAAC,GAAG;IAqCJ,QAAQ,IAAI,MAAM;IA6BzB;;;OAGG;IACI,UAAU,CAAC,WAAW,EAAE,MAAM,EAAE,YAAY,EAAE,MAAM,GAAG,IAAI;WAUpD,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,UAAU;CAK9C;AAED,aAAK,iBAAiB,GAAG,QAAQ,GAAG,gBAAgB,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,MAAM,CAAC;AAEnG,aAAK,YAAY,GAAG,QAAQ,GAAG,MAAM,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;AAElE,qBAAa,QAAQ;aACgB,IAAI,EAAE,MAAM;aAAkB,IAAI,EAAE,YAAY;gBAAhD,IAAI,EAAE,MAAM,EAAkB,IAAI,EAAE,YAAY;WAErE,MAAM,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI9B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,IAAI,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;WAI5B,KAAK,CAAC,IAAI,EAAE,MAAM,GAAG,QAAQ;CAG5C;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAOlE;AAED;;GAEG;AACH,qBAAa,YAAY;IAMJ,QAAQ,CAAC,KAAK,EAAE,MAAM;IALzC,QAAQ,CAAC,WAAW,EAAE,MAAM,CAAC;IAC7B,aAAa,EAAE,iBAAiB,CAAC;IACjC,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,QAAQ,GAAG,SAAS,CAAC;gBAER,KAAK,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,iBAAiB;IAMpE;;;OAGG;IACI,OAAO,IAAI,QAAQ,GAAG,SAAS;IAItC;;OAEG;IACI,IAAI,IAAI,OAAO;CAmCvB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.js b/node_modules/@azure/ms-rest-js/es/lib/url.js new file mode 100644 index 0000000000..0de4c16054 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.js @@ -0,0 +1,595 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { replaceAll } from "./util/utils"; +/** + * A class that handles the query portion of a URLBuilder. + */ +var URLQuery = /** @class */ (function () { + function URLQuery() { + this._rawQuery = {}; + } + /** + * Get whether or not there any query parameters in this URLQuery. 
+ */ + URLQuery.prototype.any = function () { + return Object.keys(this._rawQuery).length > 0; + }; + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + URLQuery.prototype.set = function (parameterName, parameterValue) { + if (parameterName) { + if (parameterValue != undefined) { + var newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } + else { + delete this._rawQuery[parameterName]; + } + } + }; + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + URLQuery.prototype.get = function (parameterName) { + return parameterName ? this._rawQuery[parameterName] : undefined; + }; + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + URLQuery.prototype.toString = function () { + var result = ""; + for (var parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + var parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + var parameterStrings = []; + for (var _i = 0, parameterValue_1 = parameterValue; _i < parameterValue_1.length; _i++) { + var parameterValueElement = parameterValue_1[_i]; + parameterStrings.push(parameterName + "=" + parameterValueElement); + } + result += parameterStrings.join("&"); + } + else { + result += parameterName + "=" + parameterValue; + } + } + return result; + }; + /** + * Parse a URLQuery from the provided text. + */ + URLQuery.parse = function (text) { + var result = new URLQuery(); + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + var currentState = "ParameterName"; + var parameterName = ""; + var parameterValue = ""; + for (var i = 0; i < text.length; ++i) { + var currentCharacter = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + case "&": + parameterName = ""; + parameterValue = ""; + break; + default: + parameterName += currentCharacter; + break; + } + break; + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + default: + parameterValue += currentCharacter; + break; + } + break; + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + return result; + }; + return URLQuery; +}()); +export { URLQuery }; +/** + * A class that handles creating, modifying, and parsing URLs. + */ +var URLBuilder = /** @class */ (function () { + function URLBuilder() { + } + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setScheme = function (scheme) { + if (!scheme) { + this._scheme = undefined; + } + else { + this.set(scheme, "SCHEME"); + } + }; + /** + * Get the scheme that has been set in this URL. + */ + URLBuilder.prototype.getScheme = function () { + return this._scheme; + }; + /** + * Set the host for this URL. 
If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setHost = function (host) { + if (!host) { + this._host = undefined; + } + else { + this.set(host, "SCHEME_OR_HOST"); + } + }; + /** + * Get the host that has been set in this URL. + */ + URLBuilder.prototype.getHost = function () { + return this._host; + }; + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + URLBuilder.prototype.setPort = function (port) { + if (port == undefined || port === "") { + this._port = undefined; + } + else { + this.set(port.toString(), "PORT"); + } + }; + /** + * Get the port that has been set in this URL. + */ + URLBuilder.prototype.getPort = function () { + return this._port; + }; + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + URLBuilder.prototype.setPath = function (path) { + if (!path) { + this._path = undefined; + } + else { + var schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + var schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); + } + else { + this.set(path, "PATH"); + } + } + }; + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + URLBuilder.prototype.appendPath = function (path) { + if (path) { + var currentPath = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + if (path.startsWith("/")) { + path = path.substring(1); + } + path = currentPath + path; + } + this.set(path, "PATH"); + } + }; + /** + * Get the path that has been set in this URL. + */ + URLBuilder.prototype.getPath = function () { + return this._path; + }; + /** + * Set the query in this URL. + */ + URLBuilder.prototype.setQuery = function (query) { + if (!query) { + this._query = undefined; + } + else { + this._query = URLQuery.parse(query); + } + }; + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + URLBuilder.prototype.setQueryParameter = function (queryParameterName, queryParameterValue) { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + }; + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + URLBuilder.prototype.getQueryParameterValue = function (queryParameterName) { + return this._query ? this._query.get(queryParameterName) : undefined; + }; + /** + * Get the query in this URL. + */ + URLBuilder.prototype.getQuery = function () { + return this._query ? this._query.toString() : undefined; + }; + /** + * Set the parts of this URL by parsing the provided text using the provided startState. 
+ */ + URLBuilder.prototype.set = function (text, startState) { + var tokenizer = new URLTokenizer(text, startState); + while (tokenizer.next()) { + var token = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + case "HOST": + this._host = token.text || undefined; + break; + case "PORT": + this._port = token.text || undefined; + break; + case "PATH": + var tokenPath = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + default: + throw new Error("Unrecognized URLTokenType: " + token.type); + } + } + } + }; + URLBuilder.prototype.toString = function () { + var result = ""; + if (this._scheme) { + result += this._scheme + "://"; + } + if (this._host) { + result += this._host; + } + if (this._port) { + result += ":" + this._port; + } + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + if (this._query && this._query.any()) { + result += "?" + this._query.toString(); + } + return result; + }; + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. + */ + URLBuilder.prototype.replaceAll = function (searchValue, replaceValue) { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + }; + URLBuilder.parse = function (text) { + var result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + }; + return URLBuilder; +}()); +export { URLBuilder }; +var URLToken = /** @class */ (function () { + function URLToken(text, type) { + this.text = text; + this.type = type; + } + URLToken.scheme = function (text) { + return new URLToken(text, "SCHEME"); + }; + URLToken.host = function (text) { + return new URLToken(text, "HOST"); + }; + URLToken.port = function (text) { + return new URLToken(text, "PORT"); + }; + URLToken.path = function (text) { + return new URLToken(text, "PATH"); + }; + URLToken.query = function (text) { + return new URLToken(text, "QUERY"); + }; + return URLToken; +}()); +export { URLToken }; +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character) { + var characterCode = character.charCodeAt(0); + return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); +} +/** + * A class that tokenizes URL strings. + */ +var URLTokenizer = /** @class */ (function () { + function URLTokenizer(_text, state) { + this._text = _text; + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. 
+ */ + URLTokenizer.prototype.current = function () { + return this._currentToken; + }; + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + URLTokenizer.prototype.next = function () { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } + else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + case "HOST": + nextHost(this); + break; + case "PORT": + nextPort(this); + break; + case "PATH": + nextPath(this); + break; + case "QUERY": + nextQuery(this); + break; + default: + throw new Error("Unrecognized URLTokenizerState: " + this._currentState); + } + } + return !!this._currentToken; + }; + return URLTokenizer; +}()); +export { URLTokenizer }; +/** + * Read the remaining characters from this Tokenizer's character stream. + */ +function readRemaining(tokenizer) { + var result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer) { + return tokenizer._currentIndex < tokenizer._textLength; +} +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer) { + return tokenizer._text[tokenizer._currentIndex]; +} +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer, step) { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer, charactersToPeek) { + var endIndex = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer, condition) { + var result = ""; + while (hasCurrentCharacter(tokenizer)) { + var currentCharacter = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } + else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + return result; +} +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer) { + return readWhile(tokenizer, function (character) { return isAlphaNumericCharacter(character); }); +} +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
+ */ +function readUntilCharacter(tokenizer) { + var terminatingCharacters = []; + for (var _i = 1; _i < arguments.length; _i++) { + terminatingCharacters[_i - 1] = arguments[_i]; + } + return readWhile(tokenizer, function (character) { return terminatingCharacters.indexOf(character) === -1; }); +} +function nextScheme(tokenizer) { + var scheme = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "HOST"; + } +} +function nextSchemeOrHost(tokenizer) { + var schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } + else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } + } +} +function nextHost(tokenizer) { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + var host = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPort(tokenizer) { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + var port = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextPath(tokenizer) { + var path = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } + else { + tokenizer._currentState = "QUERY"; + } +} +function nextQuery(tokenizer) { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + var query = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} +//# sourceMappingURL=url.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/url.js.map b/node_modules/@azure/ms-rest-js/es/lib/url.js.map new file mode 100644 index 0000000000..68d9e43048 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/url.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"url.js","sourceRoot":"","sources":["../../lib/url.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAI1C;;GAEG;AACH;IAAA;QACmB,cAAS,GAAwD,EAAE,CAAC;IAqHvF,CAAC;IAnHC;;OAEG;IACI,sBAAG,GAAV;QACE,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAChD,CAAC;IAED;;;;OAIG;IACI,sBAAG,GAAV,UAAW,aAAqB,EAAE,cAAmB;QACnD,IAAI,aAAa,EAAE;YACjB,IAAI,cAAc,IAAI,SAAS,EAAE;gBAC/B,IAAM,QAAQ,GAAG,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,cAAc,CAAC,QAAQ,EAAE,CAAC;gBAC5F,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,GAAG,QAAQ,CAAC;aAC1C;iBAAM;gBACL,OAAO,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;aACtC;SACF;IACH,CAAC;IAED;;;OAGG;IACI,sBAAG,GAAV,UAAW,aAAqB;QAC9B,OAAO,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACnE,CAAC;IAED;;OAEG;IACI,2BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAChB,KAAK,IAAM,aAAa,IAAI,IAAI,CAAC,SAAS,EAAE;YAC1C,IAAI,MAAM,EAAE;gBACV,MAAM,IAAI,GAAG,CAAC;aACf;YACD,IAAM,cAAc,GAAG,IAAI,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;YACrD,IAAI,KAAK,CAAC,OAAO,CAAC,cAAc,CAAC,EAAE;gBACjC,IAAM,gBAAgB,GAAG,EAAE,CAAC;gBAC5B,KAAoC,UAAc,EAAd,iCAAc,EAAd,4BAAc,EAAd,IAAc,EAAE;oBAA/C,IAAM,qBAAqB,uBAAA;oBAC9B,gBAAgB,CAAC,IAAI,CAAI,aAAa,SAAI,qBAAuB,CAAC,CAAC;iBACpE;gBACD,MAAM,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACtC;iBAAM;gBACL,MAAM,IAAO,aAAa,SAAI,cAAgB,CAAC;aAChD;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;OAEG;IACW,cAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;QAE9B,IAAI,IAAI,EAAE;YACR,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;aAC1B;YAED,IAAI,YAAY,GAAuB,eAAe,CAAC;YAEvD,IAAI,aAAa,GAAG,EAAE,CAAC;YACvB,IAAI,cAAc,GAAG,EAAE,CAAC;YACxB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;gBACpC,IAAM,gBAAgB,GAAW,IAAI,CAAC,CAAC,CAAC,CAAC;gBACzC,QAAQ,YAAY,EAAE;oBACpB,KAAK,eAAe;wBAClB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,YAAY,GAAG,gBAAgB,CAAC;gCAChC,MAAM;4BAER,KAAK,GAAG;gCACN,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,MAAM;4BAER;gCACE,aAAa,IAAI,gBAAgB,CAAC;gCAClC,MAAM;yBACT;wBACD,MAAM;oBAER,KAAK,gBAAgB;wBACnB,QAAQ,gBAAgB,EAAE;4BACxB,KAAK,GAAG;gCACN,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;gCAC1C,aAAa,GAAG,EAAE,CAAC;gCACnB,cAAc,GAAG,EAAE,CAAC;gCACpB,YAAY,GAAG,eAAe,CAAC;gCAC/B,MAAM;4BAER;gCACE,cAAc,IAAI,gBAAgB,CAAC;gCACnC,MAAM;yBACT;wBACD,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,qCAAqC,GAAG,YAAY,CAAC,CAAC;iBACzE;aACF;YACD,IAAI,YAAY,KAAK,gBAAgB,EAAE;gBACrC,MAAM,CAAC,GAAG,CAAC,aAAa,EAAE,cAAc,CAAC,CAAC;aAC3C;SACF;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,eAAC;AAAD,CAAC,AAtHD,IAsHC;;AAED;;GAEG;AACH;IAAA;IAiPA,CAAC;IA1OC;;;OAGG;IACI,8BAAS,GAAhB,UAAiB,MAA0B;QACzC,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC;SAC1B;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;SAC5B;IACH,CAAC;IAED;;OAEG;IACI,8BAAS,GAAhB;QACE,OAAO,IAAI,CAAC,OAAO,CAAC;IACtB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAClC;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAiC;QAC9C,IAAI,IAAI,IAAI,SAAS,IAAI,IAAI,KAAK,EAAE,EAAE;YACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,MAAM,CAAC,CAAC;SACnC;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;;OAGG;IACI,4BAAO,GAAd,UAAe,IAAwB;QACrC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;SACxB;aAAM;YACL,IAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,KAAK
,CAAC,CAAC;YACxC,IAAI,WAAW,KAAK,CAAC,CAAC,EAAE;gBACtB,IAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,EAAE,WAAW,CAAC,CAAC;gBACvD,0FAA0F;gBAC1F,2FAA2F;gBAC3F,IAAI,CAAC,GAAG,CAAC,WAAW,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;aAC9E;iBAAM;gBACL,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;aACxB;SACF;IACH,CAAC;IAED;;;OAGG;IACI,+BAAU,GAAjB,UAAkB,IAAwB;QACxC,IAAI,IAAI,EAAE;YACR,IAAI,WAAW,GAAuB,IAAI,CAAC,OAAO,EAAE,CAAC;YACrD,IAAI,WAAW,EAAE;gBACf,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;oBAC9B,WAAW,IAAI,GAAG,CAAC;iBACpB;gBAED,IAAI,IAAI,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;oBACxB,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;iBAC1B;gBAED,IAAI,GAAG,WAAW,GAAG,IAAI,CAAC;aAC3B;YACD,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;SACxB;IACH,CAAC;IAED;;OAEG;IACI,4BAAO,GAAd;QACE,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACI,6BAAQ,GAAf,UAAgB,KAAyB;QACvC,IAAI,CAAC,KAAK,EAAE;YACV,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;SACzB;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACrC;IACH,CAAC;IAED;;;;OAIG;IACI,sCAAiB,GAAxB,UAAyB,kBAA0B,EAAE,mBAAwB;QAC3E,IAAI,kBAAkB,EAAE;YACtB,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;gBAChB,IAAI,CAAC,MAAM,GAAG,IAAI,QAAQ,EAAE,CAAC;aAC9B;YACD,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,EAAE,mBAAmB,CAAC,CAAC;SAC1D;IACH,CAAC;IAED;;;OAGG;IACI,2CAAsB,GAA7B,UAA8B,kBAA0B;QACtD,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,6BAAQ,GAAf;QACE,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;IAC1D,CAAC;IAED;;OAEG;IACK,wBAAG,GAAX,UAAY,IAAY,EAAE,UAA6B;QACrD,IAAM,SAAS,GAAG,IAAI,YAAY,CAAC,IAAI,EAAE,UAAU,CAAC,CAAC;QAErD,OAAO,SAAS,CAAC,IAAI,EAAE,EAAE;YACvB,IAAM,KAAK,GAAyB,SAAS,CAAC,OAAO,EAAE,CAAC;YACxD,IAAI,KAAK,EAAE;gBACT,QAAQ,KAAK,CAAC,IAAI,EAAE;oBAClB,KAAK,QAAQ;wBACX,IAAI,CAAC,OAAO,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACvC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBACrC,MAAM;oBAER,KAAK,MAAM;wBACT,IAAM,SAAS,GAAuB,KAAK,CAAC,IAAI,IAAI,SAAS,CAAC;wBAC9D,IAAI,CAAC,IAAI,CAAC,KAAK,IAAI,IAAI,CAAC,KAAK,KAAK,GAAG,IAAI,SAAS,KAAK,GAAG,EAAE;4BAC1D,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC;yBACxB;wBACD,MAAM;oBAER,KAAK,OAAO;wBACV,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBACzC,MAAM;oBAER;wBACE,MAAM,IAAI,KAAK,CAAC,gCAA8B,KAAK,CAAC,IAAM,CAAC,CAAC;iBAC/D;aACF;SACF;IACH,CAAC;IAEM,6BAAQ,GAAf;QACE,IAAI,MAAM,GAAG,EAAE,CAAC;QAEhB,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,MAAM,IAAO,IAAI,CAAC,OAAO,QAAK,CAAC;SAChC;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,MAAM,IAAI,MAAI,IAAI,CAAC,KAAO,CAAC;SAC5B;QAED,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;gBAC/B,MAAM,IAAI,GAAG,CAAC;aACf;YACD,MAAM,IAAI,IAAI,CAAC,KAAK,CAAC;SACtB;QAED,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,EAAE;YACpC,MAAM,IAAI,MAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAI,CAAC;SACxC;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;OAGG;IACI,+BAAU,GAAjB,UAAkB,WAAmB,EAAE,YAAoB;QACzD,IAAI,WAAW,EAAE;YACf,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACxE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;YACpE,IAAI,CAAC,QAAQ,CAAC,UAAU,CAAC,I
AAI,CAAC,QAAQ,EAAE,EAAE,WAAW,EAAE,YAAY,CAAC,CAAC,CAAC;SACvE;IACH,CAAC;IAEa,gBAAK,GAAnB,UAAoB,IAAY;QAC9B,IAAM,MAAM,GAAG,IAAI,UAAU,EAAE,CAAC;QAChC,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;QACnC,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,iBAAC;AAAD,CAAC,AAjPD,IAiPC;;AAMD;IACE,kBAAmC,IAAY,EAAkB,IAAkB;QAAhD,SAAI,GAAJ,IAAI,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAc;IAAG,CAAC;IAEzE,eAAM,GAApB,UAAqB,IAAY;QAC/B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;IACtC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,aAAI,GAAlB,UAAmB,IAAY;QAC7B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,MAAM,CAAC,CAAC;IACpC,CAAC;IAEa,cAAK,GAAnB,UAAoB,IAAY;QAC9B,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IACrC,CAAC;IACH,eAAC;AAAD,CAAC,AAtBD,IAsBC;;AAED;;;GAGG;AACH,MAAM,UAAU,uBAAuB,CAAC,SAAiB;IACvD,IAAM,aAAa,GAAW,SAAS,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IACtD,OAAO,CACL,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,EAAE,CAAC,CAAC,SAAS;QAChE,CAAC,EAAE,CAAC,SAAS,IAAI,aAAa,IAAI,aAAa,IAAI,GAAG,CAAC,CAAC,SAAS,CAClE,CAAC;AACJ,CAAC;AAED;;GAEG;AACH;IAME,sBAA4B,KAAa,EAAE,KAAyB;QAAxC,UAAK,GAAL,KAAK,CAAQ;QACvC,IAAI,CAAC,WAAW,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;QAC5C,IAAI,CAAC,aAAa,GAAG,KAAK,IAAI,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,gBAAgB,CAAC;QACnE,IAAI,CAAC,aAAa,GAAG,CAAC,CAAC;IACzB,CAAC;IAED;;;OAGG;IACI,8BAAO,GAAd;QACE,OAAO,IAAI,CAAC,aAAa,CAAC;IAC5B,CAAC;IAED;;OAEG;IACI,2BAAI,GAAX;QACE,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,EAAE;YAC9B,IAAI,CAAC,aAAa,GAAG,SAAS,CAAC;SAChC;aAAM;YACL,QAAQ,IAAI,CAAC,aAAa,EAAE;gBAC1B,KAAK,QAAQ;oBACX,UAAU,CAAC,IAAI,CAAC,CAAC;oBACjB,MAAM;gBAER,KAAK,gBAAgB;oBACnB,gBAAgB,CAAC,IAAI,CAAC,CAAC;oBACvB,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,MAAM;oBACT,QAAQ,CAAC,IAAI,CAAC,CAAC;oBACf,MAAM;gBAER,KAAK,OAAO;oBACV,SAAS,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBAER;oBACE,MAAM,IAAI,KAAK,CAAC,qCAAmC,IAAI,CAAC,aAAe,CAAC,CAAC;aAC5E;SACF;QACD,OAAO,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC;IAC9B,CAAC;IACH,mBAAC;AAAD,CAAC,AA1DD,IA0DC;;AAED;;GAEG;AACH,SAAS,aAAa,CAAC,SAAuB;IAC5C,IAAI,MAAM,GAAG,EAAE,CAAC;IAChB,IAAI,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,EAAE;QACnD,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;QAC5D,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;KACjD;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,aAAa,GAAG,SAAS,CAAC,WAAW,CAAC;AACzD,CAAC;AAED;;GAEG;AACH,SAAS,mBAAmB,CAAC,SAAuB;IAClD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,aAAa,CAAC,CAAC;AAClD,CAAC;AAED;;;GAGG;AACH,SAAS,aAAa,CAAC,SAAuB,EAAE,IAAa;IAC3D,IAAI,mBAAmB,CAAC,SAAS,CAAC,EAAE;QAClC,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,CAAC,CAAC;SACV;QACD,SAAS,CAAC,aAAa,IAAI,IAAI,CAAC;KACjC;AACH,CAAC;AAED;;;GAGG;AACH,SAAS,cAAc,CAAC,SAAuB,EAAE,gBAAwB;IACvE,IAAI,QAAQ,GAAW,SAAS,CAAC,aAAa,GAAG,gBAAgB,CAAC;IAClE,IAAI,SAAS,CAAC,WAAW,GAAG,QAAQ,EAAE;QACpC,QAAQ,GAAG,SAAS,CAAC,WAAW,CAAC;KAClC;IACD,OAAO,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,SAAS,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,SAAS,SAAS,CAAC,SAAuB,EAAE,SAAyC;IACnF,IAAI,MAAM,GAAG,EAAE,CAAC;IAEhB,OAAO,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACrC,IAAM,gBAAgB,GAAW,mBAAmB,CAAC,SAAS,CAAC,CAAC;QAChE,IAAI,CAAC,SAAS,CAAC,gBAAgB,CAAC,EAAE;YAChC,MAAM;SACP;aAAM;YACL,MAAM,IAAI,gBAAgB,CAAC;YAC3B,aAAa,CAAC,SAAS,CAAC,CAAC;SAC1B;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;GAGG;AACH,SAAS,sBAAsB,CAAC,SAAuB;IACrD,OAAO,SAAS,CAAC,SAAS,EAAE,UAAC,S
AAiB,IAAK,OAAA,uBAAuB,CAAC,SAAS,CAAC,EAAlC,CAAkC,CAAC,CAAC;AACzF,CAAC;AAED;;;GAGG;AACH,SAAS,kBAAkB,CAAC,SAAuB;IAAE,+BAAkC;SAAlC,UAAkC,EAAlC,qBAAkC,EAAlC,IAAkC;QAAlC,8CAAkC;;IACrF,OAAO,SAAS,CACd,SAAS,EACT,UAAC,SAAiB,IAAK,OAAA,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAA/C,CAA+C,CACvE,CAAC;AACJ,CAAC;AAED,SAAS,UAAU,CAAC,SAAuB;IACzC,IAAM,MAAM,GAAW,sBAAsB,CAAC,SAAS,CAAC,CAAC;IACzD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;AACH,CAAC;AAED,SAAS,gBAAgB,CAAC,SAAuB;IAC/C,IAAM,YAAY,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC1E,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC;YACxD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;YACtD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;KACF;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;QACtD,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;YAC1C,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;SAClC;aAAM;YACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;SACnC;KACF;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,cAAc,CAAC,SAAS,EAAE,CAAC,CAAC,KAAK,KAAK,EAAE;QAC1C,aAAa,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;KAC7B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAClE,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC;IAC7D,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QACjD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,QAAQ,CAAC,SAAuB;IACvC,IAAM,IAAI,GAAW,kBAAkB,CAAC,SAAS,EAAE,GAAG,CAAC,CAAC;IACxD,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE9C,IAAI,CAAC,mBAAmB,CAAC,SAAS,CAAC,EAAE;QACnC,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;KAClC;SAAM;QACL,SAAS,CAAC,aAAa,GAAG,OAAO,CAAC;KACnC;AACH,CAAC;AAED,SAAS,SAAS,CAAC,SAAuB;IACxC,IAAI,mBAAmB,CAAC,SAAS,CAAC,KAAK,GAAG,EAAE;QAC1C,aAAa,CAAC,SAAS,CAAC,CAAC;KAC1B;IAED,IAAM,KAAK,GAAW,aAAa,CAAC,SAAS,CAAC,CAAC;IAC/C,SAAS,CAAC,aAAa,GAAG,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;IAChD,SAAS,CAAC,aAAa,GAAG,MAAM,CAAC;AACnC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts new file mode 100644 index 0000000000..5c40c44b19 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. 
+ * @param value the Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map new file mode 100644 index 0000000000..89074b251f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/base64.browser.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAMzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAOtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js new file mode 100644 index 0000000000..1c76f7b257 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export function encodeString(value) { + return btoa(value); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value) { + var str = ""; + for (var i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} +/** + * Decodes a base64 string into a byte array. 
+ * @param value the base64 string to decode + */ +export function decodeString(value) { + var byteString = atob(value); + var arr = new Uint8Array(byteString.length); + for (var i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} +//# sourceMappingURL=base64.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map new file mode 100644 index 0000000000..47aea45f58 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.browser.js","sourceRoot":"","sources":["../../../lib/util/base64.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,IAAI,CAAC,KAAK,CAAC,CAAC;AACrB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,IAAI,GAAG,GAAG,EAAE,CAAC;IACb,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACrC,GAAG,IAAI,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;KACtC;IACD,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC;AACnB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;IAC/B,IAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;IAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;KACnC;IACD,OAAO,GAAG,CAAC;AACb,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts new file mode 100644 index 0000000000..1dfd806b76 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts @@ -0,0 +1,16 @@ +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export declare function encodeString(value: string): string; +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export declare function encodeByteArray(value: Uint8Array): string; +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export declare function decodeString(value: string): Uint8Array; +//# sourceMappingURL=base64.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map new file mode 100644 index 0000000000..3af7f47b96 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.d.ts","sourceRoot":"","sources":["../../../lib/util/base64.ts"],"names":[],"mappings":"AAGA;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,CAElD;AAED;;;GAGG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,UAAU,GAAG,MAAM,CAKzD;AAED;;;GAGG;AACH,wBAAgB,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,UAAU,CAEtD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.js b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js new file mode 100644 index 0000000000..6626510c47 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +/** + * Encodes a string in base64 format. 
+ * @param value the string to encode + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value) { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + var bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map new file mode 100644 index 0000000000..0ca9f5454d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/base64.js.map @@ -0,0 +1 @@ +{"version":3,"file":"base64.js","sourceRoot":"","sources":["../../../lib/util/base64.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC/C,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,KAAiB;IAC/C,kGAAkG;IAClG,mGAAmG;IACnG,IAAM,WAAW,GAAG,KAAK,YAAY,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,MAAqB,CAAC,CAAC;IAC/F,OAAO,WAAW,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,YAAY,CAAC,KAAa;IACxC,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts new file mode 100644 index 0000000000..aee2743d0a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts @@ -0,0 +1,94 @@ +export declare const Constants: { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: string; + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: string; + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: string; + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: string; + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: string; + /** + * Specifies NO Proxy. + */ + NO_PROXY: string; + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: string; + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: string; + GET: string; + DELETE: string; + POST: string; + MERGE: string; + HEAD: string; + PATCH: string; + }; + StatusCodes: { + TooManyRequests: number; + }; + }; + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: string; + AUTHORIZATION_SCHEME: string; + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: string; + /** + * The UserAgent header. 
+ * + * @const + * @type {string} + */ + USER_AGENT: string; + }; +}; +//# sourceMappingURL=constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map new file mode 100644 index 0000000000..4184fb190e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.d.ts","sourceRoot":"","sources":["../../../lib/util/constants.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,SAAS;IACpB;;;;OAIG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;;;;OAKG;;IAGH;;OAEG;;IAGH;;OAEG;;;QAID;;;;;WAKG;;;;;;;;;;;;;;IAgBL;;OAEG;;QAED;;;;;WAKG;;;QAKH;;;;;;;WAOG;;QAGH;;;;;WAKG;;;CAGN,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.js b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js new file mode 100644 index 0000000000..186ebcb01a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +export var Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.2", + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + StatusCodes: { + TooManyRequests: 429, + }, + }, + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + AUTHORIZATION_SCHEME: "Bearer", + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + /** + * The UserAgent header. 
+ * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map new file mode 100644 index 0000000000..3118dae42b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../lib/util/constants.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,MAAM,CAAC,IAAM,SAAS,GAAG;IACvB;;;;OAIG;IACH,aAAa,EAAE,OAAO;IAEtB;;;;;OAKG;IACH,IAAI,EAAE,OAAO;IAEb;;;;;OAKG;IACH,KAAK,EAAE,QAAQ;IAEf;;;;;OAKG;IACH,UAAU,EAAE,YAAY;IAExB;;;;;OAKG;IACH,WAAW,EAAE,aAAa;IAE1B;;OAEG;IACH,QAAQ,EAAE,UAAU;IAEpB;;OAEG;IACH,SAAS,EAAE,WAAW;IAEtB,aAAa,EAAE;QACb;;;;;WAKG;QACH,SAAS,EAAE;YACT,GAAG,EAAE,KAAK;YACV,GAAG,EAAE,KAAK;YACV,MAAM,EAAE,QAAQ;YAChB,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,KAAK,EAAE,OAAO;SACf;QAED,WAAW,EAAE;YACX,eAAe,EAAE,GAAG;SACrB;KACF;IAED;;OAEG;IACH,eAAe,EAAE;QACf;;;;;WAKG;QACH,aAAa,EAAE,eAAe;QAE9B,oBAAoB,EAAE,QAAQ;QAE9B;;;;;;;WAOG;QACH,WAAW,EAAE,aAAa;QAE1B;;;;;WAKG;QACH,UAAU,EAAE,YAAY;KACzB;CACF,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts new file mode 100644 index 0000000000..56eabdc77e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts @@ -0,0 +1,157 @@ +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export declare const isNode: boolean; +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export declare function urlIsHTTPS(urlToCheck: { + protocol: string; +}): boolean; +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export declare function encodeUri(uri: string): string; +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export declare function stripResponse(response: HttpOperationResponse): any; +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export declare function stripRequest(request: WebResourceLike): WebResourceLike; +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export declare function isValidUuid(uuid: string): boolean; +/** + * Provides an array of values of an object. For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. 
+ */ +export declare function objectValues(obj: { + [key: string]: any; +}): any[]; +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export declare function generateUuid(): string; +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export declare function executePromisesSequentially(promiseFactories: Array, kickstart: any): Promise; +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export declare function mergeObjects(source: { + [key: string]: any; +}, target: { + [key: string]: any; +}): { + [key: string]: any; +}; +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export declare function delay(t: number, value?: T): Promise; +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null. + * @param {TResult} [result] The deserialized response body if an error did not occur. + * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur. + * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur. + */ + (err: Error | RestError | null, result?: TResult, request?: WebResourceLike, response?: HttpOperationResponse): void; +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +export declare function promiseToCallback(promise: Promise): Function; +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +export declare function promiseToServiceCallback(promise: Promise): Function; +export declare function prepareXMLRootList(obj: any, elementName: string): { + [x: string]: any; +}; +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. + */ +export declare function applyMixins(targetCtor: any, sourceCtors: any[]): void; +/** + * Indicates whether the given string is in ISO 8601 format. 
+ * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +export declare function isDuration(value: string): boolean; +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +export declare function replaceAll(value: string | undefined, searchValue: string, replaceValue: string): string | undefined; +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +export declare function isPrimitiveType(value: any): boolean; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map new file mode 100644 index 0000000000..ebb7618df3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../../../lib/util/utils.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,qBAAqB,EAAE,MAAM,0BAA0B,CAAC;AACjE,OAAO,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AACzC,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD;;GAEG;AACH,eAAO,MAAM,MAAM,SAIM,CAAC;AAE1B;;;;;GAKG;AACH,wBAAgB,UAAU,CAAC,UAAU,EAAE;IAAE,QAAQ,EAAE,MAAM,CAAA;CAAE,GAAG,OAAO,CAEpE;AAED;;;;;GAKG;AACH,wBAAgB,SAAS,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAO7C;AAED;;;;;;;GAOG;AACH,wBAAgB,aAAa,CAAC,QAAQ,EAAE,qBAAqB,GAAG,GAAG,CAMlE;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,OAAO,EAAE,eAAe,GAAG,eAAe,CAMtE;AAED;;;;;;GAMG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,CAMjD;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,GAAG,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,GAAG,GAAG,EAAE,CAkB/D;AAED;;;;GAIG;AACH,wBAAgB,YAAY,IAAI,MAAM,CAErC;AAED;;;;;;;;;;GAUG;AACH,wBAAgB,2BAA2B,CAAC,gBAAgB,EAAE,KAAK,CAAC,GAAG,CAAC,EAAE,SAAS,EAAE,GAAG,gBAMvF;AAED;;;;;;;GAOG;AACH,wBAAgB,YAAY,CAAC,MAAM,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE,EAAE,MAAM,EAAE;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;CAAE;;EAK1F;AAED;;;;;GAKG;AACH,wBAAgB,KAAK,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,KAAK,CAAC,EAAE,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAEzD;AAED;;GAEG;AACH,MAAM,WAAW,eAAe,CAAC,OAAO;IACtC;;;;;;OAMG;IACH,CACE,GAAG,EAAE,KAAK,GAAG,SAAS,GAAG,IAAI,EAC7B,MAAM,CAAC,EAAE,OAAO,EAChB,OAAO,CAAC,EAAE,eAAe,EACzB,QAAQ,CAAC,EAAE,qBAAqB,GAC/B,IAAI,CAAC;CACT;AAED;;;;;GAKG;AACH,wBAAgB,iBAAiB,CAAC,OAAO,EAAE,OAAO,CAAC,GAAG,CAAC,GAAG,QAAQ,CAcjE;AAED;;;;GAIG;AACH,wBAAgB,wBAAwB,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,CAAC,qBAAqB,CAAC,GAAG,QAAQ,CAc7F;AAED,wBAAgB,kBAAkB,CAAC,GAAG,EAAE,GAAG,EAAE,WAAW,EAAE,MAAM;;EAK/D;AAED;;;;GAIG;AACH,wBAAgB,WAAW,CAAC,UAAU,EAAE,GAAG,EAAE,WAAW,EAAE,GAAG,EAAE,GAAG,IAAI,CAMrE;AAID;;;;GAIG;AACH,wBAAgB,UAAU,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAEjD;AAED;;;;;;GAMG;AACH,wBAAgB,UAAU,CACxB,KAAK,EAAE,MAAM,GAAG,SAAS,EACzB,WAAW,EAAE,MAAM,EACnB,YAAY,EAAE,MAAM,GACnB,MAAM,GAAG,SAAS,CAEpB;AAED;;;;;GAKG;AACH,wBAAgB,eAAe,CAAC,KAAK,EAAE,GAAG,GAAG,OAAO,CAEnD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.js 
b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js new file mode 100644 index 0000000000..24877684b8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js @@ -0,0 +1,229 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { v4 as uuidv4 } from "uuid"; +import { Constants } from "./constants"; +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export var isNode = typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck) { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export function encodeUri(uri) { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export function stripResponse(response) { + var strippedResponse = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export function stripRequest(request) { + var strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid) { + var validUuidRegex = new RegExp("^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", "ig"); + return validUuidRegex.test(uuid); +} +/** + * Provides an array of values of an object. For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. + */ +export function objectValues(obj) { + var result = []; + if (obj && obj instanceof Object) { + for (var key in obj) { + if (obj.hasOwnProperty(key)) { + result.push(obj[key]); + } + } + } + else { + throw new Error("The provided object " + JSON.stringify(obj, undefined, 2) + " is not a valid object that can be " + "enumerated to provide its values as an array."); + } + return result; +} +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export function generateUuid() { + return uuidv4(); +} +/** + * Executes an array of promises sequentially. 
Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories, kickstart) { + var result = Promise.resolve(kickstart); + promiseFactories.forEach(function (promiseFactory) { + result = result.then(promiseFactory); + }); + return result; +} +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export function mergeObjects(source, target) { + Object.keys(source).forEach(function (key) { + target[key] = source[key]; + }); + return target; +} +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export function delay(t, value) { + return new Promise(function (resolve) { return setTimeout(function () { return resolve(value); }, t); }); +} +/** + * Converts a Promise to a callback. + * @param {Promise} promise The Promise to be converted to a callback + * @returns {Function} A function that takes the callback (cb: Function): void + * @deprecated generated code should instead depend on responseToBody + */ +export function promiseToCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + cb(undefined, data); + }, function (err) { + cb(err); + }); + }; +} +/** + * Converts a Promise to a service callback. + * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback + * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void + */ +export function promiseToServiceCallback(promise) { + if (typeof promise.then !== "function") { + throw new Error("The provided input is not a Promise."); + } + return function (cb) { + promise.then(function (data) { + process.nextTick(cb, undefined, data.parsedBody, data.request, data); + }, function (err) { + process.nextTick(cb, err); + }); + }; +} +export function prepareXMLRootList(obj, elementName) { + var _a; + if (!Array.isArray(obj)) { + obj = [obj]; + } + return _a = {}, _a[elementName] = obj, _a; +} +/** + * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor + * @param {object} targetCtor The target object on which the properties need to be applied. + * @param {Array} sourceCtors An array of source objects from which the properties need to be taken. 
+ */ +export function applyMixins(targetCtor, sourceCtors) { + sourceCtors.forEach(function (sourceCtors) { + Object.getOwnPropertyNames(sourceCtors.prototype).forEach(function (name) { + targetCtor.prototype[name] = sourceCtors.prototype[name]; + }); + }); +} +var validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Indicates whether the given string is in ISO 8601 format. + * @param {string} value The value to be validated for ISO 8601 duration format. + * @return {boolean} `true` if valid, `false` otherwise. + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +/** + * Replace all of the instances of searchValue in value with the provided replaceValue. + * @param {string | undefined} value The value to search and replace in. + * @param {string} searchValue The value to search for in the value argument. + * @param {string} replaceValue The value to replace searchValue with in the value argument. + * @returns {string | undefined} The value where each instance of searchValue was replaced with replacedValue. + */ +export function replaceAll(value, searchValue, replaceValue) { + return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); +} +/** + * Determines whether the given enity is a basic/primitive type + * (string, number, boolean, null, undefined). + * @param value Any entity + * @return boolean - true is it is primitive type, false otherwise. + */ +export function isPrimitiveType(value) { + return (typeof value !== "object" && typeof value !== "function") || value === null; +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map new file mode 100644 index 0000000000..a59b702085 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/utils.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../lib/util/utils.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,EAAE,IAAI,MAAM,EAAE,MAAM,MAAM,CAAC;AAIpC,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH,MAAM,CAAC,IAAM,MAAM,GACjB,OAAO,OAAO,KAAK,WAAW;IAC9B,CAAC,CAAC,OAAO,CAAC,OAAO;IACjB,CAAC,CAAC,OAAO,CAAC,QAAQ;IAClB,CAAC,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;AAE1B;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,UAAgC;IACzD,OAAO,UAAU,CAAC,QAAQ,CAAC,WAAW,EAAE,KAAK,SAAS,CAAC,KAAK,CAAC;AAC/D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,OAAO,kBAAkB,CAAC,GAAG,CAAC;SAC3B,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;SACpB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,aAAa,CAAC,QAA+B;IAC3D,IAAM,gBAAgB,GAAQ,EAAE,CAAC;IACjC,gBAAgB,CAAC,IAAI,GAAG,QAAQ,CAAC,UAAU,CAAC;IAC5C,gBAAgB,CAAC,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;IAC5C,gBAAgB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC;IAC1C,OAAO,gBAAgB,CAAC;AAC1B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,OAAwB;IACnD,IAAM,eAAe,GAAG,OAAO,CAAC,KAAK,EAAE,CAAC;IACxC,IAAI,eAAe,CAAC,OAAO,EAAE;QAC3B,eAAe,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;KACjD;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,IAAM,cAAc,GAAG,IAAI,MAAM,CAC/B,+EAA+E,EAC/E,IAAI,CACL,CAAC;IACF,OAAO,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,GAA2B;IACtD,IAAM,MAAM,GAAU,EAAE,CAAC;IACzB,IAAI,GAAG,IAAI,GAAG,YAAY,MAAM,EAAE;QAChC,KAAK,IAAM,GAAG,IAAI,GAAG,EAAE;YACrB,IAAI,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,EAAE;gBAC3B,MAAM,CAAC,IAAI,CAAO,GAAI,CAAC,GAAG,CAAC,CAAC,CAAC;aAC9B;SACF;KACF;SAAM;QACL,MAAM,IAAI,KAAK,CACb,yBAAuB,IAAI,CAAC,SAAS,CACnC,GAAG,EACH,SAAS,EACT,CAAC,CACF,wCAAqC,GAAG,+CAA+C,CACzF,CAAC;KACH;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY;IAC1B,OAAO,MAAM,EAAE,CAAC;AAClB,CAAC;AAED;;;;;;;;;;GAUG;AACH,MAAM,UAAU,2BAA2B,CAAC,gBAA4B,EAAE,SAAc;IACtF,IAAI,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxC,gBAAgB,CAAC,OAAO,CAAC,UAAC,cAAc;QACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;IACvC,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,YAAY,CAAC,MAA8B,EAAE,MAA8B;IACzF,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,UAAC,GAAG;QAC9B,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC;IAC5B,CAAC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,KAAK,CAAI,CAAS,EAAE,KAAS;IAC3C,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,IAAK,OAAA,UAAU,CAAC,cAAM,OAAA,OAAO,CAAC,KAAK,CAAC,EAAd,CAAc,EAAE,CAAC,CAAC,EAAnC,CAAmC,CAAC,CAAC;AACvE,CAAC;AAqBD;;;;;GAKG;AACH,MAAM,UAAU,iBAAiB,CAAC,OAAqB;IACrD,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAY;QAClB,OAAO,CAAC,IAAI,CACV,UAAC,IAAS;YACR,EAAE,CAAC,SAAS,EAAE,IAAI,CAAC,CAAC;QACtB,CAAC,EACD,UAAC,GAAU;YACT,EAAE,CAAC,GAAG,CAAC,CAAC;QACV,CAAC,CACF,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,wBAAwB,CAAI,OAAuC;IACjF,IAAI,OAAO,OAAO,CAAC,IAAI,KAAK,UAAU,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,sCAAsC,CAAC,CAAC;KACzD;IACD,OAAO,UAAC,EAAsB;QAC5B,OAAO,CAAC,IAAI,CACV,UAAC,IAA2B;YAC1B,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,UAAe,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QAC5E,CAAC,EACD,UAAC,GAAU;YACT,OAAO,CAAC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,CAAC;QAC5B,CAAC,CACF,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,GAAQ,EAAE,WAAmB;;IAC9D,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QACvB,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;KACb;IACD,gBAAS,GAAC,WAAW,IAAG,GAAG,KAAG;AAChC,
CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,WAAW,CAAC,UAAe,EAAE,WAAkB;IAC7D,WAAW,CAAC,OAAO,CAAC,UAAC,WAAW;QAC9B,MAAM,CAAC,mBAAmB,CAAC,WAAW,CAAC,SAAS,CAAC,CAAC,OAAO,CAAC,UAAC,IAAI;YAC7D,UAAU,CAAC,SAAS,CAAC,IAAI,CAAC,GAAG,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QAC3D,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;AACL,CAAC;AAED,IAAM,mBAAmB,GAAG,qKAAqK,CAAC;AAElM;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,KAAa;IACtC,OAAO,mBAAmB,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACzC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CACxB,KAAyB,EACzB,WAAmB,EACnB,YAAoB;IAEpB,OAAO,CAAC,KAAK,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,IAAI,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC;AAC5F,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,KAAU;IACxC,OAAO,CAAC,OAAO,KAAK,KAAK,QAAQ,IAAI,OAAO,KAAK,KAAK,UAAU,CAAC,IAAI,KAAK,KAAK,IAAI,CAAC;AACtF,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts new file mode 100644 index 0000000000..a9d4674c78 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts @@ -0,0 +1,5 @@ +export declare function parseXML(str: string): Promise; +export declare function stringifyXML(obj: any, opts?: { + rootName?: string; +}): string; +//# sourceMappingURL=xml.browser.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map new file mode 100644 index 0000000000..671c2174d8 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAkBA,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAUlD;AAwFD,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAMlE"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js new file mode 100644 index 0000000000..f743518740 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js @@ -0,0 +1,160 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +var _a, _b; +var parser = new DOMParser(); +// Policy to make our code Trusted Types compliant. +// https://github.com/w3c/webappsec-trusted-types +// We are calling DOMParser.parseFromString() to parse XML payload from Azure services. +// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing +// according to the spec. There are no HTML/XSS security concerns on the usage of +// parseFromString() here. +var ttPolicy; +if (typeof self.trustedTypes !== "undefined") { + ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", { + createHTML: function (s) { return s; }, + }); +} +export function parseXML(str) { + var _a; + try { + var dom = parser.parseFromString(((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML(str)) !== null && _a !== void 0 ? _a : str), "application/xml"); + throwIfError(dom); + var obj = domToObject(dom.childNodes[0]); + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } +} +var errorNS = ""; +try { + var invalidXML = ((_a = ttPolicy === null || ttPolicy === void 0 ? 
void 0 : ttPolicy.createHTML("INVALID")) !== null && _a !== void 0 ? _a : "INVALID"); + errorNS = (_b = parser.parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _b !== void 0 ? _b : ""; +} +catch (ignored) { + // Most browsers will return a document containing , but IE will throw. +} +function throwIfError(dom) { + if (errorNS) { + var parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror"); + if (parserErrors.length) { + throw new Error(parserErrors.item(0).innerHTML); + } + } +} +function isElement(node) { + return !!node.attributes; +} +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? node : undefined; +} +function domToObject(node) { + var result = {}; + var childNodeCount = node.childNodes.length; + var firstChildNode = node.childNodes[0]; + var onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + var elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result["$"] = {}; + for (var i = 0; i < elementWithAttributes.attributes.length; i++) { + var attr = elementWithAttributes.attributes[i]; + result["$"][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result["_"] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (var i = 0; i < childNodeCount; i++) { + var child = node.childNodes[i]; + // Ignore leading/trailing whitespace nodes + if (child.nodeType !== Node.TEXT_NODE) { + var childObject = domToObject(child); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; +} +// tslint:disable-next-line:no-null-keyword +var doc = document.implementation.createDocument(null, null, null); +var serializer = new XMLSerializer(); +export function stringifyXML(obj, opts) { + var rootName = (opts && opts.rootName) || "root"; + var dom = buildNode(obj, rootName)[0]; + return ('' + serializer.serializeToString(dom)); +} +function buildAttributes(attrs) { + var result = []; + for (var _i = 0, _a = Object.keys(attrs); _i < _a.length; _i++) { + var key = _a[_i]; + var attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} +function buildNode(obj, elementName) { + if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") { + var elem = doc.createElement(elementName); + elem.textContent = obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + var result = []; + for (var _i = 0, obj_1 = obj; _i < obj_1.length; _i++) { + var arrayElem = obj_1[_i]; + for (var _a = 0, _b = buildNode(arrayElem, elementName); _a < _b.length; _a++) { + var child = _b[_a]; + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + var elem = doc.createElement(elementName); + for (var _c = 0, _d = Object.keys(obj); _c < _d.length; _c++) { + var key = _d[_c]; + if (key === "$") { + for (var _e = 0, _f = 
buildAttributes(obj[key]); _e < _f.length; _e++) { + var attr = _f[_e]; + elem.attributes.setNamedItem(attr); + } + } + else { + for (var _g = 0, _h = buildNode(obj[key], key); _g < _h.length; _g++) { + var child = _h[_g]; + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error("Illegal value passed to buildObject: " + obj); + } +} +//# sourceMappingURL=xml.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map new file mode 100644 index 0000000000..9c46c6be93 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.browser.js","sourceRoot":"","sources":["../../../lib/util/xml.browser.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;;AAE/F,IAAM,MAAM,GAAG,IAAI,SAAS,EAAE,CAAC;AAE/B,mDAAmD;AACnD,mDAAmD;AACnD,uFAAuF;AACvF,qFAAqF;AACrF,kFAAkF;AAClF,0BAA0B;AAC1B,IAAI,QAA2D,CAAC;AAChE,IAAI,OAAO,IAAI,CAAC,YAAY,KAAK,WAAW,EAAE;IAC5C,QAAQ,GAAG,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,+BAA+B,EAAE;QACzE,UAAU,EAAE,UAAC,CAAC,IAAK,OAAA,CAAC,EAAD,CAAC;KACrB,CAAC,CAAC;CACJ;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW;;IAClC,IAAI;QACF,IAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,OAAC,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,GAAG,oCAAK,GAAG,CAAW,EAAE,iBAAiB,CAAC,CAAC;QACpG,YAAY,CAAC,GAAG,CAAC,CAAC;QAElB,IAAM,GAAG,GAAG,WAAW,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3C,OAAO,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;KAC7B;IAAC,OAAO,GAAG,EAAE;QACZ,OAAO,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;KAC5B;AACH,CAAC;AAED,IAAI,OAAO,GAAG,EAAE,CAAC;AACjB,IAAI;IACF,IAAM,UAAU,GAAG,OAAC,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,UAAU,CAAC,SAAS,oCAAK,SAAS,CAAW,CAAC;IAC5E,OAAO,SACL,MAAM,CAAC,eAAe,CAAC,UAAU,EAAE,UAAU,CAAC,CAAC,oBAAoB,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC;SAClF,YAAa,mCAAI,EAAE,CAAC;CAC1B;AAAC,OAAO,OAAO,EAAE;IAChB,oFAAoF;CACrF;AAED,SAAS,YAAY,CAAC,GAAa;IACjC,IAAI,OAAO,EAAE;QACX,IAAM,YAAY,GAAG,GAAG,CAAC,sBAAsB,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;QACxE,IAAI,YAAY,CAAC,MAAM,EAAE;YACvB,MAAM,IAAI,KAAK,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC,CAAE,CAAC,SAAS,CAAC,CAAC;SAClD;KACF;AACH,CAAC;AAED,SAAS,SAAS,CAAC,IAAU;IAC3B,OAAO,CAAC,CAAE,IAAgB,CAAC,UAAU,CAAC;AACxC,CAAC;AAED;;;GAGG;AACH,SAAS,uBAAuB,CAAC,IAAU;IACzC,OAAO,SAAS,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;AACpE,CAAC;AAED,SAAS,WAAW,CAAC,IAAU;IAC7B,IAAI,MAAM,GAAQ,EAAE,CAAC;IAErB,IAAM,cAAc,GAAW,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;IAEtD,IAAM,cAAc,GAAS,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;IAChD,IAAM,kBAAkB,GACtB,CAAC,cAAc;QACb,cAAc,KAAK,CAAC;QACpB,cAAc,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS;QAC1C,cAAc,CAAC,SAAS,CAAC;QAC3B,SAAS,CAAC;IAEZ,IAAM,qBAAqB,GAAwB,uBAAuB,CAAC,IAAI,CAAC,CAAC;IACjF,IAAI,qBAAqB,EAAE;QACzB,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QAEjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,qBAAqB,CAAC,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAChE,IAAM,IAAI,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjD,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,SAAS,CAAC;SAC7C;QAED,IAAI,kBAAkB,EAAE;YACtB,MAAM,CAAC,GAAG,CAAC,GAAG,kBAAkB,CAAC;SAClC;KACF;SAAM,IAAI,cAAc,KAAK,CAAC,EAAE;QAC/B,MAAM,GAAG,EAAE,CAAC;KACb;SAAM,IAAI,kBAAkB,EAAE;QAC7B,MAAM,GAAG,kBAAkB,CAAC;KAC7B;IAED,IAAI,CAAC,kBAAkB,EAAE;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;YACvC,IAAM,KAAK,GAAG,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;YACjC,2CAA2C;YAC3C,IAAI,KAAK,CAAC,QAAQ,KAAK,IAAI,CAAC,SAAS,EAAE;gBACrC,IAAM,WAAW,GAAQ,WAAW,CAAC,KAAK,CAAC,CAAC;gBAC5C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE;oBAC3B,MAAM,CAAC,KAAK,CAAC,QAAQ,C
AAC,GAAG,WAAW,CAAC;iBACtC;qBAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,EAAE;oBAChD,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;iBAC1C;qBAAM;oBACL,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC,CAAC;iBAChE;aACF;SACF;KACF;IAED,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,2CAA2C;AAC3C,IAAM,GAAG,GAAG,QAAQ,CAAC,cAAc,CAAC,cAAc,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;AACrE,IAAM,UAAU,GAAG,IAAI,aAAa,EAAE,CAAC;AAEvC,MAAM,UAAU,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,QAAQ,GAAG,CAAC,IAAI,IAAI,IAAI,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC;IACnD,IAAM,GAAG,GAAG,SAAS,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;IACxC,OAAO,CACL,yDAAyD,GAAG,UAAU,CAAC,iBAAiB,CAAC,GAAG,CAAC,CAC9F,CAAC;AACJ,CAAC;AAED,SAAS,eAAe,CAAC,KAAgD;IACvE,IAAM,MAAM,GAAG,EAAE,CAAC;IAClB,KAAkB,UAAkB,EAAlB,KAAA,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,EAAlB,cAAkB,EAAlB,IAAkB,EAAE;QAAjC,IAAM,GAAG,SAAA;QACZ,IAAM,IAAI,GAAG,GAAG,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;QACtC,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACnB;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,SAAS,SAAS,CAAC,GAAQ,EAAE,WAAmB;IAC9C,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,QAAQ,IAAI,OAAO,GAAG,KAAK,SAAS,EAAE;QAClF,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QAC5C,IAAI,CAAC,WAAW,GAAG,GAAG,CAAC,QAAQ,EAAE,CAAC;QAClC,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM,IAAI,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE;QAC7B,IAAM,MAAM,GAAG,EAAE,CAAC;QAClB,KAAwB,UAAG,EAAH,WAAG,EAAH,iBAAG,EAAH,IAAG,EAAE;YAAxB,IAAM,SAAS,YAAA;YAClB,KAAoB,UAAiC,EAAjC,KAAA,SAAS,CAAC,SAAS,EAAE,WAAW,CAAC,EAAjC,cAAiC,EAAjC,IAAiC,EAAE;gBAAlD,IAAM,KAAK,SAAA;gBACd,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACpB;SACF;QACD,OAAO,MAAM,CAAC;KACf;SAAM,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAClC,IAAM,IAAI,GAAG,GAAG,CAAC,aAAa,CAAC,WAAW,CAAC,CAAC;QAC5C,KAAkB,UAAgB,EAAhB,KAAA,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,EAAhB,cAAgB,EAAhB,IAAgB,EAAE;YAA/B,IAAM,GAAG,SAAA;YACZ,IAAI,GAAG,KAAK,GAAG,EAAE;gBACf,KAAmB,UAAyB,EAAzB,KAAA,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,EAAzB,cAAyB,EAAzB,IAAyB,EAAE;oBAAzC,IAAM,IAAI,SAAA;oBACb,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;iBACpC;aACF;iBAAM;gBACL,KAAoB,UAAwB,EAAxB,KAAA,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,EAAxB,cAAwB,EAAxB,IAAwB,EAAE;oBAAzC,IAAM,KAAK,SAAA;oBACd,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;iBACzB;aACF;SACF;QACD,OAAO,CAAC,IAAI,CAAC,CAAC;KACf;SAAM;QACL,MAAM,IAAI,KAAK,CAAC,0CAAwC,GAAK,CAAC,CAAC;KAChE;AACH,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts new file mode 100644 index 0000000000..3290810e7a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts @@ -0,0 +1,5 @@ +export declare function stringifyXML(obj: any, opts?: { + rootName?: string; +}): string; +export declare function parseXML(str: string): Promise; +//# sourceMappingURL=xml.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map new file mode 100644 index 0000000000..e8c534992a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.d.ts","sourceRoot":"","sources":["../../../lib/util/xml.ts"],"names":[],"mappings":"AAKA,wBAAgB,YAAY,CAAC,GAAG,EAAE,GAAG,EAAE,IAAI,CAAC,EAAE;IAAE,QAAQ,CAAC,EAAE,MAAM,CAAA;CAAE,UAQlE;AAED,wBAAgB,QAAQ,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,CAmBlD"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.js 
b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js new file mode 100644 index 0000000000..3e3bd25dc3 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import * as xml2js from "xml2js"; +export function stringifyXML(obj, opts) { + var builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} +export function parseXML(str) { + var xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise(function (resolve, reject) { + if (!str) { + reject(new Error("Document is empty")); + } + else { + xmlParser.parseString(str, function (err, res) { + if (err) { + reject(err); + } + else { + resolve(res); + } + }); + } + }); +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map new file mode 100644 index 0000000000..6cf4bd03fd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/util/xml.js.map @@ -0,0 +1 @@ +{"version":3,"file":"xml.js","sourceRoot":"","sources":["../../../lib/util/xml.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC,MAAM,UAAU,YAAY,CAAC,GAAQ,EAAE,IAA4B;IACjE,IAAM,OAAO,GAAG,IAAI,MAAM,CAAC,OAAO,CAAC;QACjC,QAAQ,EAAE,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,QAAQ;QAC/B,UAAU,EAAE;YACV,MAAM,EAAE,KAAK;SACd;KACF,CAAC,CAAC;IACH,OAAO,OAAO,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAClC,CAAC;AAED,MAAM,UAAU,QAAQ,CAAC,GAAW;IAClC,IAAM,SAAS,GAAG,IAAI,MAAM,CAAC,MAAM,CAAC;QAClC,aAAa,EAAE,KAAK;QACpB,eAAe,EAAE,KAAK;QACtB,YAAY,EAAE,KAAK;KACpB,CAAC,CAAC;IACH,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,GAAG,EAAE;YACR,MAAM,CAAC,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC;SACxC;aAAM;YACL,SAAS,CAAC,WAAW,CAAC,GAAG,EAAE,UAAC,GAAQ,EAAE,GAAQ;gBAC5C,IAAI,GAAG,EAAE;oBACP,MAAM,CAAC,GAAG,CAAC,CAAC;iBACb;qBAAM;oBACL,OAAO,CAAC,GAAG,CAAC,CAAC;iBACd;YACH,CAAC,CAAC,CAAC;SACJ;IACH,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts new file mode 100644 index 0000000000..7eb39af1d2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts @@ -0,0 +1,337 @@ +/// +import { HttpHeadersLike } from "./httpHeaders"; +import { OperationSpec } from "./operationSpec"; +import { Mapper } from "./serializer"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { AgentSettings, ProxySettings } from "./serviceClient"; +export declare type HttpMethods = "GET" | "PUT" | "POST" | "DELETE" | "PATCH" | "HEAD" | "OPTIONS" | "TRACE"; +export declare type HttpRequestBody = Blob | string | ArrayBuffer | ArrayBufferView | (() => NodeJS.ReadableStream); +/** + * Fired in response to upload or download progress. + */ +export declare type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. 
+ */ +export interface AbortSignalLike { + readonly aborted: boolean; + dispatchEvent: (event: Event) => boolean; + onabort: ((this: AbortSignalLike, ev: Event) => any) | null; + addEventListener: (type: "abort", listener: (this: AbortSignalLike, ev: Event) => any, options?: any) => void; + removeEventListener: (type: "abort", listener: (this: AbortSignalLike, ev: Event) => any, options?: any) => void; +} +/** + * An abstraction over a REST call. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + formData?: any; + /** + * A query string represented as an object. + */ + query?: { + [key: string]: any; + }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * HTTP(S) agent configuration. + */ + agentSettings?: AgentSettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + /** + * Used to abort the request later. + */ + abortSignal?: AbortSignalLike; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} +export declare function isWebResourceLike(object: any): object is WebResourceLike; +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. 
+ * + * @constructor + */ +export declare class WebResource { + url: string; + method: HttpMethods; + body?: any; + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. + */ + operationResponseGetter?: (operationSpec: OperationSpec, response: HttpOperationResponse) => undefined | OperationResponse; + formData?: any; + query?: { + [key: string]: any; + }; + operationSpec?: OperationSpec; + withCredentials: boolean; + timeout: number; + proxySettings?: ProxySettings; + keepAlive?: boolean; + agentSettings?: AgentSettings; + redirectLimit?: number; + abortSignal?: AbortSignalLike; + /** Callback which fires upon upload progress. */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + constructor(url?: string, method?: HttpMethods, body?: any, query?: { + [key: string]: any; + }, headers?: { + [key: string]: any; + } | HttpHeadersLike, streamResponseBody?: boolean, withCredentials?: boolean, abortSignal?: AbortSignalLike, timeout?: number, onUploadProgress?: (progress: TransferProgressEvent) => void, onDownloadProgress?: (progress: TransferProgressEvent) => void, proxySettings?: ProxySettings, keepAlive?: boolean, agentSettings?: AgentSettings, redirectLimit?: number); + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. + */ + clone(): WebResource; +} +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". 
+ * Example: + * - query-parameter-value in "object" format: { "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } } + * - query-parameter-value in "string" format: { "query-parameter-name": "query-parameter-value"}. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { + [key: string]: any | ParameterValue; + }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}" + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: { "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } } + * - path-parameter-value in "string" format: { "path-parameter-name": "path-parameter-value" }. + */ + pathParameters?: { + [key: string]: any | ParameterValue; + }; + formData?: { + [key: string]: any; + }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { + [key: string]: any; + }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { + [x: string]: any; + }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: object; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). 
+ */ + bodyIsStream?: boolean; + abortSignal?: AbortSignalLike; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + onUploadProgress?: (progress: TransferProgressEvent) => void; + onDownloadProgress?: (progress: TransferProgressEvent) => void; + streamResponseBody?: boolean; +} +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + value: any; + skipUrlEncoding: boolean; + [key: string]: any; +} +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * @property {object} [customHeaders] User defined custom request headers that + * will be applied before the request is sent. + */ + customHeaders?: { + [key: string]: string; + }; + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + [key: string]: any; +} +//# sourceMappingURL=webResource.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map new file mode 100644 index 0000000000..3943c0b7a1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"webResource.d.ts","sourceRoot":"","sources":["../../lib/webResource.ts"],"names":[],"mappings":";AAGA,OAAO,EAAe,eAAe,EAAqB,MAAM,eAAe,CAAC;AAChF,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,MAAM,EAAc,MAAM,cAAc,CAAC;AAElD,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAE/D,oBAAY,WAAW,GACnB,KAAK,GACL,KAAK,GACL,MAAM,GACN,QAAQ,GACR,OAAO,GACP,MAAM,GACN,SAAS,GACT,OAAO,CAAC;AACZ,oBAAY,eAAe,GACvB,IAAI,GACJ,MAAM,GACN,WAAW,GACX,eAAe,GACf,CAAC,MAAM,MAAM,CAAC,cAAc,CAAC,CAAC;AAElC;;GAEG;AACH,oBAAY,qBAAqB,GAAG;IAClC;;OAEG;IACH,WAAW,EAAE,MAAM,CAAC;CACrB,CAAC;AAEF;;;GAGG;AACH,MAAM,WAAW,eAAe;IAC9B,QAAQ,CAAC,OAAO,EAAE,OAAO,CAAC;IAC1B,aAAa,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,OAAO,CAAC;IACzC,OAAO,EAAE,CAAC,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,CAAC,GAAG,IAAI,CAAC;IAC5D,gBAAgB,EAAE,CAChB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,EACnD,OAAO,CAAC,EAAE,GAAG,KACV,IAAI,CAAC;IACV,mBAAmB,EAAE,CACnB,IAAI,EAAE,OAAO,EACb,QAAQ,EAAE,CAAC,IAAI,EAAE,eAAe,EAAE,EAAE,EAAE,KAAK,KAAK,GAAG,EACnD,OAAO,CAAC,EAAE,GAAG,KACV,IAAI,CAAC;CACX;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B;;OAEG;IACH,GAAG,EAAE,MAAM,CAAC;IACZ;;OAEG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,OAAO,EAAE,eAAe,CAAC;IACzB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf;;OAEG;IACH,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,eAAe,EAAE,OAAO,CAAC;IACzB;;;OAGG;IACH,OAAO,EAAE,MAAM,CAAC;IAChB;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B;;OAEG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D;;;;OAIG;IACH,yBAAyB,IAAI,IAAI,CAAC;IAElC;;OAEG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,eAAe,CAAC;IACzD;;OAEG;IACH,KAAK,IAAI,eAAe,CAAC;CAC1B;AAED,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,GAAG,GAAG,MAAM,IAAI,eAAe,CAgBxE;AAED;;;;;;;GAOG;AACH,qBAAa,WAAW;IACtB,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,WAAW,CAAC;IACpB,IAAI,CAAC,EAAE,GAAG,CAAC;IACX,OAAO,EAAE,eAAe,CAAC;IACzB;;OAEG;IACH,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B;;;OAGG;IACH,iBAAiB,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,QAAQ,EAAE,qBAAqB,KAAK,OAAO,CAAC,CAAC;IAC7E;;;;OAIG;IACH,uBAAuB,CAAC,EAAE,CACxB,aAAa,EAAE,aAAa,EAC5B,QAAQ,EAAE,qBAAqB,KAC5B,SAAS,GAAG,iBAAiB,CAAC;IACnC,QAAQ,CAAC,EAAE,GAAG,CAAC;IACf,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,eAAe,EAAE,OAAO,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B,iDAAiD;IACjD,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D,mDAAmD;IACnD,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;gBAG7D,GAAG,CAAC,EAAE,MAAM,EACZ,MAAM,CAAC,EAAE,WAAW,EACpB,IAAI,CAAC,EAAE,GAAG,EACV,KAAK,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,EAC9B,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,GAAG,eAAe,EAClD,kBAAkB,CAAC,EAAE,OAAO,EAC5B,eAAe,CAAC,EAAE,OAAO,EACzB,WAAW,CAAC,EAAE,
eAAe,EAC7B,OAAO,CAAC,EAAE,MAAM,EAChB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC5D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,EAC9D,aAAa,CAAC,EAAE,aAAa,EAC7B,SAAS,CAAC,EAAE,OAAO,EACnB,aAAa,CAAC,EAAE,aAAa,EAC7B,aAAa,CAAC,EAAE,MAAM;IAoBxB;;;;OAIG;IACH,yBAAyB,IAAI,IAAI;IASjC;;;;OAIG;IACH,OAAO,CAAC,OAAO,EAAE,qBAAqB,GAAG,WAAW;IA6MpD;;;OAGG;IACH,KAAK,IAAI,WAAW;CAqCrB;AAED,MAAM,WAAW,qBAAqB;IACpC;;;OAGG;IACH,MAAM,EAAE,WAAW,CAAC;IACpB;;;OAGG;IACH,GAAG,CAAC,EAAE,MAAM,CAAC;IACb;;;;;;;;;;OAUG;IACH,eAAe,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IAC1D;;;;OAIG;IACH,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB;;;;OAIG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB;;;;;;;;;OASG;IACH,cAAc,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,cAAc,CAAA;KAAE,CAAC;IACzD,QAAQ,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAClC;;;;;;;;OAQG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IACjC;;OAEG;IACH,sBAAsB,CAAC,EAAE,OAAO,CAAC;IACjC;;OAEG;IACH,IAAI,CAAC,EAAE,GAAG,CAAC;IACX;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC;IAC7B;;OAEG;IACH,OAAO,CAAC,EAAE;QAAE,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAA;KAAE,CAAC;IAC/B;;OAEG;IACH,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B;;OAEG;IACH,0BAA0B,CAAC,EAAE,OAAO,CAAC;IACrC;;OAEG;IACH,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,WAAW,CAAC,EAAE,eAAe,CAAC;IAC9B;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IAEvB,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC7D,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAC/D,kBAAkB,CAAC,EAAE,OAAO,CAAC;CAC9B;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,GAAG,CAAC;IACX,eAAe,EAAE,OAAO,CAAC;IACzB,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,kBAAkB;IACjC;;;OAGG;IACH,aAAa,CAAC,EAAE;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG,MAAM,CAAA;KAAE,CAAC;IAE1C;;OAEG;IACH,WAAW,CAAC,EAAE,eAAe,CAAC;IAE9B;;OAEG;IACH,OAAO,CAAC,EAAE,MAAM,CAAC;IAEjB;;OAEG;IACH,gBAAgB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE7D;;OAEG;IACH,kBAAkB,CAAC,EAAE,CAAC,QAAQ,EAAE,qBAAqB,KAAK,IAAI,CAAC;IAE/D,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.js b/node_modules/@azure/ms-rest-js/es/lib/webResource.js new file mode 100644 index 0000000000..8fd1460f45 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.js @@ -0,0 +1,257 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders, isHttpHeadersLike } from "./httpHeaders"; +import { Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +export function isWebResourceLike(object) { + if (typeof object !== "object") { + return false; + } + if (typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function") { + return true; + } + return false; +} +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. 
+ * + * @constructor + */ +var WebResource = /** @class */ (function () { + function WebResource(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, agentSettings, redirectLimit) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + WebResource.prototype.validateRequestProperties = function () { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + }; + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + WebResource.prototype.prepare = function (options) { + if (!options) { + throw new Error("options object is required"); + } + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + if (options.url && options.pathTemplate) { + throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); + } + if ((options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string")) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + // set the url if it is provided. + if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + // set the method + if (options.method) { + var validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error('The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods)); + } + } + this.method = options.method.toUpperCase(); + // construct the url if path template is provided + if (options.pathTemplate) { + var pathTemplate_1 = options.pathTemplate, pathParameters_1 = options.pathParameters; + if (typeof pathTemplate_1 !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + var baseUrl = options.baseUrl; + var url_1 = baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate_1.startsWith("/") ? 
pathTemplate_1.slice(1) : pathTemplate_1); + var segments = url_1.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters_1) { + throw new Error("pathTemplate: " + pathTemplate_1 + " has been provided. Hence, options.pathParameters must also be provided."); + } + segments.forEach(function (item) { + var pathParamName = item.slice(1, -1); + var pathParam = pathParameters_1[pathParamName]; + if (pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object")) { + throw new Error("pathTemplate: " + pathTemplate_1 + " contains the path parameter " + pathParamName + + (" however, it is not present in " + pathParameters_1 + " - " + JSON.stringify(pathParameters_1, undefined, 2) + ".") + + ("The value of the path parameter can either be a \"string\" of the form { " + pathParamName + ": \"some sample value\" } or ") + + ("it can be an \"object\" of the form { \"" + pathParamName + "\": { value: \"some sample value\", skipUrlEncoding: true } }.")); + } + if (typeof pathParam.valueOf() === "string") { + url_1 = url_1.replace(item, encodeURIComponent(pathParam)); + } + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error("options.pathParameters[" + pathParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (pathParam.skipUrlEncoding) { + url_1 = url_1.replace(item, pathParam.value); + } + else { + url_1 = url_1.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url_1; + } + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + var queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error("options.queryParameters must be of type object. It should be a JSON object " + + "of \"query-parameter-name\" as the key and the \"query-parameter-value\" as the value. " + + "The \"query-parameter-value\" may be fo type \"string\" or an \"object\" of the form { value: \"query-parameter-value\", skipUrlEncoding: true }."); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + var queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (var queryParamName in queryParameters) { + var queryParam = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } + else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error("options.queryParameters[" + queryParamName + "] is of type \"object\" but it does not contain a \"value\" property."); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } + else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + // add headers to the request if they are provided + if (options.headers) { + var headers = options.headers; + for (var _i = 0, _a = Object.keys(options.headers); _i < _a.length; _i++) { + var headerName = _a[_i]; + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } + else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + return this; + }; + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + WebResource.prototype.clone = function () { + var result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.agentSettings, this.redirectLimit); + if (this.formData) { + result.formData = this.formData; + } + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + return result; + }; + return WebResource; +}()); +export { WebResource }; +//# sourceMappingURL=webResource.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map b/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map new file mode 100644 index 0000000000..8cd8e4d675 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/webResource.js.map @@ -0,0 +1 @@ +{"version":3,"file":"webResource.js","sourceRoot":"","sources":["../../lib/webResource.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAE/F,OAAO,EAAE,WAAW,EAAmB,iBAAiB,EAAE,MAAM,eAAe,CAAC;AAEhF,OAAO,EAAU,UAAU,EAAE,MAAM,cAAc,CAAC;AAClD,OAAO,EAAE,YAAY,EAAE,MAAM,cAAc,CAAC;AA2J5C,MAAM,UAAU,iBAAiB,CAAC,MAAW;IAC3C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;QAC9B,OAAO,KAAK,CAAC;KACd;IACD,IACE,OAAO,MAAM,CAAC,GAAG,KAAK,QAAQ;QAC9B,OAAO,MAAM,CAAC,MAAM,KAAK,QAAQ;QACjC,OAAO,MAAM,CAAC,OAAO,KAAK,QAAQ;QAClC,iBAAiB,CAAC,MAAM,CAAC,OAAO,CAAC;QACjC,OAAO,MAAM,CAAC,yBAAyB,KAAK,UAAU;QACtD,OAAO,MAAM,CAAC,OAAO,KAAK,UAAU;QACpC,OAAO,MAAM,CAAC,KAAK,KAAK,UAAU,EAClC;QACA,OAAO,IAAI,CAAC;KACb;IACD,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;GAOG;AACH;IAyCE,qBACE,GAAY,EACZ,MAAoB,EACpB,IAAU,EACV,KAA8B,EAC9B,OAAkD,EAClD,kBAA4B,EAC5B,eAAyB,EACzB,WAA6B,EAC7B,OAAgB,EAChB,gBAA4D,EAC5D,kBAA8D,EAC9D,aAA6B,EAC7B,SAAmB,EACnB,aAA6B,EAC7B,aAAsB;QAEtB,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,GAAG,GAAG,GAAG,IAAI,EAAE,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,KAAK,CAAC;QAC9B,IAAI,CAAC,OAAO,GAAG,iBAAiB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,WAAW,CAAC,OAAO,CAAC,CAAC;QAC/E,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;QACjB,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;QACnB,IAAI,CAAC,QAAQ,GAAG,SAAS,CAAC;QAC1B,IAAI,CAAC,eAAe,GAAG,eAAe,IAAI,KAAK,CAAC;QAChD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,OAAO,IAAI,CAAC,CAAC;QAC5B,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;QAC7C,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,+CAAyB,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;SACpD;QACD,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;YACb,MAAM,IAAI,KAAK,CAAC,8BAA8B,CAAC,CAAC;SACjD;IACH,CAAC;IAED;;;;OAIG;IACH,6BAAO,GAAP,UAAQ,OAA8B;QACpC,IAAI,CAAC,OAAO,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QAED,IAAI,OAAO,CAAC,MAAM,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,MAAM,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;YAC/E,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;SACrD;QAED,IAAI,OAAO,CAAC,GAAG,IAAI,OAAO,CAAC,YAAY,EAAE;YACvC,MAAM,IAAI,KAAK,CACb,kGAAkG,CACnG,CAAC;SACH;QAED,IACE,CAAC,OAAO,CAAC,YAAY,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,YAAY,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC;YACzF,CAAC,OAAO,CAAC,GAAG,IAAI,SAAS,IAAI,OAAO,OAAO,CAAC,GAAG,CAAC,OAAO,EAAE,KAAK,QAAQ,CAAC,EACvE;YACA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;SACvF;QAED,iCAAiC;QACjC,I
AAI,OAAO,CAAC,GAAG,EAAE;YACf,IAAI,OAAO,OAAO,CAAC,GAAG,KAAK,QAAQ,EAAE;gBACnC,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC1D;YACD,IAAI,CAAC,GAAG,GAAG,OAAO,CAAC,GAAG,CAAC;SACxB;QAED,iBAAiB;QACjB,IAAI,OAAO,CAAC,MAAM,EAAE;YAClB,IAAM,YAAY,GAAG,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,SAAS,EAAE,MAAM,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;YAC3F,IAAI,YAAY,CAAC,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7D,MAAM,IAAI,KAAK,CACb,uBAAuB;oBACrB,OAAO,CAAC,MAAM;oBACd,4CAA4C;oBAC5C,IAAI,CAAC,SAAS,CAAC,YAAY,CAAC,CAC/B,CAAC;aACH;SACF;QACD,IAAI,CAAC,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,WAAW,EAAiB,CAAC;QAE1D,iDAAiD;QACjD,IAAI,OAAO,CAAC,YAAY,EAAE;YAChB,IAAA,cAAY,GAAqB,OAAO,aAA5B,EAAE,gBAAc,GAAK,OAAO,eAAZ,CAAa;YACjD,IAAI,OAAO,cAAY,KAAK,QAAQ,EAAE;gBACpC,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAC;aACnE;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,8BAA8B,CAAC;aAClD;YACD,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,IAAI,KAAG,GACL,OAAO;gBACP,CAAC,OAAO,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC;gBAClC,CAAC,cAAY,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,cAAY,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,cAAY,CAAC,CAAC;YACxE,IAAM,QAAQ,GAAG,KAAG,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC;YAC9C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,EAAE;gBAC/B,IAAI,CAAC,gBAAc,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,6EAA0E,CACxG,CAAC;iBACH;gBACD,QAAQ,CAAC,OAAO,CAAC,UAAU,IAAI;oBAC7B,IAAM,aAAa,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;oBACxC,IAAM,SAAS,GAAI,gBAAyC,CAAC,aAAa,CAAC,CAAC;oBAC5E,IACE,SAAS,KAAK,IAAI;wBAClB,SAAS,KAAK,SAAS;wBACvB,CAAC,CAAC,OAAO,SAAS,KAAK,QAAQ,IAAI,OAAO,SAAS,KAAK,QAAQ,CAAC,EACjE;wBACA,MAAM,IAAI,KAAK,CACb,mBAAiB,cAAY,qCAAgC,aAAe;6BAC1E,oCAAkC,gBAAc,WAAM,IAAI,CAAC,SAAS,CAClE,gBAAc,EACd,SAAS,EACT,CAAC,CACF,MAAG,CAAA;6BACJ,8EAA0E,aAAa,kCAA6B,CAAA;6BACpH,6CAAwC,aAAa,mEAA6D,CAAA,CACrH,CAAC;qBACH;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,CAAC,CAAC;qBACxD;oBAED,IAAI,OAAO,SAAS,CAAC,OAAO,EAAE,KAAK,QAAQ,EAAE;wBAC3C,IAAI,CAAC,SAAS,CAAC,KAAK,EAAE;4BACpB,MAAM,IAAI,KAAK,CACb,4BAA0B,aAAa,0EAAmE,CAC3G,CAAC;yBACH;wBACD,IAAI,SAAS,CAAC,eAAe,EAAE;4BAC7B,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,SAAS,CAAC,KAAK,CAAC,CAAC;yBAC1C;6BAAM;4BACL,KAAG,GAAG,KAAG,CAAC,OAAO,CAAC,IAAI,EAAE,kBAAkB,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,CAAC;yBAC9D;qBACF;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,IAAI,CAAC,GAAG,GAAG,KAAG,CAAC;SAChB;QAED,iHAAiH;QACjH,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,IAAM,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;YAChD,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,IAAI,KAAK,CACb,6EAA6E;oBAC3E,yFAAqF;oBACrF,mJAA2I,CAC9I,CAAC;aACH;YACD,uDAAuD;YACvD,IAAI,IAAI,CAAC,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC5C,IAAI,CAAC,GAAG,IAAI,GAAG,CAAC;aACjB;YACD,wBAAwB;YACxB,IAAM,WAAW,GAAG,EAAE,CAAC;YACvB,4GAA4G;YAC5G,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;YAChB,KAAK,IAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IAAM,UAAU,GAAQ,eAAe,CAAC,cAAc,CAAC,CAAC;gBACxD,IAAI,UAAU,EAAE;oBACd,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBAClC,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC,CAAC;wBACxE,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,CAAC;qBAC7D;yBAAM,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;wBACzC,IAAI,CAAC,UAAU,CAAC,KAAK,EAAE;4BACrB,MAAM,IAAI,KAAK,CACb,6BAA2B,cAAc,0EAAmE,CAC7G,CAAC;yBACH;wBACD,IAAI,UAAU,CAAC,eAAe,EAAE;4BAC9B,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC;4BAC1D,IAAI,CAAC,KAAK,CAAC,cAAc,CAAC,GAAG,UAAU,CAAC,KAAK,CAAC;yBAC/C;6BAAM;4BACL,WAAW,CAAC,IAAI,CAAC,cAAc,GAAG,GAAG,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC;4BAC9E,IAAI,CAAC,KAAK,CA
AC,cAAc,CAAC,GAAG,kBAAkB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;yBACnE;qBACF;iBACF;aACF,CAAC,aAAa;YACf,yBAAyB;YACzB,IAAI,CAAC,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACnC;QAED,kDAAkD;QAClD,IAAI,OAAO,CAAC,OAAO,EAAE;YACnB,IAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;YAChC,KAAyB,UAA4B,EAA5B,KAAA,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAA5B,cAA4B,EAA5B,IAA4B,EAAE;gBAAlD,IAAM,UAAU,SAAA;gBACnB,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC;aACnD;SACF;QACD,0CAA0C;QAC1C,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,CAAC,EAAE;YACxC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,iBAAiB,EAAE,OAAO,CAAC,CAAC;SAC9C;QACD,yCAAyC;QACzC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,IAAI,CAAC,OAAO,CAAC,sBAAsB,EAAE;YAClF,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE,YAAY,EAAE,CAAC,CAAC;SAC5D;QAED,UAAU;QACV,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,EAAE;YACrC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,iCAAiC,CAAC,CAAC;SACrE;QAED,0HAA0H;QAC1H,IAAI,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;QACzB,IAAI,OAAO,CAAC,IAAI,IAAI,SAAS,EAAE;YAC7B,6HAA6H;YAC7H,IAAI,OAAO,CAAC,YAAY,EAAE;gBACxB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,EAAE;oBAC1C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,mBAAmB,EAAE,SAAS,CAAC,CAAC;iBAClD;gBACD,IAAI,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,KAAK,0BAA0B,EAAE;oBACnE,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,EAAE,0BAA0B,CAAC,CAAC;iBAC9D;aACF;iBAAM;gBACL,IAAI,OAAO,CAAC,mBAAmB,EAAE;oBAC/B,IAAI,CAAC,IAAI,GAAG,IAAI,UAAU,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,SAAS,CACnD,OAAO,CAAC,mBAAmB,EAC3B,OAAO,CAAC,IAAI,EACZ,aAAa,CACd,CAAC;iBACH;gBACD,IAAI,CAAC,OAAO,CAAC,0BAA0B,EAAE;oBACvC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;iBAC1C;aACF;SACF;QAED,IAAI,CAAC,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACvC,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,CAAC;QACjD,IAAI,CAAC,aAAa,GAAG,OAAO,CAAC,aAAa,CAAC;QAC3C,IAAI,CAAC,kBAAkB,GAAG,OAAO,CAAC,kBAAkB,CAAC;QAErD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACH,2BAAK,GAAL;QACE,IAAM,MAAM,GAAG,IAAI,WAAW,CAC5B,IAAI,CAAC,GAAG,EACR,IAAI,CAAC,MAAM,EACX,IAAI,CAAC,IAAI,EACT,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC,KAAK,EAAE,EACpC,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,eAAe,EACpB,IAAI,CAAC,WAAW,EAChB,IAAI,CAAC,OAAO,EACZ,IAAI,CAAC,gBAAgB,EACrB,IAAI,CAAC,kBAAkB,EACvB,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,aAAa,EAClB,IAAI,CAAC,aAAa,CACnB,CAAC;QAEF,IAAI,IAAI,CAAC,QAAQ,EAAE;YACjB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,CAAC;SACjC;QAED,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,MAAM,CAAC,aAAa,GAAG,IAAI,CAAC,aAAa,CAAC;SAC3C;QAED,IAAI,IAAI,CAAC,iBAAiB,EAAE;YAC1B,MAAM,CAAC,iBAAiB,GAAG,IAAI,CAAC,iBAAiB,CAAC;SACnD;QAED,IAAI,IAAI,CAAC,uBAAuB,EAAE;YAChC,MAAM,CAAC,uBAAuB,GAAG,IAAI,CAAC,uBAAuB,CAAC;SAC/D;QAED,OAAO,MAAM,CAAC;IAChB,CAAC;IACH,kBAAC;AAAD,CAAC,AArVD,IAqVC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts new file mode 100644 index 0000000000..8a823d6990 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts @@ -0,0 +1,12 @@ +import { HttpClient } from "./httpClient"; +import { HttpHeaders } from "./httpHeaders"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. 
+ */ +export declare class XhrHttpClient implements HttpClient { + sendRequest(request: WebResourceLike): Promise; +} +export declare function parseHeaders(xhr: XMLHttpRequest): HttpHeaders; +//# sourceMappingURL=xhrHttpClient.d.ts.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map new file mode 100644 index 0000000000..b1aa40f77f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"xhrHttpClient.d.ts","sourceRoot":"","sources":["../../lib/xhrHttpClient.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,eAAe,EAAyB,MAAM,eAAe,CAAC;AACvE,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAGhE;;GAEG;AACH,qBAAa,aAAc,YAAW,UAAU;IACvC,WAAW,CAAC,OAAO,EAAE,eAAe,GAAG,OAAO,CAAC,qBAAqB,CAAC;CAuG7E;AAgBD,wBAAgB,YAAY,CAAC,GAAG,EAAE,cAAc,eAa/C"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js new file mode 100644 index 0000000000..4fa6828c22 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. +import { HttpHeaders } from "./httpHeaders"; +import { RestError } from "./restError"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +var XhrHttpClient = /** @class */ (function () { + function XhrHttpClient() { + } + XhrHttpClient.prototype.sendRequest = function (request) { + var xhr = new XMLHttpRequest(); + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + var abortSignal = request.abortSignal; + if (abortSignal) { + var listener_1 = function () { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener_1); + xhr.addEventListener("readystatechange", function () { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener_1); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + if (request.formData) { + var formData = request.formData; + var requestForm_1 = new FormData(); + var appendFormValue = function (key, value) { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm_1.append(key, value.value, value.options); + } + else { + requestForm_1.append(key, value); + } + }; + for (var _i = 0, _a = Object.keys(formData); _i < _a.length; _i++) { + var formKey = _a[_i]; + var formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (var j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } + else { + appendFormValue(formKey, formValue); + } + } + request.body = requestForm_1; + request.formData = undefined; + var contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = 
request.withCredentials; + for (var _b = 0, _c = request.headers.headersArray(); _b < _c.length; _b++) { + var header = _c[_b]; + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? null : request.body); + if (request.streamResponseBody) { + return new Promise(function (resolve, reject) { + xhr.addEventListener("readystatechange", function () { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + var blobBody = new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody: blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", function () { + return resolve({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + }; + return XhrHttpClient; +}()); +export { XhrHttpClient }; +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", function (rawEvent) { + return listener({ + loadedBytes: rawEvent.loaded, + }); + }); + } +} +// exported locally for testing +export function parseHeaders(xhr) { + var responseHeaders = new HttpHeaders(); + var headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (var _i = 0, headerLines_1 = headerLines; _i < headerLines_1.length; _i++) { + var line = headerLines_1[_i]; + var index = line.indexOf(":"); + var headerName = line.slice(0, index); + var headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", function () { + return reject(new RestError("Failed to send request to " + request.url, RestError.REQUEST_SEND_ERROR, undefined, request)); + }); + xhr.addEventListener("abort", function () { + return reject(new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request)); + }); + xhr.addEventListener("timeout", function () { + return reject(new RestError("timeout of " + xhr.timeout + "ms exceeded", RestError.REQUEST_SEND_ERROR, undefined, request)); + }); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map new file mode 100644 index 0000000000..1c00438a07 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/es/lib/xhrHttpClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"xhrHttpClient.js","sourceRoot":"","sources":["../../lib/xhrHttpClient.ts"],"names":[],"mappings":"AAAA,4DAA4D;AAC5D,+FAA+F;AAG/F,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAG5C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AAExC;;GAEG;AACH;IAAA;IAwGA,CAAC;IAvGQ,mCAAW,GAAlB,UAAmB,OAAwB;QACzC,IAAM,GAAG,GAAG,IAAI,cAAc,EAAE,CAAC;QAEjC,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;SAC7E;QAED,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,MAAM,IAAI,KAAK,CAAC,oDAAoD,CAAC,CAAC;SACvE;QAED,IAAM,WAAW,GAAG,OAAO,CAAC,WAAW,CAAC;QACxC,IAAI,WAAW,EAAE;YACf,IAAM,UAAQ,GAAG;gBACf,GAAG,CAAC,KAAK,EAAE,CAAC;YACd,CAAC,CAAC;YACF,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;YAChD,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;gBACvC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,IAAI,EAAE;oBAC1C,WAAW,CAAC,mBAAmB,CAAC,OAAO,EAAE,UAAQ,CAAC,CAAC;iBACpD;YACH,CAAC,CAAC,CAAC;SACJ;QAED,mBAAmB,CAAC,GAAG,CAAC,MAAM,EAAE,OAAO,CAAC,gBAAgB,CAAC,CAAC;QAC1D,mBAAmB,CAAC,GAAG,EAAE,OAAO,CAAC,kBAAkB,CAAC,CAAC;QAErD,IAAI,OAAO,CAAC,QAAQ,EAAE;YACpB,IAAM,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;YAClC,IAAM,aAAW,GAAG,IAAI,QAAQ,EAAE,CAAC;YACnC,IAAM,eAAe,GAAG,UAAC,GAAW,EAAE,KAAU;gBAC9C,IAAI,KAAK,IAAI,KAAK,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,KAAK,CAAC,cAAc,CAAC,SAAS,CAAC,EAAE;oBAC7E,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,KAAK,EAAE,KAAK,CAAC,OAAO,CAAC,CAAC;iBACrD;qBAAM;oBACL,aAAW,CAAC,MAAM,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBAChC;YACH,CAAC,CAAC;YACF,KAAsB,UAAqB,EAArB,KAAA,MAAM,CAAC,IAAI,CAAC,QAAQ,CAAC,EAArB,cAAqB,EAArB,IAAqB,EAAE;gBAAxC,IAAM,OAAO,SAAA;gBAChB,IAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,CAAC;gBACpC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,EAAE;oBAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;wBACzC,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;qBACxC;iBACF;qBAAM;oBACL,eAAe,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;iBACrC;aACF;YAED,OAAO,CAAC,IAAI,GAAG,aAAW,CAAC;YAC3B,OAAO,CAAC,QAAQ,GAAG,SAAS,CAAC;YAC7B,IAAM,WAAW,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,cAAc,CAAC,CAAC;YACxD,IAAI,WAAW,IAAI,WAAW,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;gBACpE,kEAAkE;gBAClE,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;aACxC;SACF;QAED,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,CAAC;QACtC,GAAG,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC9B,GAAG,CAAC,eAAe,GAAG,OAAO,CAAC,eAAe,CAAC;QAC9C,KAAqB,UAA8B,EAA9B,KAAA,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAA9B,cAA8B,EAA9B,IAA8B,EAAE;YAAhD,IAAM,MAAM,SAAA;YACf,GAAG,CAAC,gBAAgB,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACjD;QACD,GAAG,CAAC,YAAY,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC;QAEhE,2CAA2C;QAC3C,GAAG,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAE3D,IAAI,OAAO,CAAC,kBAAkB,EAAE;YAC9B,OAAO,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;gBACjC,GAAG,CAAC,gBAAgB,CAAC,kBAAkB,EAAE;oBACvC,wCAAwC;oBACxC,IAAI,GAAG,CAAC,UAAU,KAAK,cAAc,CAAC,gBAAgB,EAAE;wBACtD,IAAM,QAAQ,GAAG,IAAI,OAAO,CAAO,UAAC,OAAO,EAAE,MAAM;4BACjD,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;gCAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;4BACxB,CAAC,CAAC,CAAC;4BACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;wBAC9C,CAAC,CAAC,CAAC;wBACH,OAAO,CAAC;4BACN,OAAO,SAAA;4BACP,MAAM,EAAE,GAAG,CAAC,MAAM;4BAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;4BAC1B,QAAQ,UAAA;yBACT,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;gBACH,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;aAAM;YACL,OAAO,IAAI,OAAO,CAAC,UAAU,OAAO,EAAE,MAAM;gBAC1C,GAAG,CAAC,gBAAgB,CAAC,MAAM,EAAE;oBAC3B,OAAA,OAAO,CAAC;wBACN,OAAO,SAAA;wBACP,MAAM,EAAE,GAAG,CAAC,MAAM;wBAClB,OAAO,EAAE,YAAY,CAAC,GAAG,CAAC;wBAC1B,UAAU,EAAE,GAAG,CAAC,YAAY;qBAC7B,CAAC;gBALF,CAKE,CA
CH,CAAC;gBACF,qBAAqB,CAAC,OAAO,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAC9C,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;IACH,oBAAC;AAAD,CAAC,AAxGD,IAwGC;;AAED,SAAS,mBAAmB,CAC1B,GAA8B,EAC9B,QAAoD;IAEpD,IAAI,QAAQ,EAAE;QACZ,GAAG,CAAC,gBAAgB,CAAC,UAAU,EAAE,UAAC,QAAQ;YACxC,OAAA,QAAQ,CAAC;gBACP,WAAW,EAAE,QAAQ,CAAC,MAAM;aAC7B,CAAC;QAFF,CAEE,CACH,CAAC;KACH;AACH,CAAC;AAED,+BAA+B;AAC/B,MAAM,UAAU,YAAY,CAAC,GAAmB;IAC9C,IAAM,eAAe,GAAG,IAAI,WAAW,EAAE,CAAC;IAC1C,IAAM,WAAW,GAAG,GAAG;SACpB,qBAAqB,EAAE;SACvB,IAAI,EAAE;SACN,KAAK,CAAC,SAAS,CAAC,CAAC;IACpB,KAAmB,UAAW,EAAX,2BAAW,EAAX,yBAAW,EAAX,IAAW,EAAE;QAA3B,IAAM,IAAI,oBAAA;QACb,IAAM,KAAK,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAChC,IAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,IAAM,WAAW,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC;QAC1C,eAAe,CAAC,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC,CAAC;KAC9C;IACD,OAAO,eAAe,CAAC;AACzB,CAAC;AAED,SAAS,qBAAqB,CAC5B,OAAwB,EACxB,GAAmB,EACnB,MAA0B;IAE1B,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;QAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,+BAA6B,OAAO,CAAC,GAAK,EAC1C,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;IAPD,CAOC,CACF,CAAC;IACF,GAAG,CAAC,gBAAgB,CAAC,OAAO,EAAE;QAC5B,OAAA,MAAM,CACJ,IAAI,SAAS,CAAC,yBAAyB,EAAE,SAAS,CAAC,qBAAqB,EAAE,SAAS,EAAE,OAAO,CAAC,CAC9F;IAFD,CAEC,CACF,CAAC;IACF,GAAG,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC9B,OAAA,MAAM,CACJ,IAAI,SAAS,CACX,gBAAc,GAAG,CAAC,OAAO,gBAAa,EACtC,SAAS,CAAC,kBAAkB,EAC5B,SAAS,EACT,OAAO,CACR,CACF;IAPD,CAOC,CACF,CAAC;AACJ,CAAC"} \ No newline at end of file diff --git a/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts new file mode 100644 index 0000000000..2368e0547f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/browserFetchHttpClient.ts @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { + CommonRequestInfo, + CommonRequestInit, + CommonResponse, + FetchHttpClient, +} from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; + +export class BrowserFetchHttpClient extends FetchHttpClient { + prepareRequest(_httpRequest: WebResourceLike): Promise> { + return Promise.resolve({}); + } + + processRequest(_operationResponse: HttpOperationResponse): Promise { + return Promise.resolve(); + } + + fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise { + return fetch(input, init); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts new file mode 100644 index 0000000000..cface5afca --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/apiKeyCredentials.ts @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders } from "../httpHeaders"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; + +/** + * @interface ApiKeyCredentialOptions + * Describes the options to be provided while creating an instance of ApiKeyCredentials + */ +export interface ApiKeyCredentialOptions { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + inHeader?: { [x: string]: any }; + /** + * A key value pair of the query parameters that need to be applied to the request. 
+ */ + inQuery?: { [x: string]: any }; +} + +/** + * Authenticates to a service using an API key. + */ +export class ApiKeyCredentials implements ServiceClientCredentials { + /** + * A key value pair of the header parameters that need to be applied to the request. + */ + private readonly inHeader?: { [x: string]: any }; + /** + * A key value pair of the query parameters that need to be applied to the request. + */ + private readonly inQuery?: { [x: string]: any }; + + /** + * @constructor + * @param {object} options Specifies the options to be provided for auth. Either header or query needs to be provided. + */ + constructor(options: ApiKeyCredentialOptions) { + if (!options || (options && !options.inHeader && !options.inQuery)) { + throw new Error( + `options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.` + ); + } + this.inHeader = options.inHeader; + this.inQuery = options.inQuery; + } + + /** + * Signs a request with the values provided in the inHeader and inQuery parameter. + * + * @param {WebResource} webResource The WebResource to be signed. + * @returns {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike): Promise { + if (!webResource) { + return Promise.reject( + new Error(`webResource cannot be null or undefined and must be of type "object".`) + ); + } + + if (this.inHeader) { + if (!webResource.headers) { + webResource.headers = new HttpHeaders(); + } + for (const headerName in this.inHeader) { + webResource.headers.set(headerName, this.inHeader[headerName]); + } + } + + if (this.inQuery) { + if (!webResource.url) { + return Promise.reject(new Error(`url cannot be null in the request object.`)); + } + if (webResource.url.indexOf("?") < 0) { + webResource.url += "?"; + } + for (const key in this.inQuery) { + if (!webResource.url.endsWith("?")) { + webResource.url += "&"; + } + webResource.url += `${key}=${this.inQuery[key]}`; + } + } + + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts b/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts new file mode 100644 index 0000000000..33992d8b26 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/azureIdentityTokenCredentialAdapter.ts @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ServiceClientCredentials } from "./serviceClientCredentials"; +import { Constants as MSRestConstants } from "../util/constants"; +import { WebResource } from "../webResource"; + +import { TokenCredential } from "@azure/core-auth"; +import { TokenResponse } from "./tokenResponse"; + +const DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + +/** + * Resource manager endpoints to match in order to specify a valid scope to the AzureIdentityCredentialAdapter. + */ +export const azureResourceManagerEndpoints = [ + "https://management.windows.net", + "https://management.chinacloudapi.cn", + "https://management.usgovcloudapi.net", + "https://management.cloudapi.de", +]; + +/** + * This class provides a simple extension to use {@link TokenCredential} from `@azure/identity` library to + * use with legacy Azure SDKs that accept {@link ServiceClientCredentials} family of credentials for authentication. 
+ */ +export class AzureIdentityCredentialAdapter implements ServiceClientCredentials { + private azureTokenCredential: TokenCredential; + private scopes: string | string[]; + constructor( + azureTokenCredential: TokenCredential, + scopes: string | string[] = "https://management.azure.com/.default" + ) { + this.azureTokenCredential = azureTokenCredential; + this.scopes = scopes; + } + + public async getToken(): Promise { + const accessToken = await this.azureTokenCredential.getToken(this.scopes); + if (accessToken !== null) { + const result: TokenResponse = { + accessToken: accessToken.token, + tokenType: DEFAULT_AUTHORIZATION_SCHEME, + expiresOn: accessToken.expiresOnTimestamp, + }; + return result; + } else { + throw new Error("Could find token for scope"); + } + } + + public async signRequest(webResource: WebResource) { + const tokenResponse = await this.getToken(); + webResource.headers.set( + MSRestConstants.HeaderConstants.AUTHORIZATION, + `${tokenResponse.tokenType} ${tokenResponse.accessToken}` + ); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts new file mode 100644 index 0000000000..e92242d3dc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/basicAuthenticationCredentials.ts @@ -0,0 +1,54 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders } from "../httpHeaders"; +import * as base64 from "../util/base64"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; + +export class BasicAuthenticationCredentials implements ServiceClientCredentials { + userName: string; + password: string; + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME; + + /** + * Creates a new BasicAuthenticationCredentials object. + * + * @constructor + * @param {string} userName User name. + * @param {string} password Password. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor( + userName: string, + password: string, + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME + ) { + if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { + throw new Error("userName cannot be null or undefined and must be of type string."); + } + if (password === null || password === undefined || typeof password.valueOf() !== "string") { + throw new Error("password cannot be null or undefined and must be of type string."); + } + this.userName = userName; + this.password = password; + this.authorizationScheme = authorizationScheme; + } + + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @returns {Promise} The signed request object. 
+ */ + signRequest(webResource: WebResourceLike) { + const credentials = `${this.userName}:${this.password}`; + const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`; + if (!webResource.headers) webResource.headers = new HttpHeaders(); + webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts new file mode 100644 index 0000000000..7e9096bb3e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/credentials.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export type Authenticator = (challenge: object) => Promise; diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts new file mode 100644 index 0000000000..bf6615ba72 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/domainCredentials.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ApiKeyCredentials, ApiKeyCredentialOptions } from "./apiKeyCredentials"; + +export class DomainCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid DomainCredentials object. + * + * @constructor + * @param {string} domainKey The EventGrid domain key + */ + constructor(domainKey: string) { + if (!domainKey || (domainKey && typeof domainKey !== "string")) { + throw new Error("domainKey cannot be null or undefined and must be of type string."); + } + const options: ApiKeyCredentialOptions = { + inHeader: { + "aeg-sas-key": domainKey, + }, + }; + super(options); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts new file mode 100644 index 0000000000..9a0247da07 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/serviceClientCredentials.ts @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { WebResourceLike } from "../webResource"; + +export interface ServiceClientCredentials { + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike/request to be signed. + * @returns {Promise} The signed request object; + */ + signRequest(webResource: WebResourceLike): Promise; +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts new file mode 100644 index 0000000000..015163029e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/tokenCredentials.ts @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { HttpHeaders } from "../httpHeaders"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { ServiceClientCredentials } from "./serviceClientCredentials"; + +const HeaderConstants = Constants.HeaderConstants; +const DEFAULT_AUTHORIZATION_SCHEME = "Bearer"; + +/** + * A credentials object that uses a token string and a authorzation scheme to authenticate. + */ +export class TokenCredentials implements ServiceClientCredentials { + token: string; + authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME; + + /** + * Creates a new TokenCredentials object. + * + * @constructor + * @param {string} token The token. + * @param {string} [authorizationScheme] The authorization scheme. + */ + constructor(token: string, authorizationScheme: string = DEFAULT_AUTHORIZATION_SCHEME) { + if (!token) { + throw new Error("token cannot be null or undefined."); + } + this.token = token; + this.authorizationScheme = authorizationScheme; + } + + /** + * Signs a request with the Authentication header. + * + * @param {WebResourceLike} webResource The WebResourceLike to be signed. + * @return {Promise} The signed request object. + */ + signRequest(webResource: WebResourceLike) { + if (!webResource.headers) webResource.headers = new HttpHeaders(); + webResource.headers.set( + HeaderConstants.AUTHORIZATION, + `${this.authorizationScheme} ${this.token}` + ); + return Promise.resolve(webResource); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts b/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts new file mode 100644 index 0000000000..aef8969e13 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/tokenResponse.ts @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * TokenResponse is defined in `@azure/ms-rest-nodeauth` and is copied here to not + * add an unnecessary dependency. + */ +export interface TokenResponse { + readonly tokenType: string; + readonly accessToken: string; + readonly [x: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts b/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts new file mode 100644 index 0000000000..55d435a736 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/credentials/topicCredentials.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ApiKeyCredentials, ApiKeyCredentialOptions } from "./apiKeyCredentials"; + +export class TopicCredentials extends ApiKeyCredentials { + /** + * Creates a new EventGrid TopicCredentials object. 
+ * + * @constructor + * @param {string} topicKey The EventGrid topic key + */ + constructor(topicKey: string) { + if (!topicKey || (topicKey && typeof topicKey !== "string")) { + throw new Error("topicKey cannot be null or undefined and must be of type string."); + } + const options: ApiKeyCredentialOptions = { + inHeader: { + "aeg-sas-key": topicKey, + }, + }; + super(options); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts new file mode 100644 index 0000000000..5ac7a79d6f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.browser.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; diff --git a/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts new file mode 100644 index 0000000000..2f1c29dc62 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/defaultHttpClient.ts @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; diff --git a/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts new file mode 100644 index 0000000000..d28b01186a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/fetchHttpClient.ts @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import AbortController from "abort-controller"; +import FormData from "form-data"; + +import { HttpClient } from "./httpClient"; +import { WebResourceLike } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { HttpHeaders, HttpHeadersLike } from "./httpHeaders"; +import { RestError } from "./restError"; +import { Readable, Transform } from "stream"; + +interface FetchError extends Error { + code?: string; + errno?: string; + type?: string; +} + +export type CommonRequestInfo = string; // we only call fetch() on string urls. + +export type CommonRequestInit = Omit & { + body?: any; + headers?: any; + signal?: any; +}; + +export type CommonResponse = Omit & { + body: any; + trailer: any; + formData: any; +}; + +export abstract class FetchHttpClient implements HttpClient { + async sendRequest(httpRequest: WebResourceLike): Promise { + if (!httpRequest && typeof httpRequest !== "object") { + throw new Error( + "'httpRequest' (WebResource) cannot be null or undefined and must be of type object." 
+ ); + } + + const abortController = new AbortController(); + let abortListener: ((event: any) => void) | undefined; + if (httpRequest.abortSignal) { + if (httpRequest.abortSignal.aborted) { + throw new RestError( + "The request was aborted", + RestError.REQUEST_ABORTED_ERROR, + undefined, + httpRequest + ); + } + + abortListener = (event: Event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + httpRequest.abortSignal.addEventListener("abort", abortListener); + } + + if (httpRequest.timeout) { + setTimeout(() => { + abortController.abort(); + }, httpRequest.timeout); + } + + if (httpRequest.formData) { + const formData: any = httpRequest.formData; + const requestForm = new FormData(); + const appendFormValue = (key: string, value: any) => { + // value function probably returns a stream so we can provide a fresh stream on each retry + if (typeof value === "function") { + value = value(); + } + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm.append(key, value.value, value.options); + } else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } else { + appendFormValue(formKey, formValue); + } + } + + httpRequest.body = requestForm; + httpRequest.formData = undefined; + const contentType = httpRequest.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + if (typeof requestForm.getBoundary === "function") { + httpRequest.headers.set( + "Content-Type", + `multipart/form-data; boundary=${requestForm.getBoundary()}` + ); + } else { + // browser will automatically apply a suitable content-type header + httpRequest.headers.remove("Content-Type"); + } + } + } + + let body = httpRequest.body + ? typeof httpRequest.body === "function" + ? httpRequest.body() + : httpRequest.body + : undefined; + if (httpRequest.onUploadProgress && httpRequest.body) { + let loadedBytes = 0; + const uploadReportStream = new Transform({ + transform: (chunk: string | Buffer, _encoding, callback) => { + loadedBytes += chunk.length; + httpRequest.onUploadProgress!({ loadedBytes }); + callback(undefined, chunk); + }, + }); + + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } else { + uploadReportStream.end(body); + } + + body = uploadReportStream; + } + + const platformSpecificRequestInit: Partial = await this.prepareRequest( + httpRequest + ); + + const requestInit: RequestInit = { + body: body, + headers: httpRequest.headers.rawHeaders(), + method: httpRequest.method, + signal: abortController.signal, + redirect: "manual", + ...platformSpecificRequestInit, + }; + + let operationResponse: HttpOperationResponse | undefined; + try { + const response: CommonResponse = await this.fetch(httpRequest.url, requestInit); + + const headers = parseHeaders(response.headers); + operationResponse = { + headers: headers, + request: httpRequest, + status: response.status, + readableStreamBody: httpRequest.streamResponseBody + ? ((response.body as unknown) as NodeJS.ReadableStream) + : undefined, + bodyAsText: !httpRequest.streamResponseBody ? 
await response.text() : undefined,
+        redirected: response.redirected,
+        url: response.url,
+      };
+
+      const onDownloadProgress = httpRequest.onDownloadProgress;
+      if (onDownloadProgress) {
+        const responseBody: ReadableStream<Uint8Array> | undefined = response.body || undefined;
+
+        if (isReadableStream(responseBody)) {
+          let loadedBytes = 0;
+          const downloadReportStream = new Transform({
+            transform: (chunk: string | Buffer, _encoding, callback) => {
+              loadedBytes += chunk.length;
+              onDownloadProgress({ loadedBytes });
+              callback(undefined, chunk);
+            },
+          });
+          responseBody.pipe(downloadReportStream);
+          operationResponse.readableStreamBody = downloadReportStream;
+        } else {
+          const length = parseInt(headers.get("Content-Length")!) || undefined;
+          if (length) {
+            // Calling callback for non-stream response for consistency with browser
+            onDownloadProgress({ loadedBytes: length });
+          }
+        }
+      }
+
+      await this.processRequest(operationResponse);
+
+      return operationResponse;
+    } catch (error) {
+      const fetchError: FetchError = error;
+      if (fetchError.code === "ENOTFOUND") {
+        throw new RestError(
+          fetchError.message,
+          RestError.REQUEST_SEND_ERROR,
+          undefined,
+          httpRequest
+        );
+      } else if (fetchError.type === "aborted") {
+        throw new RestError(
+          "The request was aborted",
+          RestError.REQUEST_ABORTED_ERROR,
+          undefined,
+          httpRequest
+        );
+      }
+
+      throw fetchError;
+    } finally {
+      // clean up event listener
+      if (httpRequest.abortSignal && abortListener) {
+        let uploadStreamDone = Promise.resolve();
+        if (isReadableStream(body)) {
+          uploadStreamDone = isStreamComplete(body);
+        }
+        let downloadStreamDone = Promise.resolve();
+        if (isReadableStream(operationResponse?.readableStreamBody)) {
+          downloadStreamDone = isStreamComplete(operationResponse!.readableStreamBody);
+        }
+
+        Promise.all([uploadStreamDone, downloadStreamDone])
+          .then(() => {
+            httpRequest.abortSignal?.removeEventListener("abort", abortListener!);
+            return;
+          })
+          .catch((_e) => {});
+      }
+    }
+  }
+
+  abstract async prepareRequest(httpRequest: WebResourceLike): Promise<Partial<RequestInit>>;
+  abstract async processRequest(operationResponse: HttpOperationResponse): Promise<void>;
+  abstract async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise<CommonResponse>;
+}
+
+function isReadableStream(body: any): body is Readable {
+  return body && typeof body.pipe === "function";
+}
+
+function isStreamComplete(stream: Readable): Promise<void> {
+  return new Promise((resolve) => {
+    stream.on("close", resolve);
+    stream.on("end", resolve);
+    stream.on("error", resolve);
+  });
+}
+
+export function parseHeaders(headers: Headers): HttpHeadersLike {
+  const httpHeaders = new HttpHeaders();
+
+  headers.forEach((value, key) => {
+    httpHeaders.set(key, value);
+  });
+
+  return httpHeaders;
+}
diff --git a/node_modules/@azure/ms-rest-js/lib/httpClient.ts b/node_modules/@azure/ms-rest-js/lib/httpClient.ts
new file mode 100644
index 0000000000..0d01984c80
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/lib/httpClient.ts
@@ -0,0 +1,9 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for license information.
+
+import { RequestPolicy } from "./policies/requestPolicy";
+
+/**
+ * An interface that can send HttpRequests and receive promised HttpResponses.
+ */ +export interface HttpClient extends RequestPolicy {} diff --git a/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts b/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts new file mode 100644 index 0000000000..698279a4fd --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpHeaders.ts @@ -0,0 +1,230 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName: string) { + return headerName.toLowerCase(); +} + +/** + * An individual header within a HttpHeaders collection. + */ +export interface HttpHeader { + /** + * The name of the header. + */ + name: string; + + /** + * The value of the header. + */ + value: string; +} + +/** + * A HttpHeaders collection represented as a simple JSON object. + */ +export type RawHttpHeaders = { [headerName: string]: string }; + +/** + * A collection of HTTP header key/value pairs. + */ +export interface HttpHeadersLike { + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + set(headerName: string, headerValue: string | number): void; + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + get(headerName: string): string | undefined; + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName: string): boolean; + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + remove(headerName: string): boolean; + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders(): RawHttpHeaders; + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray(): HttpHeader[]; + /** + * Get the header names that are contained in this collection. + */ + headerNames(): string[]; + /** + * Get the header values that are contained in this collection. + */ + headerValues(): string[]; + /** + * Create a deep clone/copy of this HttpHeaders collection. + */ + clone(): HttpHeadersLike; + /** + * Get the JSON object representation of this HTTP header collection. + * The result is the same as `rawHeaders()`. + */ + toJson(): RawHttpHeaders; +} + +export function isHttpHeadersLike(object?: any): object is HttpHeadersLike { + if (!object || typeof object !== "object") { + return false; + } + + if ( + typeof object.rawHeaders === "function" && + typeof object.clone === "function" && + typeof object.get === "function" && + typeof object.set === "function" && + typeof object.contains === "function" && + typeof object.remove === "function" && + typeof object.headersArray === "function" && + typeof object.headerValues === "function" && + typeof object.headerNames === "function" && + typeof object.toJson === "function" + ) { + return true; + } + return false; +} + +/** + * A collection of HTTP header key/value pairs. 
+ */ +export class HttpHeaders { + private readonly _headersMap: { [headerKey: string]: HttpHeader }; + + constructor(rawHeaders?: RawHttpHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName The name of the header to set. This value is case-insensitive. + * @param headerValue The value of the header to set. + */ + public set(headerName: string, headerValue: string | number): void { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName The name of the header. + */ + public get(headerName: string): string | undefined { + const header: HttpHeader = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + public contains(headerName: string): boolean { + return !!this._headersMap[getHeaderKey(headerName)]; + } + + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName The name of the header to remove. + */ + public remove(headerName: string): boolean { + const result: boolean = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + + /** + * Get the headers that are contained this collection as an object. + */ + public rawHeaders(): RawHttpHeaders { + const result: RawHttpHeaders = {}; + for (const headerKey in this._headersMap) { + const header: HttpHeader = this._headersMap[headerKey]; + result[header.name.toLowerCase()] = header.value; + } + return result; + } + + /** + * Get the headers that are contained in this collection as an array. + */ + public headersArray(): HttpHeader[] { + const headers: HttpHeader[] = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + + /** + * Get the header names that are contained in this collection. + */ + public headerNames(): string[] { + const headerNames: string[] = []; + const headers: HttpHeader[] = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + + /** + * Get the header names that are contained in this collection. + */ + public headerValues(): string[] { + const headerValues: string[] = []; + const headers: HttpHeader[] = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + + /** + * Get the JSON object representation of this HTTP header collection. + */ + public toJson(): RawHttpHeaders { + return this.rawHeaders(); + } + + /** + * Get the string representation of this HTTP header collection. + */ + public toString(): string { + return JSON.stringify(this.toJson()); + } + + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + public clone(): HttpHeaders { + return new HttpHeaders(this.rawHeaders()); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts b/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts new file mode 100644 index 0000000000..c61dfd535e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpOperationResponse.ts @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { WebResourceLike } from "./webResource"; +import { HttpHeadersLike } from "./httpHeaders"; + +/** + * The properties on an HTTP response which will always be present. + */ +export interface HttpResponse { + /** + * The raw request + */ + request: WebResourceLike; + + /** + * The HTTP response status (e.g. 200) + */ + status: number; + + /** + * The HTTP response headers. + */ + headers: HttpHeadersLike; +} + +declare global { + /** + * Stub declaration of the browser-only Blob type. + * Full type information can be obtained by including "lib": ["dom"] in tsconfig.json. + */ + interface Blob {} +} + +/** + * Wrapper object for http request and response. Deserialized object is stored in + * the `parsedBody` property when the response body is received in JSON or XML. + */ +export interface HttpOperationResponse extends HttpResponse { + /** + * The parsed HTTP response headers. + */ + parsedHeaders?: { [key: string]: any }; + + /** + * The response body as text (string format) + */ + bodyAsText?: string | null; + + /** + * The response body as parsed JSON or XML + */ + parsedBody?: any; + + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always undefined in node.js. + */ + blobBody?: Promise; + + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always undefined in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + + /** + * The redirected property indicates whether the response is the result of a request which was redirected. + */ + redirected?: boolean; + + /** + * The url property contains the URL of the response. The value will be the final URL obtained after any redirects. + */ + url?: string; +} + +/** + * The flattened response to a REST call. + * Contains the underlying HttpOperationResponse as well as + * the merged properties of the parsedBody, parsedHeaders, etc. + */ +export interface RestResponse { + /** + * The underlying HTTP response containing both raw and deserialized response data. + */ + _response: HttpOperationResponse; + + [key: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts new file mode 100644 index 0000000000..a8cf3a53a6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogLevel.ts @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +export enum HttpPipelineLogLevel { + /** + * A log level that indicates that no logs will be logged. + */ + OFF, + + /** + * An error log. + */ + ERROR, + + /** + * A warning log. + */ + WARNING, + + /** + * An information log. 
+ */ + INFO, +} diff --git a/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts new file mode 100644 index 0000000000..47929c43b6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/httpPipelineLogger.ts @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; + +/** + * A Logger that can be added to a HttpPipeline. This enables each RequestPolicy to log messages + * that can be used for debugging purposes. + */ +export interface HttpPipelineLogger { + /** + * The log level threshold for what logs will be logged. + */ + minimumLogLevel: HttpPipelineLogLevel; + + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} + +/** + * A HttpPipelineLogger that will send its logs to the console. + */ +export class ConsoleHttpPipelineLogger implements HttpPipelineLogger { + /** + * Create a new ConsoleHttpPipelineLogger. + * @param minimumLogLevel The log level threshold for what logs will be logged. + */ + constructor(public minimumLogLevel: HttpPipelineLogLevel) {} + + /** + * Log the provided message. + * @param logLevel The HttpLogDetailLevel associated with this message. + * @param message The message to log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void { + const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`; + switch (logLevel) { + case HttpPipelineLogLevel.ERROR: + console.error(logMessage); + break; + + case HttpPipelineLogLevel.WARNING: + console.warn(logMessage); + break; + + case HttpPipelineLogLevel.INFO: + console.log(logMessage); + break; + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/msRest.ts b/node_modules/@azure/ms-rest-js/lib/msRest.ts new file mode 100644 index 0000000000..0b0ea24c10 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/msRest.ts @@ -0,0 +1,114 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
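+
+// Illustrative usage sketch of this barrel module (not part of the upstream file; the URL is a
+// placeholder and the client is constructed with defaults):
+//
+//   import { ServiceClient, RestError } from "@azure/ms-rest-js";
+//
+//   const client = new ServiceClient();
+//   client
+//     .sendRequest({ url: "https://example.invalid/status", method: "GET" })
+//     .then((response) => console.log(response.status))
+//     .catch((error: RestError) => console.error(error.code, error.message));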
+ +/// + +export { + WebResource, + WebResourceLike, + HttpRequestBody, + RequestPrepareOptions, + HttpMethods, + ParameterValue, + RequestOptionsBase, + TransferProgressEvent, + AbortSignalLike, +} from "./webResource"; +export { DefaultHttpClient } from "./defaultHttpClient"; +export { CommonRequestInfo, CommonRequestInit, CommonResponse } from "./fetchHttpClient"; +export { HttpClient } from "./httpClient"; +export { HttpHeader, HttpHeaders, HttpHeadersLike, RawHttpHeaders } from "./httpHeaders"; +export { HttpOperationResponse, HttpResponse, RestResponse } from "./httpOperationResponse"; +export { HttpPipelineLogger } from "./httpPipelineLogger"; +export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; +export { RestError } from "./restError"; +export { OperationArguments } from "./operationArguments"; +export { + OperationParameter, + OperationQueryParameter, + OperationURLParameter, + ParameterPath, +} from "./operationParameter"; +export { OperationResponse } from "./operationResponse"; +export { OperationSpec } from "./operationSpec"; +export { + AgentSettings, + ProxySettings, + ServiceClient, + ServiceClientOptions, + flattenResponse, +} from "./serviceClient"; +export { QueryCollectionFormat } from "./queryCollectionFormat"; +export { Constants } from "./util/constants"; +export { logPolicy } from "./policies/logPolicy"; +export { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptions, + RequestPolicyOptionsLike, +} from "./policies/requestPolicy"; +export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +export { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +export { agentPolicy } from "./policies/agentPolicy"; +export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; +export { RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +export { signingPolicy } from "./policies/signingPolicy"; +export { + TelemetryInfo, + userAgentPolicy, + getDefaultUserAgentValue, +} from "./policies/userAgentPolicy"; +export { + DeserializationContentTypes, + deserializationPolicy, + deserializeResponseBody, +} from "./policies/deserializationPolicy"; +export { + MapperType, + SimpleMapperType, + CompositeMapperType, + DictionaryMapperType, + SequenceMapperType, + EnumMapperType, + Mapper, + BaseMapper, + CompositeMapper, + SequenceMapper, + DictionaryMapper, + EnumMapper, + MapperConstraints, + PolymorphicDiscriminator, + Serializer, + UrlParameterValue, + serializeObject, +} from "./serializer"; +export { + stripRequest, + stripResponse, + delay, + executePromisesSequentially, + generateUuid, + encodeUri, + ServiceCallback, + promiseToCallback, + promiseToServiceCallback, + isValidUuid, + applyMixins, + isNode, + isDuration, +} from "./util/utils"; +export { URLBuilder, URLQuery } from "./url"; + +// Credentials +export { TokenCredentials } from "./credentials/tokenCredentials"; +export { TokenResponse } from "./credentials/tokenResponse"; +export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; +export { ApiKeyCredentials, ApiKeyCredentialOptions } from "./credentials/apiKeyCredentials"; +export { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +export { TopicCredentials } from "./credentials/topicCredentials"; +export { DomainCredentials } from 
"./credentials/domainCredentials"; +export { Authenticator } from "./credentials/credentials"; +export { AzureIdentityCredentialAdapter } from "./credentials/azureIdentityTokenCredentialAdapter"; diff --git a/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts new file mode 100644 index 0000000000..d40b4efc27 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/nodeFetchHttpClient.ts @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as tough from "tough-cookie"; +import * as http from "http"; +import * as https from "https"; +import node_fetch from "node-fetch"; + +import { + CommonRequestInfo, + CommonRequestInit, + CommonResponse, + FetchHttpClient, +} from "./fetchHttpClient"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; +import { createProxyAgent, ProxyAgent } from "./proxyAgent"; + +export class NodeFetchHttpClient extends FetchHttpClient { + private readonly cookieJar = new tough.CookieJar(undefined, { looseMode: true }); + + async fetch(input: CommonRequestInfo, init?: CommonRequestInit): Promise { + return (node_fetch(input, init) as unknown) as Promise; + } + + async prepareRequest(httpRequest: WebResourceLike): Promise> { + const requestInit: Partial = {}; + + if (this.cookieJar && !httpRequest.headers.get("Cookie")) { + const cookieString = await new Promise((resolve, reject) => { + this.cookieJar!.getCookieString(httpRequest.url, (err, cookie) => { + if (err) { + reject(err); + } else { + resolve(cookie); + } + }); + }); + + httpRequest.headers.set("Cookie", cookieString); + } + + if (httpRequest.agentSettings) { + const { http: httpAgent, https: httpsAgent } = httpRequest.agentSettings; + if (httpsAgent && httpRequest.url.startsWith("https")) { + requestInit.agent = httpsAgent; + } else if (httpAgent) { + requestInit.agent = httpAgent; + } + } else if (httpRequest.proxySettings) { + const tunnel: ProxyAgent = createProxyAgent( + httpRequest.url, + httpRequest.proxySettings, + httpRequest.headers + ); + requestInit.agent = tunnel.agent; + } + + if (httpRequest.keepAlive === true) { + if (requestInit.agent) { + requestInit.agent.keepAlive = true; + } else { + const options: http.AgentOptions | https.AgentOptions = { keepAlive: true }; + const agent = httpRequest.url.startsWith("https") + ? new https.Agent(options) + : new http.Agent(options); + requestInit.agent = agent; + } + } + + return requestInit; + } + + async processRequest(operationResponse: HttpOperationResponse): Promise { + if (this.cookieJar) { + const setCookieHeader = operationResponse.headers.get("Set-Cookie"); + if (setCookieHeader != undefined) { + await new Promise((resolve, reject) => { + this.cookieJar!.setCookie( + setCookieHeader, + operationResponse.request.url, + { ignoreError: true }, + (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + } + ); + }); + } + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationArguments.ts b/node_modules/@azure/ms-rest-js/lib/operationArguments.ts new file mode 100644 index 0000000000..55090db9e0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationArguments.ts @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { RequestOptionsBase } from "./webResource"; + +/** + * A collection of properties that apply to a single invocation of an operation. + */ +export interface OperationArguments { + /** + * The parameters that were passed to the operation method. + */ + [parameterName: string]: any; + + /** + * The optional arugments that are provided to an operation. + */ + options?: RequestOptionsBase; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationParameter.ts b/node_modules/@azure/ms-rest-js/lib/operationParameter.ts new file mode 100644 index 0000000000..150b18aea0 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationParameter.ts @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { Mapper } from "./serializer"; + +export type ParameterPath = string | string[] | { [propertyName: string]: ParameterPath }; + +/** + * A common interface that all Operation parameter's extend. + */ +export interface OperationParameter { + /** + * The path to this parameter's value in OperationArguments or the object that contains paths for + * each property's value in OperationArguments. + */ + parameterPath: ParameterPath; + + /** + * The mapper that defines how to validate and serialize this parameter's value. + */ + mapper: Mapper; +} + +/** + * A parameter for an operation that will be substituted into the operation's request URL. + */ +export interface OperationURLParameter extends OperationParameter { + /** + * Whether or not to skip encoding the URL parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; +} + +/** + * A parameter for an operation that will be added as a query parameter to the operation's HTTP + * request. + */ +export interface OperationQueryParameter extends OperationParameter { + /** + * Whether or not to skip encoding the query parameter's value before adding it to the URL. + */ + skipEncoding?: boolean; + + /** + * If this query parameter's value is a collection, what type of format should the value be + * converted to. + */ + collectionFormat?: QueryCollectionFormat; +} + +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +export function getPathStringFromParameter(parameter: OperationParameter): string { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} + +export function getPathStringFromParameterPath( + parameterPath: ParameterPath, + mapper: Mapper +): string { + let result: string; + if (typeof parameterPath === "string") { + result = parameterPath; + } else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } else { + result = mapper.serializedName!; + } + return result; +} diff --git a/node_modules/@azure/ms-rest-js/lib/operationResponse.ts b/node_modules/@azure/ms-rest-js/lib/operationResponse.ts new file mode 100644 index 0000000000..0fbea88c2c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/operationResponse.ts @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
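+
+// Illustrative sketch of a response entry keyed by status code (not part of the upstream file;
+// `Mappers.Widget` and `Mappers.WidgetGetHeaders` stand in for generated mappers):
+//
+//   const okResponse: OperationResponse = {
+//     bodyMapper: Mappers.Widget,
+//     headersMapper: Mappers.WidgetGetHeaders,
+//   };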
+
+import { Mapper } from "./serializer";
+
+/**
+ * An OperationResponse that can be returned from an operation request for a single status code.
+ */
+export interface OperationResponse {
+  /**
+   * The mapper that will be used to deserialize the response headers.
+   */
+  headersMapper?: Mapper;
+
+  /**
+   * The mapper that will be used to deserialize the response body.
+   */
+  bodyMapper?: Mapper;
+}
diff --git a/node_modules/@azure/ms-rest-js/lib/operationSpec.ts b/node_modules/@azure/ms-rest-js/lib/operationSpec.ts
new file mode 100644
index 0000000000..fe8b84a05e
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/lib/operationSpec.ts
@@ -0,0 +1,98 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for license information.
+
+import {
+  OperationParameter,
+  OperationQueryParameter,
+  OperationURLParameter,
+} from "./operationParameter";
+import { OperationResponse } from "./operationResponse";
+import { MapperType, Serializer } from "./serializer";
+import { HttpMethods } from "./webResource";
+
+/**
+ * A specification that defines an operation.
+ */
+export interface OperationSpec {
+  /**
+   * The serializer to use in this operation.
+   */
+  readonly serializer: Serializer;
+
+  /**
+   * The HTTP method that should be used by requests for this operation.
+   */
+  readonly httpMethod: HttpMethods;
+
+  /**
+   * The URL that was provided in the service's specification. This will still have all of the URL
+   * template variables in it. If this is not provided when the OperationSpec is created, then it
+   * will be populated by a "baseUri" property on the ServiceClient.
+   */
+  readonly baseUrl?: string;
+
+  /**
+   * The fixed path for this operation's URL. This will still have all of the URL template variables
+   * in it.
+   */
+  readonly path?: string;
+
+  /**
+   * The content type of the request body. This value will be used as the "Content-Type" header if
+   * it is provided.
+   */
+  readonly contentType?: string;
+
+  /**
+   * The parameter that will be used to construct the HTTP request's body.
+   */
+  readonly requestBody?: OperationParameter;
+
+  /**
+   * Whether or not this operation uses XML request and response bodies.
+   */
+  readonly isXML?: boolean;
+
+  /**
+   * The parameters to the operation method that will be substituted into the constructed URL.
+   */
+  readonly urlParameters?: ReadonlyArray<OperationURLParameter>;
+
+  /**
+   * The parameters to the operation method that will be added to the constructed URL's query.
+   */
+  readonly queryParameters?: ReadonlyArray<OperationQueryParameter>;
+
+  /**
+   * The parameters to the operation method that will be converted to headers on the operation's
+   * HTTP request.
+   */
+  readonly headerParameters?: ReadonlyArray<OperationParameter>;
+
+  /**
+   * The parameters to the operation method that will be used to create a formdata body for the
+   * operation's HTTP request.
+   */
+  readonly formDataParameters?: ReadonlyArray<OperationParameter>;
+
+  /**
+   * The different types of responses that this operation can return based on what status code is
+   * returned.
+ */ + readonly responses: { [responseCode: string]: OperationResponse }; +} + +export function isStreamOperation(operationSpec: OperationSpec): boolean { + let result = false; + for (const statusCode in operationSpec.responses) { + const operationResponse: OperationResponse = operationSpec.responses[statusCode]; + if ( + operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream + ) { + result = true; + break; + } + } + return result; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts new file mode 100644 index 0000000000..cca71847dc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.browser.ts @@ -0,0 +1,33 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { AgentSettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +const agentNotSupportedInBrowser = new Error("AgentPolicy is not supported in browser environment"); + +export function agentPolicy(_agentSettings?: AgentSettings): RequestPolicyFactory { + return { + create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => { + throw agentNotSupportedInBrowser; + }, + }; +} + +export class AgentPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) { + super(nextPolicy, options); + throw agentNotSupportedInBrowser; + } + + public sendRequest(_request: WebResourceLike): Promise { + throw agentNotSupportedInBrowser; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts new file mode 100644 index 0000000000..624a14beb4 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/agentPolicy.ts @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
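+
+// Illustrative sketch (not part of the upstream file): wiring the factory below into a client,
+// assuming Node.js keep-alive agents.
+//
+//   import * as http from "http";
+//   import * as https from "https";
+//
+//   const agentFactory = agentPolicy({
+//     http: new http.Agent({ keepAlive: true }),
+//     https: new https.Agent({ keepAlive: true }),
+//   });
+//   // e.g. new ServiceClient(credentials, { requestPolicyFactories: [agentFactory] })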
+ +import { AgentSettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +export function agentPolicy(agentSettings?: AgentSettings): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new AgentPolicy(nextPolicy, options, agentSettings!); + }, + }; +} + +export class AgentPolicy extends BaseRequestPolicy { + agentSettings: AgentSettings; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + agentSettings: AgentSettings + ) { + super(nextPolicy, options); + this.agentSettings = agentSettings; + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.agentSettings) { + request.agentSettings = this.agentSettings; + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts new file mode 100644 index 0000000000..f14f78cc9b --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/deserializationPolicy.ts @@ -0,0 +1,294 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { OperationResponse } from "../operationResponse"; +import { OperationSpec, isStreamOperation } from "../operationSpec"; +import { RestError } from "../restError"; +import { Mapper, MapperType } from "../serializer"; +import * as utils from "../util/utils"; +import { parseXML } from "../util/xml"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +/** + * The content-types that will indicate that an operation response should be deserialized in a + * particular way. + */ +export interface DeserializationContentTypes { + /** + * The content-types that indicate that an operation response should be deserialized as JSON. + * Defaults to [ "application/json", "text/json" ]. + */ + json?: string[]; + + /** + * The content-types that indicate that an operation response should be deserialized as XML. + * Defaults to [ "application/xml", "application/atom+xml" ]. + */ + xml?: string[]; +} + +/** + * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they + * pass through the HTTP pipeline. + */ +export function deserializationPolicy( + deserializationContentTypes?: DeserializationContentTypes +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new DeserializationPolicy(nextPolicy, deserializationContentTypes, options); + }, + }; +} + +export const defaultJsonContentTypes = ["application/json", "text/json"]; +export const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; + +/** + * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the + * HTTP pipeline. 
+ */ +export class DeserializationPolicy extends BaseRequestPolicy { + public readonly jsonContentTypes: string[]; + public readonly xmlContentTypes: string[]; + + constructor( + nextPolicy: RequestPolicy, + deserializationContentTypes: DeserializationContentTypes | undefined, + options: RequestPolicyOptionsLike + ) { + super(nextPolicy, options); + + this.jsonContentTypes = + (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; + this.xmlContentTypes = + (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; + } + + public async sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request) + .then((response: HttpOperationResponse) => + deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response) + ); + } +} + +function getOperationResponse( + parsedResponse: HttpOperationResponse +): undefined | OperationResponse { + let result: OperationResponse | undefined; + const request: WebResourceLike = parsedResponse.request; + const operationSpec: OperationSpec | undefined = request.operationSpec; + if (operationSpec) { + const operationResponseGetter: + | undefined + | (( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse) = request.operationResponseGetter; + if (!operationResponseGetter) { + result = operationSpec.responses[parsedResponse.status]; + } else { + result = operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} + +function shouldDeserializeResponse(parsedResponse: HttpOperationResponse): boolean { + const shouldDeserialize: undefined | boolean | ((response: HttpOperationResponse) => boolean) = + parsedResponse.request.shouldDeserialize; + let result: boolean; + if (shouldDeserialize === undefined) { + result = true; + } else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } else { + result = shouldDeserialize(parsedResponse); + } + return result; +} + +export function deserializeResponseBody( + jsonContentTypes: string[], + xmlContentTypes: string[], + response: HttpOperationResponse +): Promise { + return parse(jsonContentTypes, xmlContentTypes, response).then((parsedResponse) => { + const shouldDeserialize: boolean = shouldDeserializeResponse(parsedResponse); + if (shouldDeserialize) { + const operationSpec: OperationSpec | undefined = parsedResponse.request.operationSpec; + if (operationSpec && operationSpec.responses) { + const statusCode: number = parsedResponse.status; + + const expectedStatusCodes: string[] = Object.keys(operationSpec.responses); + + const hasNoExpectedStatusCodes: boolean = + expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default"); + + const responseSpec: OperationResponse | undefined = getOperationResponse(parsedResponse); + + const isExpectedStatusCode: boolean = hasNoExpectedStatusCodes + ? 200 <= statusCode && statusCode < 300 + : !!responseSpec; + if (!isExpectedStatusCode) { + const defaultResponseSpec: OperationResponse = operationSpec.responses.default; + if (defaultResponseSpec) { + const initialErrorMessage: string = isStreamOperation(operationSpec) + ? 
`Unexpected status code: ${statusCode}` + : (parsedResponse.bodyAsText as string); + + const error = new RestError(initialErrorMessage); + error.statusCode = statusCode; + error.request = utils.stripRequest(parsedResponse.request); + error.response = utils.stripResponse(parsedResponse); + + let parsedErrorResponse: { [key: string]: any } = parsedResponse.parsedBody; + try { + if (parsedErrorResponse) { + const defaultResponseBodyMapper: Mapper | undefined = + defaultResponseSpec.bodyMapper; + if ( + defaultResponseBodyMapper && + defaultResponseBodyMapper.serializedName === "CloudError" + ) { + if (parsedErrorResponse.error) { + parsedErrorResponse = parsedErrorResponse.error; + } + if (parsedErrorResponse.code) { + error.code = parsedErrorResponse.code; + } + if (parsedErrorResponse.message) { + error.message = parsedErrorResponse.message; + } + } else { + let internalError: any = parsedErrorResponse; + if (parsedErrorResponse.error) { + internalError = parsedErrorResponse.error; + } + + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + } + + if (defaultResponseBodyMapper) { + let valueToDeserialize: any = parsedErrorResponse; + if ( + operationSpec.isXML && + defaultResponseBodyMapper.type.name === MapperType.Sequence + ) { + valueToDeserialize = + typeof parsedErrorResponse === "object" + ? parsedErrorResponse[defaultResponseBodyMapper.xmlElementName!] + : []; + } + error.body = operationSpec.serializer.deserialize( + defaultResponseBodyMapper, + valueToDeserialize, + "error.body" + ); + } + } + } catch (defaultError) { + error.message = `Error \"${defaultError.message}\" occurred in deserializing the responseBody - \"${parsedResponse.bodyAsText}\" for the default response.`; + } + return Promise.reject(error); + } + } else if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize: any = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName!] 
+ : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize( + responseSpec.bodyMapper, + valueToDeserialize, + "operationRes.parsedBody" + ); + } catch (error) { + const restError = new RestError( + `Error ${error} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}` + ); + restError.request = utils.stripRequest(parsedResponse.request); + restError.response = utils.stripResponse(parsedResponse); + return Promise.reject(restError); + } + } else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize( + responseSpec.headersMapper, + parsedResponse.headers.rawHeaders(), + "operationRes.parsedHeaders" + ); + } + } + } + } + return Promise.resolve(parsedResponse); + }); +} + +function parse( + jsonContentTypes: string[], + xmlContentTypes: string[], + operationResponse: HttpOperationResponse +): Promise { + const errorHandler = (err: Error & { code: string }) => { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError( + msg, + errCode, + operationResponse.status, + operationResponse.request, + operationResponse, + operationResponse.bodyAsText + ); + return Promise.reject(e); + }; + + if (!operationResponse.request.streamResponseBody && operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType: string = operationResponse.headers.get("Content-Type") || ""; + const contentComponents: string[] = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + if ( + contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1) + ) { + return new Promise((resolve) => { + operationResponse.parsedBody = JSON.parse(text); + resolve(operationResponse); + }).catch(errorHandler); + } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + return parseXML(text) + .then((body) => { + operationResponse.parsedBody = body; + return operationResponse; + }) + .catch(errorHandler); + } + } + + return Promise.resolve(operationResponse); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts new file mode 100644 index 0000000000..74aeb60d1c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/exponentialRetryPolicy.ts @@ -0,0 +1,220 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
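+
+// Illustrative sketch (not part of the upstream file): building the factory below with explicit
+// values; all intervals are in milliseconds and the numbers are placeholders.
+//
+//   const retryFactory = exponentialRetryPolicy(
+//     3,         // retryCount
+//     30 * 1000, // retryInterval
+//     3 * 1000,  // minRetryInterval
+//     90 * 1000  // maxRetryInterval
+//   );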
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { RestError } from "../restError"; + +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} + +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} + +export function exponentialRetryPolicy( + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ExponentialRetryPolicy( + nextPolicy, + options, + retryCount, + retryInterval, + minRetryInterval, + maxRetryInterval + ); + }, + }; +} + +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; +const DEFAULT_CLIENT_RETRY_COUNT = 3; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; +const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + */ +export class ExponentialRetryPolicy extends BaseRequestPolicy { + /** + * The client retry count. + */ + retryCount: number; + /** + * The client retry interval in milliseconds. + */ + retryInterval: number; + /** + * The minimum retry interval in milliseconds. + */ + minRetryInterval: number; + /** + * The maximum retry interval in milliseconds. + */ + maxRetryInterval: number; + + /** + * @constructor + * @param {RequestPolicy} nextPolicy The next RequestPolicy in the pipeline chain. + * @param {RequestPolicyOptionsLike} options The options for this RequestPolicy. + * @param {number} [retryCount] The client retry count. + * @param {number} [retryInterval] The client retry interval, in milliseconds. + * @param {number} [minRetryInterval] The minimum retry interval, in milliseconds. + * @param {number} [maxRetryInterval] The maximum retry interval, in milliseconds. + */ + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number + ) { + super(nextPolicy, options); + function isNumber(n: any): n is number { + return typeof n === "number"; + } + this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = isNumber(minRetryInterval) + ? minRetryInterval + : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = isNumber(maxRetryInterval) + ? maxRetryInterval + : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => retry(this, request, response)) + .catch((error) => retry(this, request, error.response, undefined, error)); + } +} + +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. 
+ */ +function shouldRetry( + policy: ExponentialRetryPolicy, + statusCode: number | undefined, + retryData: RetryData +): boolean { + if ( + statusCode == undefined || + (statusCode < 500 && statusCode !== 408) || + statusCode === 501 || + statusCode === 505 + ) { + return false; + } + + let currentCount: number; + if (!retryData) { + throw new Error("retryData for the ExponentialRetryPolicyFilter cannot be null."); + } else { + currentCount = retryData && retryData.retryCount; + } + + return currentCount < policy.retryCount; +} + +/** + * Updates the retry data for the next attempt. + * + * @param {ExponentialRetryPolicy} policy The ExponentialRetryPolicy that this function is being called against. + * @param {RetryData} retryData The retry data. + * @param {RetryError} [err] The operation"s error, if any. + */ +function updateRetryData( + policy: ExponentialRetryPolicy, + retryData?: RetryData, + err?: RetryError +): RetryData { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + + retryData.error = err; + } + + // Adjust retry count + retryData.retryCount++; + + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount) - 1; + const boundedRandDelta = + policy.retryInterval * 0.8 + + Math.floor(Math.random() * (policy.retryInterval * 1.2 - policy.retryInterval * 0.8)); + incrementDelta *= boundedRandDelta; + + retryData.retryInterval = Math.min( + policy.minRetryInterval + incrementDelta, + policy.maxRetryInterval + ); + + return retryData; +} + +function retry( + policy: ExponentialRetryPolicy, + request: WebResourceLike, + response?: HttpOperationResponse, + retryData?: RetryData, + requestError?: RetryError +): Promise { + retryData = updateRetryData(policy, retryData, requestError); + const isAborted: boolean | undefined = request.abortSignal && request.abortSignal.aborted; + if (!isAborted && shouldRetry(policy, response && response.status, retryData)) { + return utils + .delay(retryData.retryInterval) + .then(() => policy._nextPolicy.sendRequest(request.clone())) + .then((res) => retry(policy, request, res, retryData, undefined)) + .catch((err) => retry(policy, request, response, retryData, err)); + } else if (isAborted || requestError || !response) { + // If the operation failed in the end, return all errors instead of just the last one + const err = + retryData.error || + new RestError( + "Failed to send the request.", + RestError.REQUEST_SEND_ERROR, + response && response.status, + response && response.request, + response + ); + return Promise.reject(err); + } else { + return Promise.resolve(response); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts new file mode 100644 index 0000000000..aa17a318fa --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/generateClientRequestIdPolicy.ts @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
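+
+// Illustrative sketch (not part of the upstream file): the factory below adds a generated UUID
+// under the given header name unless the request already carries one.
+//
+//   const requestIdFactory = generateClientRequestIdPolicy("x-ms-client-request-id");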
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function generateClientRequestIdPolicy( + requestIdHeaderName = "x-ms-client-request-id" +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); + }, + }; +} + +export class GenerateClientRequestIdPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + private _requestIdHeaderName: string + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.headers.contains(this._requestIdHeaderName)) { + request.headers.set(this._requestIdHeaderName, utils.generateUuid()); + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts new file mode 100644 index 0000000000..90d44ca8e2 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/logPolicy.ts @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function logPolicy(logger: any = console.log): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new LogPolicy(nextPolicy, options, logger); + }, + }; +} + +export class LogPolicy extends BaseRequestPolicy { + logger?: any; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + logger: any = console.log + ) { + super(nextPolicy, options); + this.logger = logger; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy.sendRequest(request).then((response) => logResponse(this, response)); + } +} + +function logResponse( + policy: LogPolicy, + response: HttpOperationResponse +): Promise { + policy.logger(`>> Request: ${JSON.stringify(response.request, undefined, 2)}`); + policy.logger(`>> Response status code: ${response.status}`); + const responseBody = response.bodyAsText; + policy.logger(`>> Body: ${responseBody}`); + return Promise.resolve(response); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts new file mode 100644 index 0000000000..9b688a4a0a --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.browser.ts @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/* + * NOTE: When moving this file, please update "browser" section in package.json + * and "plugins" section in webpack.testconfig.ts. 
+ */ + +import { TelemetryInfo } from "./userAgentPolicy"; + +interface NavigatorEx extends Navigator { + // oscpu is not yet standards-compliant, but can not be undefined in TypeScript 3.6.2 + readonly oscpu: string; +} + +export function getDefaultUserAgentKey(): string { + return "x-ms-command-name"; +} + +export function getPlatformSpecificData(): TelemetryInfo[] { + const navigator = self.navigator as NavigatorEx; + const osInfo = { + key: "OS", + value: (navigator.oscpu || navigator.platform).replace(" ", ""), + }; + + return [osInfo]; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts new file mode 100644 index 0000000000..ea523dd152 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/msRestUserAgentPolicy.ts @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as os from "os"; +import { TelemetryInfo } from "./userAgentPolicy"; +import { Constants } from "../util/constants"; + +export function getDefaultUserAgentKey(): string { + return Constants.HeaderConstants.USER_AGENT; +} + +export function getPlatformSpecificData(): TelemetryInfo[] { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + + const osInfo = { + key: "OS", + value: `(${os.arch()}-${os.type()}-${os.release()})`, + }; + + return [runtimeInfo, osInfo]; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts new file mode 100644 index 0000000000..a1c194d3cc --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.browser.ts @@ -0,0 +1,37 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ProxySettings } from "../serviceClient"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; + +const proxyNotSupportedInBrowser = new Error("ProxyPolicy is not supported in browser environment"); + +export function getDefaultProxySettings(_proxyUrl?: string): ProxySettings | undefined { + return undefined; +} + +export function proxyPolicy(_proxySettings?: ProxySettings): RequestPolicyFactory { + return { + create: (_nextPolicy: RequestPolicy, _options: RequestPolicyOptionsLike) => { + throw proxyNotSupportedInBrowser; + }, + }; +} + +export class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) { + super(nextPolicy, options); + throw proxyNotSupportedInBrowser; + } + + public sendRequest(_request: WebResourceLike): Promise { + throw proxyNotSupportedInBrowser; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts new file mode 100644 index 0000000000..bda596084f --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/proxyPolicy.ts @@ -0,0 +1,168 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
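+
+// Illustrative sketch (not part of the upstream file): resolving proxy settings from an explicit
+// URL (a placeholder here) and turning them into a policy factory.
+//
+//   const settings = getDefaultProxySettings("http://user:secret@proxy.example.invalid:8080");
+//   const proxyFactory = proxyPolicy(settings);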
+ +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { ProxySettings } from "../serviceClient"; +import { WebResourceLike } from "../webResource"; +import { Constants } from "../util/constants"; +import { URLBuilder } from "../url"; + +/** + * @internal + */ +export const noProxyList: string[] = loadNoProxy(); +const byPassedList: Map = new Map(); + +/** + * @internal + */ +export function getEnvironmentValue(name: string): string | undefined { + if (process.env[name]) { + return process.env[name]; + } else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} + +function loadEnvironmentProxyValue(): string | undefined { + if (!process) { + return undefined; + } + + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + + return httpsProxy || allProxy || httpProxy; +} + +// Check whether the host of a given `uri` is in the noProxyList. +// If there's a match, any request sent to the same host won't have the proxy settings set. +// This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 +function isBypassed(uri: string): boolean | undefined { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost()!; + if (byPassedList.has(host)) { + return byPassedList.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + byPassedList.set(host, isBypassedFlag); + return isBypassedFlag; +} + +/** + * @internal + */ +export function loadNoProxy(): string[] { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + + return []; +} + +/** + * @internal + */ +function extractAuthFromUrl( + url: string +): { username?: string; password?: string; urlWithoutAuth: string } { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? 
auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} + +export function getDefaultProxySettings(proxyUrl?: string): ProxySettings | undefined { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, + }; +} + +export function proxyPolicy(proxySettings?: ProxySettings): RequestPolicyFactory { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ProxyPolicy(nextPolicy, options, proxySettings!); + }, + }; +} + +export class ProxyPolicy extends BaseRequestPolicy { + proxySettings: ProxySettings; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + proxySettings: ProxySettings + ) { + super(nextPolicy, options); + this.proxySettings = proxySettings; + } + + public sendRequest(request: WebResourceLike): Promise { + if (!request.proxySettings && !isBypassed(request.url)) { + request.proxySettings = this.proxySettings; + } + return this._nextPolicy.sendRequest(request); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts new file mode 100644 index 0000000000..fd158d105c --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/redirectPolicy.ts @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { URLBuilder } from "../url"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +/** + * Options for how redirect responses are handled. + */ +export interface RedirectOptions { + /* + * When true, redirect responses are followed. Defaults to true. + */ + handleRedirects: boolean; + + /* + * The maximum number of times the redirect URL will be tried before + * failing. Defaults to 20. 
+ */ + maxRetries?: number; +} + +export const DefaultRedirectOptions: RedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; + +export function redirectPolicy(maximumRetries = 20): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} + +export class RedirectPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + readonly maxRetries = 20 + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} + +function handleRedirect( + policy: RedirectPolicy, + response: HttpOperationResponse, + currentRetries: number +): Promise { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if ( + locationHeader && + (status === 300 || + (status === 301 && ["GET", "HEAD"].includes(request.method)) || + (status === 302 && ["GET", "POST", "HEAD"].includes(request.method)) || + (status === 303 && "POST" === request.method) || + status === 307) && + ((request.redirectLimit !== undefined && currentRetries < request.redirectLimit) || + (request.redirectLimit === undefined && currentRetries < policy.maxRetries)) + ) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + + // POST request with Status code 302 and 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + // reference: https://tools.ietf.org/html/rfc7231#page-57 && https://fetch.spec.whatwg.org/#http-redirect-fetch + if ((status === 302 || status === 303) && request.method === "POST") { + request.method = "GET"; + delete request.body; + } + + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)) + .then((res) => recordRedirect(res, request.url)); + } + + return Promise.resolve(response); +} + +function recordRedirect(response: HttpOperationResponse, redirect: string): HttpOperationResponse { + // This is called as the recursive calls to handleRedirect() unwind, + // only record the deepest/last redirect + if (!response.redirected) { + response.redirected = true; + response.url = redirect; + } + return response; +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts new file mode 100644 index 0000000000..d25d3146b7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/requestPolicy.ts @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpOperationResponse } from "../httpOperationResponse"; +import { HttpPipelineLogger } from "../httpPipelineLogger"; +import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; +import { WebResourceLike } from "../webResource"; + +/** + * Creates a new RequestPolicy per-request that uses the provided nextPolicy. 
+ */ +export type RequestPolicyFactory = { + create(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike): RequestPolicy; +}; + +export interface RequestPolicy { + sendRequest(httpRequest: WebResourceLike): Promise; +} + +export abstract class BaseRequestPolicy implements RequestPolicy { + protected constructor( + readonly _nextPolicy: RequestPolicy, + readonly _options: RequestPolicyOptionsLike + ) {} + + public abstract sendRequest(webResource: WebResourceLike): Promise; + + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + public shouldLog(logLevel: HttpPipelineLogLevel): boolean { + return this._options.shouldLog(logLevel); + } + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + public log(logLevel: HttpPipelineLogLevel, message: string): void { + this._options.log(logLevel, message); + } +} + +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export interface RequestPolicyOptionsLike { + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel: HttpPipelineLogLevel): boolean; + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meet the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + log(logLevel: HttpPipelineLogLevel, message: string): void; +} + +/** + * Optional properties that can be used when creating a RequestPolicy. + */ +export class RequestPolicyOptions implements RequestPolicyOptionsLike { + constructor(private _logger?: HttpPipelineLogger) {} + + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + public shouldLog(logLevel: HttpPipelineLogLevel): boolean { + return ( + !!this._logger && + logLevel !== HttpPipelineLogLevel.OFF && + logLevel <= this._logger.minimumLogLevel + ); + } + + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel The log level of this log. + * @param message The message of this log. + */ + public log(logLevel: HttpPipelineLogLevel, message: string): void { + if (this._logger && this.shouldLog(logLevel)) { + this._logger.log(logLevel, message); + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts new file mode 100644 index 0000000000..47f11f3ead --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/rpRegistrationPolicy.ts @@ -0,0 +1,197 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
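The NO_PROXY handling in proxyPolicy.ts above is easiest to read with concrete hosts: a pattern starting with "." matches the bare domain and every subdomain, while any other pattern must match the host exactly. A self-contained re-sketch of just that rule, with hypothetical host names:

// Mirrors the pattern matching inside isBypassed(); not wired to any environment variable here.
function matchesNoProxy(host: string, pattern: string): boolean {
  if (pattern.startsWith(".")) {
    // ".foo.com" matches foo.com itself and any subdomain such as api.foo.com.
    return host.endsWith(pattern) || host === pattern.slice(1);
  }
  // Otherwise only an exact host match bypasses the proxy.
  return host === pattern;
}

// With NO_PROXY=".foo.com,localhost" (hypothetical):
console.log(matchesNoProxy("api.foo.com", ".foo.com")); // true  - subdomain
console.log(matchesNoProxy("foo.com", ".foo.com"));     // true  - bare domain
console.log(matchesNoProxy("notfoo.com", ".foo.com"));  // false - suffix does not line up on a dot
console.log(matchesNoProxy("localhost", "localhost"));  // true  - exact match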
+import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function rpRegistrationPolicy(retryTimeout = 30): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); + }, + }; +} + +export class RPRegistrationPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + readonly _retryTimeout = 30 + ) { + super(nextPolicy, options); + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .then((response) => registerIfNeeded(this, request, response)); + } +} + +function registerIfNeeded( + policy: RPRegistrationPolicy, + request: WebResourceLike, + response: HttpOperationResponse +): Promise { + if (response.status === 409) { + const rpName = checkRPNotRegisteredError(response.bodyAsText as string); + if (rpName) { + const urlPrefix = extractSubscriptionUrl(request.url); + return ( + registerRP(policy, urlPrefix, rpName, request) + // Autoregistration of ${provider} failed for some reason. We will not return this error + // instead will return the initial response with 409 status code back to the user. + // do nothing here as we are returning the original response at the end of this method. + .catch(() => false) + .then((registrationStatus) => { + if (registrationStatus) { + // Retry the original request. We have to change the x-ms-client-request-id + // otherwise Azure endpoint will return the initial 409 (cached) response. + request.headers.set("x-ms-client-request-id", utils.generateUuid()); + return policy._nextPolicy.sendRequest(request.clone()); + } + return response; + }) + ); + } + } + + return Promise.resolve(response); +} + +/** + * Reuses the headers of the original request and url (if specified). + * @param {WebResourceLike} originalRequest The original request + * @param {boolean} reuseUrlToo Should the url from the original request be reused as well. Default false. + * @returns {object} A new request object with desired headers. + */ +function getRequestEssentials( + originalRequest: WebResourceLike, + reuseUrlToo = false +): WebResourceLike { + const reqOptions: WebResourceLike = originalRequest.clone(); + if (reuseUrlToo) { + reqOptions.url = originalRequest.url; + } + + // We have to change the x-ms-client-request-id otherwise Azure endpoint + // will return the initial 409 (cached) response. + reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); + + // Set content-type to application/json + reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); + + return reqOptions; +} + +/** + * Validates the error code and message associated with 409 response status code. If it matches to that of + * RP not registered then it returns the name of the RP else returns undefined. + * @param {string} body The response body received after making the original request. + * @returns {string} The name of the RP if condition is satisfied else undefined. 
+ */ +function checkRPNotRegisteredError(body: string): string { + let result, responseBody; + if (body) { + try { + responseBody = JSON.parse(body); + } catch (err) { + // do nothing; + } + if ( + responseBody && + responseBody.error && + responseBody.error.message && + responseBody.error.code && + responseBody.error.code === "MissingSubscriptionRegistration" + ) { + const matchRes = responseBody.error.message.match(/.*'(.*)'/i); + if (matchRes) { + result = matchRes.pop(); + } + } + } + return result; +} + +/** + * Extracts the first part of the URL, just after subscription: + * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} url The original request url + * @returns {string} The url prefix as explained above. + */ +function extractSubscriptionUrl(url: string): string { + let result; + const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); + if (matchRes && matchRes[0]) { + result = matchRes[0]; + } else { + throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); + } + return result; +} + +/** + * Registers the given provider. + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} urlPrefix https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ + * @param {string} provider The provider name to be registered. + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @param {registrationCallback} callback The callback that handles the RP registration + */ +function registerRP( + policy: RPRegistrationPolicy, + urlPrefix: string, + provider: string, + originalRequest: WebResourceLike +): Promise { + const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; + const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; + const reqOptions = getRequestEssentials(originalRequest); + reqOptions.method = "POST"; + reqOptions.url = postUrl; + + return policy._nextPolicy.sendRequest(reqOptions).then((response) => { + if (response.status !== 200) { + throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); + } + return getRegistrationStatus(policy, getUrl, originalRequest); + }); +} + +/** + * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. + * Polling will happen till the registrationState property of the response body is "Registered". + * @param {RPRegistrationPolicy} policy The RPRegistrationPolicy this function is being called against. + * @param {string} url The request url for polling + * @param {WebResourceLike} originalRequest The original request sent by the user that returned a 409 response + * with a message that the provider is not registered. + * @returns {Promise} True if RP Registration is successful. 
+ */ +function getRegistrationStatus( + policy: RPRegistrationPolicy, + url: string, + originalRequest: WebResourceLike +): Promise { + const reqOptions: any = getRequestEssentials(originalRequest); + reqOptions.url = url; + reqOptions.method = "GET"; + + return policy._nextPolicy.sendRequest(reqOptions).then((res) => { + const obj = res.parsedBody as any; + if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { + return true; + } else { + return utils + .delay(policy._retryTimeout * 1000) + .then(() => getRegistrationStatus(policy, url, originalRequest)); + } + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts new file mode 100644 index 0000000000..91b3184321 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/signingPolicy.ts @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { ServiceClientCredentials } from "../credentials/serviceClientCredentials"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicyFactory, + RequestPolicy, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export function signingPolicy( + authenticationProvider: ServiceClientCredentials +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, + }; +} + +export class SigningPolicy extends BaseRequestPolicy { + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + public authenticationProvider: ServiceClientCredentials + ) { + super(nextPolicy, options); + } + + signRequest(request: WebResourceLike): Promise { + return this.authenticationProvider.signRequest(request); + } + + public sendRequest(request: WebResourceLike): Promise { + return this.signRequest(request).then((nextRequest) => + this._nextPolicy.sendRequest(nextRequest) + ); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts new file mode 100644 index 0000000000..9edc5eef6e --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/systemErrorRetryPolicy.ts @@ -0,0 +1,187 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
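The 409 handling above hinges on a small piece of string work: checkRPNotRegisteredError() pulls the provider name out of the error message with /.*'(.*)'/i, and registerRP() then builds the register/poll URLs from the subscription prefix. A self-contained sketch of the extraction step, using a hypothetical Azure error body:

// Hypothetical 409 response body in the shape the policy expects.
const body = JSON.stringify({
  error: {
    code: "MissingSubscriptionRegistration",
    message: "The subscription is not registered to use namespace 'Microsoft.Compute'.",
  },
});

const parsed = JSON.parse(body);
let provider: string | undefined;
if (parsed?.error?.code === "MissingSubscriptionRegistration") {
  const match = parsed.error.message.match(/.*'(.*)'/i);
  if (match) {
    provider = match.pop(); // "Microsoft.Compute"
  }
}

// The policy would then POST {urlPrefix}providers/Microsoft.Compute/register?api-version=2016-02-01
// and poll the matching GET endpoint until registrationState === "Registered".
console.log(provider);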
+ +import { HttpOperationResponse } from "../httpOperationResponse"; +import * as utils from "../util/utils"; +import { WebResourceLike } from "../webResource"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export interface RetryData { + retryCount: number; + retryInterval: number; + error?: RetryError; +} + +export interface RetryError extends Error { + message: string; + code?: string; + innerError?: RetryError; +} + +export function systemErrorRetryPolicy( + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new SystemErrorRetryPolicy( + nextPolicy, + options, + retryCount, + retryInterval, + minRetryInterval, + maxRetryInterval + ); + }, + }; +} + +/** + * @class + * Instantiates a new "ExponentialRetryPolicyFilter" instance. + * + * @constructor + * @param {number} retryCount The client retry count. + * @param {number} retryInterval The client retry interval, in milliseconds. + * @param {number} minRetryInterval The minimum retry interval, in milliseconds. + * @param {number} maxRetryInterval The maximum retry interval, in milliseconds. + */ +export class SystemErrorRetryPolicy extends BaseRequestPolicy { + retryCount: number; + retryInterval: number; + minRetryInterval: number; + maxRetryInterval: number; + DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; + DEFAULT_CLIENT_RETRY_COUNT = 3; + DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; + DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; + + constructor( + nextPolicy: RequestPolicy, + options: RequestPolicyOptionsLike, + retryCount?: number, + retryInterval?: number, + minRetryInterval?: number, + maxRetryInterval?: number + ) { + super(nextPolicy, options); + this.retryCount = typeof retryCount === "number" ? retryCount : this.DEFAULT_CLIENT_RETRY_COUNT; + this.retryInterval = + typeof retryInterval === "number" ? retryInterval : this.DEFAULT_CLIENT_RETRY_INTERVAL; + this.minRetryInterval = + typeof minRetryInterval === "number" + ? minRetryInterval + : this.DEFAULT_CLIENT_MIN_RETRY_INTERVAL; + this.maxRetryInterval = + typeof maxRetryInterval === "number" + ? maxRetryInterval + : this.DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + } + + public sendRequest(request: WebResourceLike): Promise { + return this._nextPolicy + .sendRequest(request.clone()) + .catch((error) => retry(this, request, error.response, error)); + } +} + +/** + * Determines if the operation should be retried and how long to wait until the next retry. + * + * @param {number} statusCode The HTTP status code. + * @param {RetryData} retryData The retry data. + * @return {boolean} True if the operation qualifies for a retry; false otherwise. + */ +function shouldRetry(policy: SystemErrorRetryPolicy, retryData: RetryData): boolean { + let currentCount; + if (!retryData) { + throw new Error("retryData for the SystemErrorRetryPolicyFilter cannot be null."); + } else { + currentCount = retryData && retryData.retryCount; + } + return currentCount < policy.retryCount; +} + +/** + * Updates the retry data for the next attempt. + * + * @param {RetryData} retryData The retry data. + * @param {object} err The operation"s error, if any. 
+ */ +function updateRetryData( + policy: SystemErrorRetryPolicy, + retryData?: RetryData, + err?: RetryError +): RetryData { + if (!retryData) { + retryData = { + retryCount: 0, + retryInterval: 0, + }; + } + + if (err) { + if (retryData.error) { + err.innerError = retryData.error; + } + + retryData.error = err; + } + + // Adjust retry count + retryData.retryCount++; + + // Adjust retry interval + let incrementDelta = Math.pow(2, retryData.retryCount) - 1; + const boundedRandDelta = + policy.retryInterval * 0.8 + Math.floor(Math.random() * (policy.retryInterval * 0.4)); + incrementDelta *= boundedRandDelta; + + retryData.retryInterval = Math.min( + policy.minRetryInterval + incrementDelta, + policy.maxRetryInterval + ); + + return retryData; +} + +async function retry( + policy: SystemErrorRetryPolicy, + request: WebResourceLike, + operationResponse: HttpOperationResponse, + err?: RetryError, + retryData?: RetryData +): Promise { + retryData = updateRetryData(policy, retryData, err); + if ( + err && + err.code && + shouldRetry(policy, retryData) && + (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT") + ) { + // If previous operation ended with an error and the policy allows a retry, do that + try { + await utils.delay(retryData.retryInterval); + return policy._nextPolicy.sendRequest(request.clone()); + } catch (error) { + return retry(policy, request, operationResponse, error, retryData); + } + } else { + if (err) { + // If the operation failed in the end, return all errors instead of just the last one + return Promise.reject(retryData.error); + } + return operationResponse; + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts new file mode 100644 index 0000000000..20d77cddba --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/throttlingRetryPolicy.ts @@ -0,0 +1,105 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyOptionsLike, + RequestPolicyFactory, +} from "./requestPolicy"; +import { WebResourceLike } from "../webResource"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Constants } from "../util/constants"; +import { delay } from "../util/utils"; + +const StatusCodes = Constants.HttpConstants.StatusCodes; +const DEFAULT_RETRY_COUNT = 3; + +/** + * Options that control how to retry on response status code 429. + */ +export interface ThrottlingRetryOptions { + /** + * The maximum number of retry attempts. Defaults to 3. 
+ */ + maxRetries?: number; +} + +export function throttlingRetryPolicy( + maxRetries: number = DEFAULT_RETRY_COUNT +): RequestPolicyFactory { + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new ThrottlingRetryPolicy(nextPolicy, options, maxRetries); + }, + }; +} + +/** + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + */ +export class ThrottlingRetryPolicy extends BaseRequestPolicy { + private retryLimit: number; + + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike, retryLimit: number) { + super(nextPolicy, options); + this.retryLimit = retryLimit; + } + + public async sendRequest(httpRequest: WebResourceLike): Promise { + return this._nextPolicy.sendRequest(httpRequest.clone()).then((response) => { + return this.retry(httpRequest, response, 0); + }); + } + + private async retry( + httpRequest: WebResourceLike, + httpResponse: HttpOperationResponse, + retryCount: number + ): Promise { + if (httpResponse.status !== StatusCodes.TooManyRequests) { + return httpResponse; + } + + const retryAfterHeader: string | undefined = httpResponse.headers.get( + Constants.HeaderConstants.RETRY_AFTER + ); + + if (retryAfterHeader && retryCount < this.retryLimit) { + const delayInMs: number | undefined = ThrottlingRetryPolicy.parseRetryAfterHeader( + retryAfterHeader + ); + if (delayInMs) { + await delay(delayInMs); + const res = await this._nextPolicy.sendRequest(httpRequest); + return this.retry(httpRequest, res, retryCount + 1); + } + } + + return httpResponse; + } + + public static parseRetryAfterHeader(headerValue: string): number | undefined { + const retryAfterInSeconds = Number(headerValue); + if (Number.isNaN(retryAfterInSeconds)) { + return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); + } else { + return retryAfterInSeconds * 1000; + } + } + + public static parseDateRetryAfterHeader(headerValue: string): number | undefined { + try { + const now: number = Date.now(); + const date: number = Date.parse(headerValue); + const diff = date - now; + + return Number.isNaN(diff) ? undefined : diff; + } catch (error) { + return undefined; + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts b/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts new file mode 100644 index 0000000000..c4e70db967 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/policies/userAgentPolicy.ts @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
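The retry interval that updateRetryData() computes in systemErrorRetryPolicy.ts above combines exponential growth with jitter: (2^retryCount - 1) scaled by a random base between 0.8x and 1.2x the nominal interval, then clamped to the maximum. Restated as a standalone function with the default constants (the exact numbers vary with the jitter):

const RETRY_INTERVAL = 30 * 1000;     // DEFAULT_CLIENT_RETRY_INTERVAL
const MIN_RETRY_INTERVAL = 3 * 1000;  // DEFAULT_CLIENT_MIN_RETRY_INTERVAL
const MAX_RETRY_INTERVAL = 90 * 1000; // DEFAULT_CLIENT_MAX_RETRY_INTERVAL

function nextRetryInterval(retryCount: number): number {
  const incrementDelta = Math.pow(2, retryCount) - 1;
  // Jittered base in [0.8 * RETRY_INTERVAL, 1.2 * RETRY_INTERVAL).
  const boundedRandDelta =
    RETRY_INTERVAL * 0.8 + Math.floor(Math.random() * (RETRY_INTERVAL * 0.4));
  return Math.min(MIN_RETRY_INTERVAL + incrementDelta * boundedRandDelta, MAX_RETRY_INTERVAL);
}

console.log(nextRetryInterval(1)); // between 27 s and 39 s
console.log(nextRetryInterval(2)); // between 75 s and the 90 s cap
console.log(nextRetryInterval(3)); // always clamped to the 90 s cap with these defaults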
+ +import { HttpHeaders } from "../httpHeaders"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { Constants } from "../util/constants"; +import { WebResourceLike } from "../webResource"; +import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; +import { + BaseRequestPolicy, + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptionsLike, +} from "./requestPolicy"; + +export type TelemetryInfo = { key?: string; value?: string }; + +function getRuntimeInfo(): TelemetryInfo[] { + const msRestRuntime = { + key: "ms-rest-js", + value: Constants.msRestVersion, + }; + + return [msRestRuntime]; +} + +function getUserAgentString( + telemetryInfo: TelemetryInfo[], + keySeparator = " ", + valueSeparator = "/" +): string { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} + +export const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; + +export function getDefaultUserAgentValue(): string { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} + +export function userAgentPolicy(userAgentData?: TelemetryInfo): RequestPolicyFactory { + const key: string = + !userAgentData || userAgentData.key == undefined ? getDefaultUserAgentKey() : userAgentData.key; + const value: string = + !userAgentData || userAgentData.value == undefined + ? getDefaultUserAgentValue() + : userAgentData.value; + + return { + create: (nextPolicy: RequestPolicy, options: RequestPolicyOptionsLike) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} + +export class UserAgentPolicy extends BaseRequestPolicy { + constructor( + readonly _nextPolicy: RequestPolicy, + readonly _options: RequestPolicyOptionsLike, + protected headerKey: string, + protected headerValue: string + ) { + super(_nextPolicy, _options); + } + + sendRequest(request: WebResourceLike): Promise { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + + addUserAgentHeader(request: WebResourceLike): void { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts b/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts new file mode 100644 index 0000000000..93dd369f06 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/proxyAgent.ts @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
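A short usage sketch for the user-agent policy above: passing a TelemetryInfo overrides the default header value, while leaving key undefined keeps the platform-appropriate header name ("User-Agent" in Node, "x-ms-command-name" in the browser build). The import path assumes these helpers are re-exported from the package root, as in the published builds, and the extra telemetry value is hypothetical.

import { userAgentPolicy, getDefaultUserAgentValue } from "@azure/ms-rest-js";

// Prepend application telemetry while keeping the default "ms-rest-js/x.y.z Node/... OS/..." tail.
const factory = userAgentPolicy({
  value: `MyTool/0.1 ${getDefaultUserAgentValue()}`,
});

// The factory would then be handed to the client pipeline, e.g. via the ServiceClient
// options' requestPolicyFactories array, so every outgoing request gets the header applied.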
+ +import * as http from "http"; +import * as https from "https"; +import * as tunnel from "tunnel"; + +import { ProxySettings } from "./serviceClient"; +import { URLBuilder } from "./url"; +import { HttpHeadersLike } from "./httpHeaders"; + +export type ProxyAgent = { isHttps: boolean; agent: http.Agent | https.Agent }; +export function createProxyAgent( + requestUrl: string, + proxySettings: ProxySettings, + headers?: HttpHeadersLike +): ProxyAgent { + const tunnelOptions: tunnel.HttpsOverHttpsOptions = { + proxy: { + host: URLBuilder.parse(proxySettings.host).getHost() as string, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } else if (proxySettings.username) { + tunnelOptions.proxy!.proxyAuth = `${proxySettings.username}`; + } + + const requestScheme = URLBuilder.parse(requestUrl).getScheme() || ""; + const isRequestHttps = requestScheme.toLowerCase() === "https"; + const proxyScheme = URLBuilder.parse(proxySettings.host).getScheme() || ""; + const isProxyHttps = proxyScheme.toLowerCase() === "https"; + + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + + return proxyAgent; +} + +// Duplicate tunnel.HttpsOverHttpsOptions to avoid exporting createTunnel() with dependency on @types/tunnel +// createIunnel() is only imported by tests. +export interface HttpsProxyOptions { + host: string; + port: number; + localAddress?: string; + proxyAuth?: string; + headers?: { [key: string]: any }; + ca?: Buffer[]; + servername?: string; + key?: Buffer; + cert?: Buffer; +} + +interface HttpsOverHttpsOptions { + maxSockets?: number; + ca?: Buffer[]; + key?: Buffer; + cert?: Buffer; + proxy?: HttpsProxyOptions; +} + +export function createTunnel( + isRequestHttps: boolean, + isProxyHttps: boolean, + tunnelOptions: HttpsOverHttpsOptions +): http.Agent | https.Agent { + if (isRequestHttps && isProxyHttps) { + return tunnel.httpsOverHttps(tunnelOptions); + } else if (isRequestHttps && !isProxyHttps) { + return tunnel.httpsOverHttp(tunnelOptions); + } else if (!isRequestHttps && isProxyHttps) { + return tunnel.httpOverHttps(tunnelOptions); + } else { + return tunnel.httpOverHttp(tunnelOptions); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts b/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts new file mode 100644 index 0000000000..ba6cca81d6 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/queryCollectionFormat.ts @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +export enum QueryCollectionFormat { + Csv = ",", + Ssv = " ", + Tsv = "\t", + Pipes = "|", + Multi = "Multi", +} diff --git a/node_modules/@azure/ms-rest-js/lib/restError.ts b/node_modules/@azure/ms-rest-js/lib/restError.ts new file mode 100644 index 0000000000..88479c4467 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/restError.ts @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
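createTunnel() in proxyAgent.ts above selects one of four helpers from the tunnel package based on two booleans: whether the request is HTTPS and whether the proxy itself is HTTPS. The dispatch, restated as a small standalone function (the return values mirror the tunnel exports; no proxy is actually contacted here):

type TunnelKind = "httpsOverHttps" | "httpsOverHttp" | "httpOverHttps" | "httpOverHttp";

function pickTunnel(isRequestHttps: boolean, isProxyHttps: boolean): TunnelKind {
  if (isRequestHttps && isProxyHttps) return "httpsOverHttps"; // https request via https proxy
  if (isRequestHttps) return "httpsOverHttp";                  // https request via http proxy
  if (isProxyHttps) return "httpOverHttps";                    // http request via https proxy
  return "httpOverHttp";                                       // http request via http proxy
}

console.log(pickTunnel(true, false)); // "httpsOverHttp" - the common corporate-proxy case

Note that createProxyAgent() reports isHttps from the request scheme, not the proxy scheme, which is what the HTTP client later uses to decide between http.Agent and https.Agent handling.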
+ +import { HttpOperationResponse } from "./httpOperationResponse"; +import { WebResourceLike } from "./webResource"; + +export class RestError extends Error { + static readonly REQUEST_SEND_ERROR: string = "REQUEST_SEND_ERROR"; + static readonly REQUEST_ABORTED_ERROR: string = "REQUEST_ABORTED_ERROR"; + static readonly PARSE_ERROR: string = "PARSE_ERROR"; + + code?: string; + statusCode?: number; + request?: WebResourceLike; + response?: HttpOperationResponse; + body?: any; + constructor( + message: string, + code?: string, + statusCode?: number, + request?: WebResourceLike, + response?: HttpOperationResponse, + body?: any + ) { + super(message); + this.code = code; + this.statusCode = statusCode; + this.request = request; + this.response = response; + this.body = body; + + Object.setPrototypeOf(this, RestError.prototype); + } +} diff --git a/node_modules/@azure/ms-rest-js/lib/serializer.ts b/node_modules/@azure/ms-rest-js/lib/serializer.ts new file mode 100644 index 0000000000..9ea94fcc63 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/serializer.ts @@ -0,0 +1,1063 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import * as base64 from "./util/base64"; +import * as utils from "./util/utils"; + +export class Serializer { + constructor( + public readonly modelMappers: { [key: string]: any } = {}, + public readonly isXML?: boolean + ) {} + + validateConstraints(mapper: Mapper, value: any, objectName: string): void { + const failValidation = (constraintName: keyof MapperConstraints, constraintValue: any) => { + throw new Error( + `"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.` + ); + }; + if (mapper.constraints && value != undefined) { + const { + ExclusiveMaximum, + ExclusiveMinimum, + InclusiveMaximum, + InclusiveMinimum, + MaxItems, + MaxLength, + MinItems, + MinLength, + MultipleOf, + Pattern, + UniqueItems, + } = mapper.constraints; + if (ExclusiveMaximum != undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum != undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum != undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum != undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems != undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength != undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems != undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength != undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf != undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern: RegExp = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if ( + UniqueItems && + value.some((item: any, i: number, ar: Array) => ar.indexOf(item) !== i) + ) { + failValidation("UniqueItems", UniqueItems); + } + } + } + + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param {Mapper} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} object A valid Javascript object to be serialized + * + * @param {string} objectName Name of the serialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid serialized Javascript object + */ + serialize(mapper: Mapper, object: any, objectName?: string): any { + let payload: any = {}; + const mapperType = mapper.type.name as string; + if (!objectName) { + objectName = mapper.serializedName!; + } + if (mapperType.match(/^Sequence$/gi) !== null) { + payload = []; + } + + if (mapper.isConstant) { + object = mapper.defaultValue; + } + + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". + // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + + const { required, nullable } = mapper; + + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && object == undefined) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + + if (object == undefined) { + payload = object; + } else { + // Validate Constraints if any + this.validateConstraints(mapper, object, objectName); + if (mapperType.match(/^any$/gi) !== null) { + payload = object; + } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/gi) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } else if (mapperType.match(/^Enum$/gi) !== null) { + const enumMapper: EnumMapper = mapper as EnumMapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } else if ( + mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/gi) !== null + ) { + payload = serializeDateTypes(mapperType, object, objectName); + } else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = serializeByteArrayType(objectName, object); + } else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = serializeBase64UrlType(objectName, object); + } else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = serializeSequenceType(this, mapper as SequenceMapper, object, objectName); + } else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = serializeDictionaryType(this, mapper as DictionaryMapper, object, objectName); + } else if (mapperType.match(/^Composite$/gi) !== null) { + payload = serializeCompositeType(this, mapper as CompositeMapper, object, objectName); + } + } + return payload; + } + + /** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param {object} mapper The mapper which defines the metadata of the serializable object + * + * @param {object|string|Array|number|boolean|Date|stream} 
responseBody A valid Javascript entity to be deserialized + * + * @param {string} objectName Name of the deserialized object + * + * @returns {object|string|Array|number|boolean|Date|stream} A valid deserialized Javascript object + */ + deserialize(mapper: Mapper, responseBody: any, objectName: string): any { + if (responseBody == undefined) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. + responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + + let payload: any; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName!; + } + + if (mapperType.match(/^Composite$/gi) !== null) { + payload = deserializeCompositeType(this, mapper as CompositeMapper, responseBody, objectName); + } else { + if (this.isXML) { + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$") and body ("_") properties, then just reduce the responseBody value to + * the body ("_") property. + */ + if (responseBody["$"] != undefined && responseBody["_"] != undefined) { + responseBody = responseBody["_"]; + } + } + + if (mapperType.match(/^Number$/gi) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } else if (mapperType.match(/^Boolean$/gi) !== null) { + if (responseBody === "true") { + payload = true; + } else if (responseBody === "false") { + payload = false; + } else { + payload = responseBody; + } + } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/gi) !== null) { + payload = responseBody; + } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/gi) !== null) { + payload = new Date(responseBody); + } else if (mapperType.match(/^UnixTime$/gi) !== null) { + payload = unixTimeToDate(responseBody); + } else if (mapperType.match(/^ByteArray$/gi) !== null) { + payload = base64.decodeString(responseBody); + } else if (mapperType.match(/^Base64Url$/gi) !== null) { + payload = base64UrlToByteArray(responseBody); + } else if (mapperType.match(/^Sequence$/gi) !== null) { + payload = deserializeSequenceType(this, mapper as SequenceMapper, responseBody, objectName); + } else if (mapperType.match(/^Dictionary$/gi) !== null) { + payload = deserializeDictionaryType( + this, + mapper as DictionaryMapper, + responseBody, + objectName + ); + } + } + + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + + return payload; + } +} + +function trimEnd(str: string, ch: string) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} + +function bufferToBase64Url(buffer: any): string | undefined { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. 
+ return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} + +function base64UrlToByteArray(str: string): Uint8Array | undefined { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/\-/g, "+").replace(/\_/g, "/"); + // Base64 to Uint8Array. + return base64.decodeString(str); +} + +function splitSerializeName(prop: string | undefined): string[] { + const classes: string[] = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + + return classes; +} + +function dateToUnixTime(d: string | Date): number | undefined { + if (!d) { + return undefined; + } + + if (typeof d.valueOf() === "string") { + d = new Date(d as string); + } + return Math.floor((d as Date).getTime() / 1000); +} + +function unixTimeToDate(n: number): Date | undefined { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} + +function serializeBasicTypes(typeName: string, objectName: string, value: any): any { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/gi) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } else if (typeName.match(/^String$/gi) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } else if (typeName.match(/^Uuid$/gi) !== null) { + if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { + throw new Error( + `${objectName} with value "${value}" must be of type string and a valid uuid.` + ); + } + } else if (typeName.match(/^Boolean$/gi) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } else if (typeName.match(/^Stream$/gi) !== null) { + const objectType = typeof value; + if ( + objectType !== "string" && + objectType !== "function" && + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + !(typeof Blob === "function" && value instanceof Blob) + ) { + throw new Error( + `${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.` + ); + } + } + } + return value; +} + +function serializeEnumType(objectName: string, allowedValues: Array, value: any): any { + if (!allowedValues) { + throw new Error( + `Please provide a set of allowedValues to validate ${objectName} as an Enum Type.` + ); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error( + `${value} is not a valid value for ${objectName}. 
The valid values are: ${JSON.stringify( + allowedValues + )}.` + ); + } + return value; +} + +function serializeByteArrayType(objectName: string, value: any): any { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} + +function serializeBase64UrlType(objectName: string, value: any): any { + if (value != undefined) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} + +function serializeDateTypes(typeName: string, value: any, objectName: string) { + if (value != undefined) { + if (typeName.match(/^Date$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } else if (typeName.match(/^DateTime$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } else if (typeName.match(/^DateTimeRfc1123$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } else if (typeName.match(/^UnixTime$/gi) !== null) { + if ( + !( + value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))) + ) + ) { + throw new Error( + `${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.` + ); + } + value = dateToUnixTime(value); + } else if (typeName.match(/^TimeSpan$/gi) !== null) { + if (!utils.isDuration(value)) { + throw new Error( + `${objectName} must be a string in ISO 8601 format. 
Instead was "${value}".` + ); + } + value = value; + } + } + return value; +} + +function serializeSequenceType( + serializer: Serializer, + mapper: SequenceMapper, + object: any, + objectName: string +) { + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + const elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error( + `element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.` + ); + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + tempArray[i] = serializer.serialize(elementType, object[i], objectName); + } + return tempArray; +} + +function serializeDictionaryType( + serializer: Serializer, + mapper: DictionaryMapper, + object: any, + objectName: string +) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error( + `"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.` + ); + } + const tempDictionary: { [key: string]: any } = {}; + for (const key of Object.keys(object)) { + tempDictionary[key] = serializer.serialize(valueType, object[key], objectName + "." + key); + } + return tempDictionary; +} + +/** + * Resolves a composite mapper's modelProperties. + * @param serializer the serializer containing the entire set of mappers + * @param mapper the composite mapper to resolve + */ +function resolveModelProperties( + serializer: Serializer, + mapper: CompositeMapper, + objectName: string +): { [propertyName: string]: Mapper } { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const className = mapper.type.className; + if (!className) { + throw new Error( + `Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify( + mapper, + undefined, + 2 + )}".` + ); + } + + const modelMapper = serializer.modelMappers[className]; + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${className}".`); + } + modelProps = modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error( + `modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify( + modelMapper + )}" of type "${className}" for object "${objectName}".` + ); + } + } + + return modelProps; +} + +function serializeCompositeType( + serializer: Serializer, + mapper: CompositeMapper, + object: any, + objectName: string +) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + + if (object != undefined) { + const payload: any = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + + let propName: string | undefined; + let parentObject: any = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } else { + const paths = splitSerializeName(propertyMapper.serializedName!); + propName = paths.pop(); + + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if (childObject == undefined && object[key] != undefined) { + 
parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; + } + } + + if (parentObject != undefined) { + const propertyObjectName = + propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if ( + polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + toSerialize == undefined + ) { + toSerialize = mapper.serializedName; + } + + const serializedValue = serializer.serialize( + propertyMapper, + toSerialize, + propertyObjectName + ); + if (serializedValue !== undefined && propName != undefined) { + if (propertyMapper.xmlIsAttribute) { + // $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. + parentObject.$ = parentObject.$ || {}; + parentObject.$[propName] = serializedValue; + } else if (propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName!]: serializedValue }; + } else { + parentObject[propName] = serializedValue; + } + } + } + } + + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize( + additionalPropertiesMapper, + object[clientPropName], + objectName + '["' + clientPropName + '"]' + ); + } + } + } + + return payload; + } + return object; +} + +function isSpecialXmlProperty(propertyName: string): boolean { + return ["$", "_"].includes(propertyName); +} + +function deserializeCompositeType( + serializer: Serializer, + mapper: CompositeMapper, + responseBody: any, + objectName: string +): any { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance: { [key: string]: any } = {}; + const handledPropertyNames: string[] = []; + + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName!); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." 
+ serializedName; + } + + const headerCollectionPrefix = (propertyMapper as DictionaryMapper).headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary: any = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize( + (propertyMapper as DictionaryMapper).type.value, + responseBody[headerKey], + propertyObjectName + ); + } + + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody.$) { + instance[key] = serializer.deserialize( + propertyMapper, + responseBody.$[xmlName!], + propertyObjectName + ); + } else { + const propertyName = xmlElementName || xmlName || serializedName; + let unwrappedProperty = responseBody[propertyName!]; + if (propertyMapper.xmlIsWrapped) { + unwrappedProperty = responseBody[xmlName!]; + unwrappedProperty = unwrappedProperty && unwrappedProperty[xmlElementName!]; + + const isEmptyWrappedList = unwrappedProperty === undefined; + if (isEmptyWrappedList) { + unwrappedProperty = []; + } + } + instance[key] = serializer.deserialize( + propertyMapper, + unwrappedProperty, + propertyObjectName + ); + } + } else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + for (const item of paths) { + if (!res) break; + res = res[item]; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if ( + polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + propertyInstance == undefined + ) { + propertyInstance = mapper.serializedName; + } + + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize( + propertyMapper, + propertyInstance, + propertyObjectName + ); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [key, value] of Object.entries(instance)) { + if (!arrayInstance.hasOwnProperty(key)) { + arrayInstance[key] = value; + } + } + instance = arrayInstance; + } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize( + propertyMapper, + propertyInstance, + propertyObjectName + ); + instance[key] = serializedValue; + } + } + } + + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName: string) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize( + additionalPropertiesMapper, + responseBody[responsePropName], + objectName + '["' + responsePropName + '"]' + ); + } + } + } else if (responseBody) { + for (const key of Object.keys(responseBody)) { + if ( + instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key) + ) { + instance[key] = responseBody[key]; + } + } + } + + return instance; +} + +function deserializeDictionaryType( + serializer: Serializer, + mapper: DictionaryMapper, + responseBody: any, + objectName: string +): any { + /*jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error( + `"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}` + ); + } + if (responseBody) { + const tempDictionary: { [key: string]: any } = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName); + } + return tempDictionary; + } + return responseBody; +} + +function deserializeSequenceType( + serializer: Serializer, + mapper: SequenceMapper, + responseBody: any, + objectName: string +): any { + /*jshint validthis: true */ + const element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error( + `element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}` + ); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`); + } + return tempArray; + } + return responseBody; +} + +function getPolymorphicMapper( + serializer: Serializer, + mapper: CompositeMapper, + object: any, + polymorphicPropertyName: 
"clientName" | "serializedName" +): CompositeMapper { + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName != undefined) { + const discriminatorValue = object[discriminatorName]; + if (discriminatorValue != undefined) { + const typeName = mapper.type.uberParent || mapper.type.className; + const indexDiscriminator = + discriminatorValue === typeName + ? discriminatorValue + : typeName + "." + discriminatorValue; + const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} + +function getPolymorphicDiscriminatorRecursively( + serializer: Serializer, + mapper: CompositeMapper +): PolymorphicDiscriminator | undefined { + return ( + mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className) + ); +} + +function getPolymorphicDiscriminatorSafely(serializer: Serializer, typeName?: string) { + return ( + typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator + ); +} + +export interface MapperConstraints { + InclusiveMaximum?: number; + ExclusiveMaximum?: number; + InclusiveMinimum?: number; + ExclusiveMinimum?: number; + MaxLength?: number; + MinLength?: number; + Pattern?: RegExp; + MaxItems?: number; + MinItems?: number; + UniqueItems?: true; + MultipleOf?: number; +} + +export type MapperType = + | SimpleMapperType + | CompositeMapperType + | SequenceMapperType + | DictionaryMapperType + | EnumMapperType; + +export interface SimpleMapperType { + name: + | "Base64Url" + | "Boolean" + | "ByteArray" + | "Date" + | "DateTime" + | "DateTimeRfc1123" + | "Object" + | "Stream" + | "String" + | "TimeSpan" + | "UnixTime" + | "Uuid" + | "Number" + | "any"; +} + +export interface CompositeMapperType { + name: "Composite"; + + // Only one of the two below properties should be present. + // Use className to reference another type definition, + // and use modelProperties/additionalProperties when the reference to the other type has been resolved. 
+ className?: string; + + modelProperties?: { [propertyName: string]: Mapper }; + additionalProperties?: Mapper; + + uberParent?: string; + polymorphicDiscriminator?: PolymorphicDiscriminator; +} + +export interface SequenceMapperType { + name: "Sequence"; + element: Mapper; +} + +export interface DictionaryMapperType { + name: "Dictionary"; + value: Mapper; +} + +export interface EnumMapperType { + name: "Enum"; + allowedValues: any[]; +} + +export interface BaseMapper { + xmlName?: string; + xmlIsAttribute?: boolean; + xmlElementName?: string; + xmlIsWrapped?: boolean; + readOnly?: boolean; + isConstant?: boolean; + required?: boolean; + nullable?: boolean; + serializedName?: string; + type: MapperType; + defaultValue?: any; + constraints?: MapperConstraints; +} + +export type Mapper = BaseMapper | CompositeMapper | SequenceMapper | DictionaryMapper | EnumMapper; + +export interface PolymorphicDiscriminator { + serializedName: string; + clientName: string; + [key: string]: string; +} + +export interface CompositeMapper extends BaseMapper { + type: CompositeMapperType; +} + +export interface SequenceMapper extends BaseMapper { + type: SequenceMapperType; +} + +export interface DictionaryMapper extends BaseMapper { + type: DictionaryMapperType; + headerCollectionPrefix?: string; +} + +export interface EnumMapper extends BaseMapper { + type: EnumMapperType; +} + +export interface UrlParameterValue { + value: string; + skipUrlEncoding: boolean; +} + +// TODO: why is this here? +export function serializeObject(toSerialize: any): any { + if (toSerialize == undefined) return undefined; + if (toSerialize instanceof Uint8Array) { + toSerialize = base64.encodeByteArray(toSerialize); + return toSerialize; + } else if (toSerialize instanceof Date) { + return toSerialize.toISOString(); + } else if (Array.isArray(toSerialize)) { + const array = []; + for (let i = 0; i < toSerialize.length; i++) { + array.push(serializeObject(toSerialize[i])); + } + return array; + } else if (typeof toSerialize === "object") { + const dictionary: { [key: string]: any } = {}; + for (const property in toSerialize) { + dictionary[property] = serializeObject(toSerialize[property]); + } + return dictionary; + } + return toSerialize; +} + +/** + * Utility function to create a K:V from a list of strings + */ +function strEnum(o: Array): { [K in T]: K } { + const result: any = {}; + for (const key of o) { + result[key] = key; + } + return result; +} + +export const MapperType = strEnum([ + "Base64Url", + "Boolean", + "ByteArray", + "Composite", + "Date", + "DateTime", + "DateTimeRfc1123", + "Dictionary", + "Enum", + "Number", + "Object", + "Sequence", + "String", + "Stream", + "TimeSpan", + "UnixTime", +]); diff --git a/node_modules/@azure/ms-rest-js/lib/serviceClient.ts b/node_modules/@azure/ms-rest-js/lib/serviceClient.ts new file mode 100644 index 0000000000..e6e321850d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/serviceClient.ts @@ -0,0 +1,852 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. 
+ +import { TokenCredential, isTokenCredential } from "@azure/core-auth"; +import { ServiceClientCredentials } from "./credentials/serviceClientCredentials"; +import { DefaultHttpClient } from "./defaultHttpClient"; +import { HttpClient } from "./httpClient"; +import { HttpOperationResponse, RestResponse } from "./httpOperationResponse"; +import { HttpPipelineLogger } from "./httpPipelineLogger"; +import { OperationArguments } from "./operationArguments"; +import { + getPathStringFromParameter, + getPathStringFromParameterPath, + OperationParameter, + ParameterPath, +} from "./operationParameter"; +import { isStreamOperation, OperationSpec } from "./operationSpec"; +import { + deserializationPolicy, + DeserializationContentTypes, +} from "./policies/deserializationPolicy"; +import { exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; +import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; +import { + userAgentPolicy, + getDefaultUserAgentHeaderName, + getDefaultUserAgentValue, +} from "./policies/userAgentPolicy"; +import { DefaultRedirectOptions, RedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; +import { + RequestPolicy, + RequestPolicyFactory, + RequestPolicyOptions, + RequestPolicyOptionsLike, +} from "./policies/requestPolicy"; +import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; +import { signingPolicy } from "./policies/signingPolicy"; +import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; +import { QueryCollectionFormat } from "./queryCollectionFormat"; +import { CompositeMapper, DictionaryMapper, Mapper, MapperType, Serializer } from "./serializer"; +import { URLBuilder } from "./url"; +import * as utils from "./util/utils"; +import { stringifyXML } from "./util/xml"; +import { + RequestOptionsBase, + RequestPrepareOptions, + WebResourceLike, + isWebResourceLike, + WebResource, +} from "./webResource"; +import { OperationResponse } from "./operationResponse"; +import { ServiceCallback } from "./util/utils"; +import { agentPolicy } from "./policies/agentPolicy"; +import { proxyPolicy, getDefaultProxySettings } from "./policies/proxyPolicy"; +import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; +import { Agent } from "http"; +import { + AzureIdentityCredentialAdapter, + azureResourceManagerEndpoints, +} from "./credentials/azureIdentityTokenCredentialAdapter"; + +/** + * HTTP proxy settings (Node.js only) + */ +export interface ProxySettings { + host: string; + port: number; + username?: string; + password?: string; +} + +/** + * HTTP and HTTPS agents (Node.js only) + */ +export interface AgentSettings { + http: Agent; + https: Agent; +} + +/** + * Options to be provided while creating the client. + */ +export interface ServiceClientOptions { + /** + * An array of factories which get called to create the RequestPolicy pipeline used to send a HTTP + * request on the wire, or a function that takes in the defaultRequestPolicyFactories and returns + * the requestPolicyFactories that will be used. + */ + requestPolicyFactories?: + | RequestPolicyFactory[] + | ((defaultRequestPolicyFactories: RequestPolicyFactory[]) => void | RequestPolicyFactory[]); + /** + * The HttpClient that will be used to send HTTP requests. + */ + httpClient?: HttpClient; + /** + * The HttpPipelineLogger that can be used to debug RequestPolicies within the HTTP pipeline. + */ + httpPipelineLogger?: HttpPipelineLogger; + /** + * If set to true, turn off the default retry policy. 
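+   * (When true, createDefaultRequestPolicyFactories below skips the exponentialRetryPolicy,
+   * systemErrorRetryPolicy and throttlingRetryPolicy factories.)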
+ */ + noRetryPolicy?: boolean; + /** + * Gets or sets the retry timeout in seconds for AutomaticRPRegistration. Default value is 30. + */ + rpRegistrationRetryTimeout?: number; + /** + * Whether or not to generate a client request ID header for each HTTP request. + */ + generateClientRequestIdHeader?: boolean; + /** + * Whether to include credentials in CORS requests in the browser. + * See https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest/withCredentials for more information. + */ + withCredentials?: boolean; + /** + * If specified, a GenerateRequestIdPolicy will be added to the HTTP pipeline that will add a + * header to all outgoing requests with this header name and a random UUID as the request ID. + */ + clientRequestIdHeaderName?: string; + /** + * The content-types that will be associated with JSON or XML serialization. + */ + deserializationContentTypes?: DeserializationContentTypes; + /** + * The header name to use for the telemetry header while sending the request. If this is not + * specified, then "User-Agent" will be used when running on Node.js and "x-ms-command-name" will + * be used when running in a browser. + */ + userAgentHeaderName?: string | ((defaultUserAgentHeaderName: string) => string); + /** + * The string to be set to the telemetry header while sending the request, or a function that + * takes in the default user-agent string and returns the user-agent string that will be used. + */ + userAgent?: string | ((defaultUserAgent: string) => string); + /** + * Proxy settings which will be used for every HTTP request (Node.js only). + */ + proxySettings?: ProxySettings; + /** + * Options for how redirect responses are handled. + */ + redirectOptions?: RedirectOptions; + /** + * HTTP and HTTPS agents which will be used for every HTTP request (Node.js only). + */ + agentSettings?: AgentSettings; + /** + * If specified: + * - This `baseUri` becomes the base URI that requests will be made against for this ServiceClient. + * - If the `baseUri` matches a known resource manager endpoint and if a `TokenCredential` was passed through the constructor, this `baseUri` defines the `getToken` scope to be `${options.baseUri}/.default`. Otherwise, the scope would default to "https://management.azure.com/.default". + * + * If it is not specified: + * - All OperationSpecs must contain a baseUrl property. + * - If a `TokenCredential` was passed through the constructor, the `getToken` scope is set to be "https://management.azure.com/.default". + */ + baseUri?: string; +} + +/** + * @class + * Initializes a new instance of the ServiceClient. + */ +export class ServiceClient { + /** + * The base URI against which requests will be made when using this ServiceClient instance. + * + * This can be set either by setting the `baseUri` in the `options` parameter to the ServiceClient constructor or directly after constructing the ServiceClient. + * If set via the ServiceClient constructor when using the overload that takes the `TokenCredential`, and if it matches a known resource manager endpoint, this base URI sets the scope used to get the AAD token to `${baseUri}/.default` instead of the default "https://management.azure.com/.default" + * + * If it is not specified, all OperationSpecs must contain a baseUrl property. + */ + protected baseUri?: string; + + /** + * The default request content type for the service. + * Used if no requestContentType is present on an OperationSpec. 
+ */ + protected requestContentType?: string; + + /** + * The HTTP client that will be used to send requests. + */ + private readonly _httpClient: HttpClient; + private readonly _requestPolicyOptions: RequestPolicyOptionsLike; + + private readonly _requestPolicyFactories: RequestPolicyFactory[]; + private readonly _withCredentials: boolean; + + /** + * The ServiceClient constructor + * @constructor + * @param {ServiceClientCredentials} [credentials] The credentials object used for authentication. + * @param {ServiceClientOptions} [options] The service client options that govern the behavior of the client. + */ + constructor( + credentials?: ServiceClientCredentials | TokenCredential, + options?: ServiceClientOptions + ) { + if (!options) { + options = {}; + } + + if (options.baseUri) { + this.baseUri = options.baseUri; + } + + let serviceClientCredentials: ServiceClientCredentials | undefined; + if (isTokenCredential(credentials)) { + let scope: string | undefined = undefined; + if (options?.baseUri && azureResourceManagerEndpoints.includes(options?.baseUri)) { + scope = `${options.baseUri}/.default`; + } + serviceClientCredentials = new AzureIdentityCredentialAdapter(credentials, scope); + } else { + serviceClientCredentials = credentials; + } + + if (serviceClientCredentials && !serviceClientCredentials.signRequest) { + throw new Error("credentials argument needs to implement signRequest method"); + } + + this._withCredentials = options.withCredentials || false; + this._httpClient = options.httpClient || new DefaultHttpClient(); + this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); + + let requestPolicyFactories: RequestPolicyFactory[]; + if (Array.isArray(options.requestPolicyFactories)) { + requestPolicyFactories = options.requestPolicyFactories; + } else { + requestPolicyFactories = createDefaultRequestPolicyFactories( + serviceClientCredentials, + options + ); + if (options.requestPolicyFactories) { + const newRequestPolicyFactories: + | void + | RequestPolicyFactory[] = options.requestPolicyFactories(requestPolicyFactories); + if (newRequestPolicyFactories) { + requestPolicyFactories = newRequestPolicyFactories; + } + } + } + this._requestPolicyFactories = requestPolicyFactories; + } + + /** + * Send the provided httpRequest. + */ + sendRequest(options: RequestPrepareOptions | WebResourceLike): Promise { + if (options === null || options === undefined || typeof options !== "object") { + throw new Error("options cannot be null or undefined and it must be of type object."); + } + + let httpRequest: WebResourceLike; + try { + if (isWebResourceLike(options)) { + options.validateRequestProperties(); + httpRequest = options; + } else { + httpRequest = new WebResource(); + httpRequest = httpRequest.prepare(options); + } + } catch (error) { + return Promise.reject(error); + } + + let httpPipeline: RequestPolicy = this._httpClient; + if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { + for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { + httpPipeline = this._requestPolicyFactories[i].create( + httpPipeline, + this._requestPolicyOptions + ); + } + } + return httpPipeline.sendRequest(httpRequest); + } + + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @param {OperationArguments} operationArguments The arguments that the HTTP request's templated values will be populated from. + * @param {OperationSpec} operationSpec The OperationSpec to use to populate the httpRequest. 
+ * @param {ServiceCallback} callback The callback to call when the response is received. + */ + sendOperationRequest( + operationArguments: OperationArguments, + operationSpec: OperationSpec, + callback?: ServiceCallback + ): Promise { + if (typeof operationArguments.options === "function") { + callback = operationArguments.options; + operationArguments.options = undefined; + } + + const httpRequest = new WebResource(); + + let result: Promise; + try { + const baseUri: string | undefined = operationSpec.baseUrl || this.baseUri; + if (!baseUri) { + throw new Error( + "If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use." + ); + } + + httpRequest.method = operationSpec.httpMethod; + httpRequest.operationSpec = operationSpec; + + const requestUrl: URLBuilder = URLBuilder.parse(baseUri); + if (operationSpec.path) { + requestUrl.appendPath(operationSpec.path); + } + if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue: string = getOperationArgumentValueFromParameter( + this, + operationArguments, + urlParameter, + operationSpec.serializer + ); + urlParameterValue = operationSpec.serializer.serialize( + urlParameter.mapper, + urlParameterValue, + getPathStringFromParameter(urlParameter) + ); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + requestUrl.replaceAll( + `{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, + urlParameterValue + ); + } + } + if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { + for (const queryParameter of operationSpec.queryParameters) { + let queryParameterValue: any = getOperationArgumentValueFromParameter( + this, + operationArguments, + queryParameter, + operationSpec.serializer + ); + if (queryParameterValue != undefined) { + queryParameterValue = operationSpec.serializer.serialize( + queryParameter.mapper, + queryParameterValue, + getPathStringFromParameter(queryParameter) + ); + if (queryParameter.collectionFormat != undefined) { + if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { + if (queryParameterValue.length === 0) { + queryParameterValue = ""; + } else { + for (const index in queryParameterValue) { + const item = queryParameterValue[index]; + queryParameterValue[index] = item == undefined ? 
"" : item.toString(); + } + } + } else if ( + queryParameter.collectionFormat === QueryCollectionFormat.Ssv || + queryParameter.collectionFormat === QueryCollectionFormat.Tsv + ) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + for (const index in queryParameterValue) { + if ( + queryParameterValue[index] !== undefined && + queryParameterValue[index] !== null + ) { + queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); + } + } + } else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + if ( + queryParameter.collectionFormat != undefined && + queryParameter.collectionFormat !== QueryCollectionFormat.Multi && + queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && + queryParameter.collectionFormat !== QueryCollectionFormat.Tsv + ) { + queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); + } + requestUrl.setQueryParameter( + queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), + queryParameterValue + ); + } + } + } + httpRequest.url = requestUrl.toString(); + + const contentType = operationSpec.contentType || this.requestContentType; + if (contentType) { + httpRequest.headers.set("Content-Type", contentType); + } + + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue: any = getOperationArgumentValueFromParameter( + this, + operationArguments, + headerParameter, + operationSpec.serializer + ); + if (headerValue != undefined) { + headerValue = operationSpec.serializer.serialize( + headerParameter.mapper, + headerValue, + getPathStringFromParameter(headerParameter) + ); + const headerCollectionPrefix = (headerParameter.mapper as DictionaryMapper) + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } else { + httpRequest.headers.set( + headerParameter.mapper.serializedName || + getPathStringFromParameter(headerParameter), + headerValue + ); + } + } + } + } + + const options: RequestOptionsBase | undefined = operationArguments.options; + if (options) { + if (options.customHeaders) { + for (const customHeaderName in options.customHeaders) { + httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); + } + } + + if (options.abortSignal) { + httpRequest.abortSignal = options.abortSignal; + } + + if (options.timeout) { + httpRequest.timeout = options.timeout; + } + + if (options.onUploadProgress) { + httpRequest.onUploadProgress = options.onUploadProgress; + } + + if (options.onDownloadProgress) { + httpRequest.onDownloadProgress = options.onDownloadProgress; + } + } + + httpRequest.withCredentials = this._withCredentials; + + serializeRequestBody(this, httpRequest, operationArguments, operationSpec); + + if (httpRequest.streamResponseBody == undefined) { + httpRequest.streamResponseBody = isStreamOperation(operationSpec); + } + + result = this.sendRequest(httpRequest).then((res) => + flattenResponse(res, operationSpec.responses[res.status]) + ); + } catch (error) { + result = Promise.reject(error); + } + + const cb = callback; + if (cb) { + result + // tslint:disable-next-line:no-null-keyword + .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) + .catch((err) => cb(err)); + } + + return 
result; + } +} + +export function serializeRequestBody( + serviceClient: ServiceClient, + httpRequest: WebResourceLike, + operationArguments: OperationArguments, + operationSpec: OperationSpec +): void { + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + httpRequest.body = getOperationArgumentValueFromParameter( + serviceClient, + operationArguments, + operationSpec.requestBody, + operationSpec.serializer + ); + + const bodyMapper = operationSpec.requestBody.mapper; + const { required, xmlName, xmlElementName, serializedName } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if (httpRequest.body != undefined || required) { + const requestBodyParameterPathString: string = getPathStringFromParameter( + operationSpec.requestBody + ); + httpRequest.body = operationSpec.serializer.serialize( + bodyMapper, + httpRequest.body, + requestBodyParameterPathString + ); + const isStream = typeName === MapperType.Stream; + if (operationSpec.isXML) { + if (typeName === MapperType.Sequence) { + httpRequest.body = stringifyXML( + utils.prepareXMLRootList( + httpRequest.body, + xmlElementName || xmlName || serializedName! + ), + { rootName: xmlName || serializedName } + ); + } else if (!isStream) { + httpRequest.body = stringifyXML(httpRequest.body, { + rootName: xmlName || serializedName, + }); + } + } else if (!isStream) { + httpRequest.body = JSON.stringify(httpRequest.body); + } + } + } catch (error) { + throw new Error( + `Error "${error.message}" occurred in serializing the payload - ${JSON.stringify( + serializedName, + undefined, + " " + )}.` + ); + } + } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + httpRequest.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue: any = getOperationArgumentValueFromParameter( + serviceClient, + operationArguments, + formDataParameter, + operationSpec.serializer + ); + if (formDataParameterValue != undefined) { + const formDataParameterPropertyName: string = + formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize( + formDataParameter.mapper, + formDataParameterValue, + getPathStringFromParameter(formDataParameter) + ); + } + } + } +} + +function isRequestPolicyFactory(instance: any): instance is RequestPolicyFactory { + return typeof instance.create === "function"; +} + +function getValueOrFunctionResult( + value: undefined | string | ((defaultValue: string) => string), + defaultValueCreator: () => string +): string { + let result: string; + if (typeof value === "string") { + result = value; + } else { + result = defaultValueCreator(); + if (typeof value === "function") { + result = value(result); + } + } + return result; +} + +function createDefaultRequestPolicyFactories( + credentials: ServiceClientCredentials | RequestPolicyFactory | undefined, + options: ServiceClientOptions +): RequestPolicyFactory[] { + const factories: RequestPolicyFactory[] = []; + + if (options.generateClientRequestIdHeader) { + factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); + } + + if (credentials) { + if (isRequestPolicyFactory(credentials)) { + factories.push(credentials); + } else { + factories.push(signingPolicy(credentials)); + } + } + + const userAgentHeaderName: string = getValueOrFunctionResult( + options.userAgentHeaderName, + getDefaultUserAgentHeaderName + ); + const 
userAgentHeaderValue: string = getValueOrFunctionResult( + options.userAgent, + getDefaultUserAgentValue + ); + if (userAgentHeaderName && userAgentHeaderValue) { + factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); + } + + const redirectOptions = { + ...DefaultRedirectOptions, + ...options.redirectOptions, + }; + if (redirectOptions.handleRedirects) { + factories.push(redirectPolicy(redirectOptions.maxRetries)); + } + + factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); + + if (!options.noRetryPolicy) { + factories.push(exponentialRetryPolicy()); + factories.push(systemErrorRetryPolicy()); + factories.push(throttlingRetryPolicy()); + } + + factories.push(deserializationPolicy(options.deserializationContentTypes)); + + const proxySettings = options.proxySettings || getDefaultProxySettings(); + if (proxySettings) { + factories.push(proxyPolicy(proxySettings)); + } + + if (options.agentSettings) { + factories.push(agentPolicy(options.agentSettings)); + } + + return factories; +} + +export type PropertyParent = { [propertyName: string]: any }; + +/** + * Get the property parent for the property at the provided path when starting with the provided + * parent object. + */ +export function getPropertyParent(parent: PropertyParent, propertyPath: string[]): PropertyParent { + if (parent && propertyPath) { + const propertyPathLength: number = propertyPath.length; + for (let i = 0; i < propertyPathLength - 1; ++i) { + const propertyName: string = propertyPath[i]; + if (!parent[propertyName]) { + parent[propertyName] = {}; + } + parent = parent[propertyName]; + } + } + return parent; +} + +function getOperationArgumentValueFromParameter( + serviceClient: ServiceClient, + operationArguments: OperationArguments, + parameter: OperationParameter, + serializer: Serializer +): any { + return getOperationArgumentValueFromParameterPath( + serviceClient, + operationArguments, + parameter.parameterPath, + parameter.mapper, + serializer + ); +} + +export function getOperationArgumentValueFromParameterPath( + serviceClient: ServiceClient, + operationArguments: OperationArguments, + parameterPath: ParameterPath, + parameterMapper: Mapper, + serializer: Serializer +): any { + let value: any; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } else { + let propertySearchResult: PropertySearchResult = getPropertyFromParameterPath( + operationArguments, + parameterPath + ); + if (!propertySearchResult.propertyFound) { + propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); + } + + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + + // Serialize just for validation purposes. 
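+        // (The serialized result is intentionally unused; the call appears to exist only so that
+        // mapper/constraint problems surface as errors before the request is built.)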
+ const parameterPathString: string = getPathStringFromParameterPath( + parameterPath, + parameterMapper + ); + serializer.serialize(parameterMapper, value, parameterPathString); + } + } else { + if (parameterMapper.required) { + value = {}; + } + + for (const propertyName in parameterPath) { + const propertyMapper: Mapper = (parameterMapper as CompositeMapper).type.modelProperties![ + propertyName + ]; + const propertyPath: ParameterPath = parameterPath[propertyName]; + const propertyValue: any = getOperationArgumentValueFromParameterPath( + serviceClient, + operationArguments, + propertyPath, + propertyMapper, + serializer + ); + // Serialize just for validation purposes. + const propertyPathString: string = getPathStringFromParameterPath( + propertyPath, + propertyMapper + ); + serializer.serialize(propertyMapper, propertyValue, propertyPathString); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} + +interface PropertySearchResult { + propertyValue?: any; + propertyFound: boolean; +} + +function getPropertyFromParameterPath( + parent: { [parameterName: string]: any }, + parameterPath: string[] +): PropertySearchResult { + const result: PropertySearchResult = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart: string = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). + if (parent != undefined && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} + +export function flattenResponse( + _response: HttpOperationResponse, + responseSpec: OperationResponse | undefined +): RestResponse { + const parsedHeaders = _response.parsedHeaders; + const bodyMapper = responseSpec && responseSpec.bodyMapper; + + const addOperationResponse = (obj: {}) => + Object.defineProperty(obj, "_response", { + value: _response, + }); + + if (bodyMapper) { + const typeName = bodyMapper.type.name; + if (typeName === "Stream") { + return addOperationResponse({ + ...parsedHeaders, + blobBody: _response.blobBody, + readableStreamBody: _response.readableStreamBody, + }); + } + + const modelProperties = + (typeName === "Composite" && (bodyMapper as CompositeMapper).type.modelProperties) || {}; + const isPageableResponse = Object.keys(modelProperties).some( + (k) => modelProperties[k].serializedName === "" + ); + if (typeName === "Sequence" || isPageableResponse) { + // We're expecting a sequece(array) make sure that the response body is in the + // correct format, if not make it an empty array [] + const parsedBody = Array.isArray(_response.parsedBody) ? 
_response.parsedBody : []; + const arrayResponse = [...parsedBody] as RestResponse & any[]; + + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = _response.parsedBody[key]; + } + } + + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + addOperationResponse(arrayResponse); + return arrayResponse; + } + + if (typeName === "Composite" || typeName === "Dictionary") { + return addOperationResponse({ + ...parsedHeaders, + ..._response.parsedBody, + }); + } + } + + if ( + bodyMapper || + _response.request.method === "HEAD" || + utils.isPrimitiveType(_response.parsedBody) + ) { + // primitive body types and HEAD booleans + return addOperationResponse({ + ...parsedHeaders, + body: _response.parsedBody, + }); + } + + return addOperationResponse({ + ...parsedHeaders, + ..._response.parsedBody, + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/url.ts b/node_modules/@azure/ms-rest-js/lib/url.ts new file mode 100644 index 0000000000..74d55028a1 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/url.ts @@ -0,0 +1,659 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { replaceAll } from "./util/utils"; + +type URLQueryParseState = "ParameterName" | "ParameterValue" | "Invalid"; + +/** + * A class that handles the query portion of a URLBuilder. + */ +export class URLQuery { + private readonly _rawQuery: { [queryParameterName: string]: string | string[] } = {}; + + /** + * Get whether or not there any query parameters in this URLQuery. + */ + public any(): boolean { + return Object.keys(this._rawQuery).length > 0; + } + + /** + * Set a query parameter with the provided name and value. If the parameterValue is undefined or + * empty, then this will attempt to remove an existing query parameter with the provided + * parameterName. + */ + public set(parameterName: string, parameterValue: any): void { + if (parameterName) { + if (parameterValue != undefined) { + const newValue = Array.isArray(parameterValue) ? parameterValue : parameterValue.toString(); + this._rawQuery[parameterName] = newValue; + } else { + delete this._rawQuery[parameterName]; + } + } + } + + /** + * Get the value of the query parameter with the provided name. If no parameter exists with the + * provided parameter name, then undefined will be returned. + */ + public get(parameterName: string): string | string[] | undefined { + return parameterName ? this._rawQuery[parameterName] : undefined; + } + + /** + * Get the string representation of this query. The return value will not start with a "?". + */ + public toString(): string { + let result = ""; + for (const parameterName in this._rawQuery) { + if (result) { + result += "&"; + } + const parameterValue = this._rawQuery[parameterName]; + if (Array.isArray(parameterValue)) { + const parameterStrings = []; + for (const parameterValueElement of parameterValue) { + parameterStrings.push(`${parameterName}=${parameterValueElement}`); + } + result += parameterStrings.join("&"); + } else { + result += `${parameterName}=${parameterValue}`; + } + } + return result; + } + + /** + * Parse a URLQuery from the provided text. 
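+   *
+   * For example, given the state machine below (illustrative values):
+   *   const query = URLQuery.parse("?api-version=2020-06-01&timeout=30");
+   *   query.get("api-version"); // "2020-06-01"
+   *   query.toString();         // "api-version=2020-06-01&timeout=30"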
+ */ + public static parse(text: string): URLQuery { + const result = new URLQuery(); + + if (text) { + if (text.startsWith("?")) { + text = text.substring(1); + } + + let currentState: URLQueryParseState = "ParameterName"; + + let parameterName = ""; + let parameterValue = ""; + for (let i = 0; i < text.length; ++i) { + const currentCharacter: string = text[i]; + switch (currentState) { + case "ParameterName": + switch (currentCharacter) { + case "=": + currentState = "ParameterValue"; + break; + + case "&": + parameterName = ""; + parameterValue = ""; + break; + + default: + parameterName += currentCharacter; + break; + } + break; + + case "ParameterValue": + switch (currentCharacter) { + case "&": + result.set(parameterName, parameterValue); + parameterName = ""; + parameterValue = ""; + currentState = "ParameterName"; + break; + + default: + parameterValue += currentCharacter; + break; + } + break; + + default: + throw new Error("Unrecognized URLQuery parse state: " + currentState); + } + } + if (currentState === "ParameterValue") { + result.set(parameterName, parameterValue); + } + } + + return result; + } +} + +/** + * A class that handles creating, modifying, and parsing URLs. + */ +export class URLBuilder { + private _scheme: string | undefined; + private _host: string | undefined; + private _port: string | undefined; + private _path: string | undefined; + private _query: URLQuery | undefined; + + /** + * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL + * (such as a host, port, path, or query), those parts will be added to this URL as well. + */ + public setScheme(scheme: string | undefined): void { + if (!scheme) { + this._scheme = undefined; + } else { + this.set(scheme, "SCHEME"); + } + } + + /** + * Get the scheme that has been set in this URL. + */ + public getScheme(): string | undefined { + return this._scheme; + } + + /** + * Set the host for this URL. If the provided host contains other parts of a URL (such as a + * port, path, or query), those parts will be added to this URL as well. + */ + public setHost(host: string | undefined): void { + if (!host) { + this._host = undefined; + } else { + this.set(host, "SCHEME_OR_HOST"); + } + } + + /** + * Get the host that has been set in this URL. + */ + public getHost(): string | undefined { + return this._host; + } + + /** + * Set the port for this URL. If the provided port contains other parts of a URL (such as a + * path or query), those parts will be added to this URL as well. + */ + public setPort(port: number | string | undefined): void { + if (port == undefined || port === "") { + this._port = undefined; + } else { + this.set(port.toString(), "PORT"); + } + } + + /** + * Get the port that has been set in this URL. + */ + public getPort(): string | undefined { + return this._port; + } + + /** + * Set the path for this URL. If the provided path contains a query, then it will be added to + * this URL as well. + */ + public setPath(path: string | undefined): void { + if (!path) { + this._path = undefined; + } else { + const schemeIndex = path.indexOf("://"); + if (schemeIndex !== -1) { + const schemeStart = path.lastIndexOf("/", schemeIndex); + // Make sure to only grab the URL part of the path before setting the state back to SCHEME + // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" + this.set(schemeStart === -1 ? 
path : path.substr(schemeStart + 1), "SCHEME"); + } else { + this.set(path, "PATH"); + } + } + } + + /** + * Append the provided path to this URL's existing path. If the provided path contains a query, + * then it will be added to this URL as well. + */ + public appendPath(path: string | undefined): void { + if (path) { + let currentPath: string | undefined = this.getPath(); + if (currentPath) { + if (!currentPath.endsWith("/")) { + currentPath += "/"; + } + + if (path.startsWith("/")) { + path = path.substring(1); + } + + path = currentPath + path; + } + this.set(path, "PATH"); + } + } + + /** + * Get the path that has been set in this URL. + */ + public getPath(): string | undefined { + return this._path; + } + + /** + * Set the query in this URL. + */ + public setQuery(query: string | undefined): void { + if (!query) { + this._query = undefined; + } else { + this._query = URLQuery.parse(query); + } + } + + /** + * Set a query parameter with the provided name and value in this URL's query. If the provided + * query parameter value is undefined or empty, then the query parameter will be removed if it + * existed. + */ + public setQueryParameter(queryParameterName: string, queryParameterValue: any): void { + if (queryParameterName) { + if (!this._query) { + this._query = new URLQuery(); + } + this._query.set(queryParameterName, queryParameterValue); + } + } + + /** + * Get the value of the query parameter with the provided query parameter name. If no query + * parameter exists with the provided name, then undefined will be returned. + */ + public getQueryParameterValue(queryParameterName: string): string | string[] | undefined { + return this._query ? this._query.get(queryParameterName) : undefined; + } + + /** + * Get the query in this URL. + */ + public getQuery(): string | undefined { + return this._query ? this._query.toString() : undefined; + } + + /** + * Set the parts of this URL by parsing the provided text using the provided startState. + */ + private set(text: string, startState: URLTokenizerState): void { + const tokenizer = new URLTokenizer(text, startState); + + while (tokenizer.next()) { + const token: URLToken | undefined = tokenizer.current(); + if (token) { + switch (token.type) { + case "SCHEME": + this._scheme = token.text || undefined; + break; + + case "HOST": + this._host = token.text || undefined; + break; + + case "PORT": + this._port = token.text || undefined; + break; + + case "PATH": + const tokenPath: string | undefined = token.text || undefined; + if (!this._path || this._path === "/" || tokenPath !== "/") { + this._path = tokenPath; + } + break; + + case "QUERY": + this._query = URLQuery.parse(token.text); + break; + + default: + throw new Error(`Unrecognized URLTokenType: ${token.type}`); + } + } + } + } + + public toString(): string { + let result = ""; + + if (this._scheme) { + result += `${this._scheme}://`; + } + + if (this._host) { + result += this._host; + } + + if (this._port) { + result += `:${this._port}`; + } + + if (this._path) { + if (!this._path.startsWith("/")) { + result += "/"; + } + result += this._path; + } + + if (this._query && this._query.any()) { + result += `?${this._query.toString()}`; + } + + return result; + } + + /** + * If the provided searchValue is found in this URLBuilder, then replace it with the provided + * replaceValue. 
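+   *
+   * This is roughly how sendOperationRequest substitutes URL template parameters
+   * (illustrative values):
+   *   const url = URLBuilder.parse("https://example.com/subscriptions/{subscriptionId}");
+   *   url.replaceAll("{subscriptionId}", "1234");
+   *   url.toString(); // "https://example.com/subscriptions/1234"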
+ */ + public replaceAll(searchValue: string, replaceValue: string): void { + if (searchValue) { + this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); + this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); + this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); + this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); + this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); + } + } + + public static parse(text: string): URLBuilder { + const result = new URLBuilder(); + result.set(text, "SCHEME_OR_HOST"); + return result; + } +} + +type URLTokenizerState = "SCHEME" | "SCHEME_OR_HOST" | "HOST" | "PORT" | "PATH" | "QUERY" | "DONE"; + +type URLTokenType = "SCHEME" | "HOST" | "PORT" | "PATH" | "QUERY"; + +export class URLToken { + public constructor(public readonly text: string, public readonly type: URLTokenType) {} + + public static scheme(text: string): URLToken { + return new URLToken(text, "SCHEME"); + } + + public static host(text: string): URLToken { + return new URLToken(text, "HOST"); + } + + public static port(text: string): URLToken { + return new URLToken(text, "PORT"); + } + + public static path(text: string): URLToken { + return new URLToken(text, "PATH"); + } + + public static query(text: string): URLToken { + return new URLToken(text, "QUERY"); + } +} + +/** + * Get whether or not the provided character (single character string) is an alphanumeric (letter or + * digit) character. + */ +export function isAlphaNumericCharacter(character: string): boolean { + const characterCode: number = character.charCodeAt(0); + return ( + (48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || + (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || + (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */ + ); +} + +/** + * A class that tokenizes URL strings. + */ +export class URLTokenizer { + readonly _textLength: number; + _currentState: URLTokenizerState; + _currentIndex: number; + _currentToken: URLToken | undefined; + + public constructor(readonly _text: string, state?: URLTokenizerState) { + this._textLength = _text ? _text.length : 0; + this._currentState = state != undefined ? state : "SCHEME_OR_HOST"; + this._currentIndex = 0; + } + + /** + * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer + * hasn't started or has finished tokenizing. + */ + public current(): URLToken | undefined { + return this._currentToken; + } + + /** + * Advance to the next URLToken and return whether or not a URLToken was found. + */ + public next(): boolean { + if (!hasCurrentCharacter(this)) { + this._currentToken = undefined; + } else { + switch (this._currentState) { + case "SCHEME": + nextScheme(this); + break; + + case "SCHEME_OR_HOST": + nextSchemeOrHost(this); + break; + + case "HOST": + nextHost(this); + break; + + case "PORT": + nextPort(this); + break; + + case "PATH": + nextPath(this); + break; + + case "QUERY": + nextQuery(this); + break; + + default: + throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); + } + } + return !!this._currentToken; + } +} + +/** + * Read the remaining characters from this Tokenizer's character stream. 
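+ * (Used by nextQuery below, which treats everything after "?" as the query string.)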
+ */ +function readRemaining(tokenizer: URLTokenizer): string { + let result = ""; + if (tokenizer._currentIndex < tokenizer._textLength) { + result = tokenizer._text.substring(tokenizer._currentIndex); + tokenizer._currentIndex = tokenizer._textLength; + } + return result; +} + +/** + * Whether or not this URLTokenizer has a current character. + */ +function hasCurrentCharacter(tokenizer: URLTokenizer): boolean { + return tokenizer._currentIndex < tokenizer._textLength; +} + +/** + * Get the character in the text string at the current index. + */ +function getCurrentCharacter(tokenizer: URLTokenizer): string { + return tokenizer._text[tokenizer._currentIndex]; +} + +/** + * Advance to the character in text that is "step" characters ahead. If no step value is provided, + * then step will default to 1. + */ +function nextCharacter(tokenizer: URLTokenizer, step?: number): void { + if (hasCurrentCharacter(tokenizer)) { + if (!step) { + step = 1; + } + tokenizer._currentIndex += step; + } +} + +/** + * Starting with the current character, peek "charactersToPeek" number of characters ahead in this + * Tokenizer's stream of characters. + */ +function peekCharacters(tokenizer: URLTokenizer, charactersToPeek: number): string { + let endIndex: number = tokenizer._currentIndex + charactersToPeek; + if (tokenizer._textLength < endIndex) { + endIndex = tokenizer._textLength; + } + return tokenizer._text.substring(tokenizer._currentIndex, endIndex); +} + +/** + * Read characters from this Tokenizer until the end of the stream or until the provided condition + * is false when provided the current character. + */ +function readWhile(tokenizer: URLTokenizer, condition: (character: string) => boolean): string { + let result = ""; + + while (hasCurrentCharacter(tokenizer)) { + const currentCharacter: string = getCurrentCharacter(tokenizer); + if (!condition(currentCharacter)) { + break; + } else { + result += currentCharacter; + nextCharacter(tokenizer); + } + } + + return result; +} + +/** + * Read characters from this Tokenizer until a non-alphanumeric character or the end of the + * character stream is reached. + */ +function readWhileLetterOrDigit(tokenizer: URLTokenizer): string { + return readWhile(tokenizer, (character: string) => isAlphaNumericCharacter(character)); +} + +/** + * Read characters from this Tokenizer until one of the provided terminating characters is read or + * the end of the character stream is reached. 
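+ *
+ * For example, readUntilCharacter(tokenizer, ":", "/", "?") is used below to read a scheme or
+ * host segment, stopping at (without consuming) the first delimiter.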
+ */ +function readUntilCharacter(tokenizer: URLTokenizer, ...terminatingCharacters: string[]): string { + return readWhile( + tokenizer, + (character: string) => terminatingCharacters.indexOf(character) === -1 + ); +} + +function nextScheme(tokenizer: URLTokenizer): void { + const scheme: string = readWhileLetterOrDigit(tokenizer); + tokenizer._currentToken = URLToken.scheme(scheme); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else { + tokenizer._currentState = "HOST"; + } +} + +function nextSchemeOrHost(tokenizer: URLTokenizer): void { + const schemeOrHost: string = readUntilCharacter(tokenizer, ":", "/", "?"); + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === ":") { + if (peekCharacters(tokenizer, 3) === "://") { + tokenizer._currentToken = URLToken.scheme(schemeOrHost); + tokenizer._currentState = "HOST"; + } else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + tokenizer._currentState = "PORT"; + } + } else { + tokenizer._currentToken = URLToken.host(schemeOrHost); + if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } + } +} + +function nextHost(tokenizer: URLTokenizer): void { + if (peekCharacters(tokenizer, 3) === "://") { + nextCharacter(tokenizer, 3); + } + + const host: string = readUntilCharacter(tokenizer, ":", "/", "?"); + tokenizer._currentToken = URLToken.host(host); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === ":") { + tokenizer._currentState = "PORT"; + } else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextPort(tokenizer: URLTokenizer): void { + if (getCurrentCharacter(tokenizer) === ":") { + nextCharacter(tokenizer); + } + + const port: string = readUntilCharacter(tokenizer, "/", "?"); + tokenizer._currentToken = URLToken.port(port); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else if (getCurrentCharacter(tokenizer) === "/") { + tokenizer._currentState = "PATH"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextPath(tokenizer: URLTokenizer): void { + const path: string = readUntilCharacter(tokenizer, "?"); + tokenizer._currentToken = URLToken.path(path); + + if (!hasCurrentCharacter(tokenizer)) { + tokenizer._currentState = "DONE"; + } else { + tokenizer._currentState = "QUERY"; + } +} + +function nextQuery(tokenizer: URLTokenizer): void { + if (getCurrentCharacter(tokenizer) === "?") { + nextCharacter(tokenizer); + } + + const query: string = readRemaining(tokenizer); + tokenizer._currentToken = URLToken.query(query); + tokenizer._currentState = "DONE"; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts b/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts new file mode 100644 index 0000000000..e6b5974195 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/base64.browser.ts @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * Encodes a string in base64 format. 
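+ * For example, encodeString("hello") returns "aGVsbG8=".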
+ * @param value the string to encode + */ +export function encodeString(value: string): string { + return btoa(value); +} + +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value: Uint8Array): string { + let str = ""; + for (let i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} + +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value: string): Uint8Array { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/base64.ts b/node_modules/@azure/ms-rest-js/lib/util/base64.ts new file mode 100644 index 0000000000..ea0c2e0b9d --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/base64.ts @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +/** + * Encodes a string in base64 format. + * @param value the string to encode + */ +export function encodeString(value: string): string { + return Buffer.from(value).toString("base64"); +} + +/** + * Encodes a byte array in base64 format. + * @param value the Uint8Aray to encode + */ +export function encodeByteArray(value: Uint8Array): string { + // Buffer.from accepts | -- the TypeScript definition is off here + // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer as ArrayBuffer); + return bufferValue.toString("base64"); +} + +/** + * Decodes a base64 string into a byte array. + * @param value the base64 string to decode + */ +export function decodeString(value: string): Uint8Array { + return Buffer.from(value, "base64"); +} diff --git a/node_modules/@azure/ms-rest-js/lib/util/constants.ts b/node_modules/@azure/ms-rest-js/lib/util/constants.ts new file mode 100644 index 0000000000..0276bb2b34 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/constants.ts @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +export const Constants = { + /** + * The ms-rest version + * @const + * @type {string} + */ + msRestVersion: "2.6.2", + + /** + * Specifies HTTP. + * + * @const + * @type {string} + */ + HTTP: "http:", + + /** + * Specifies HTTPS. + * + * @const + * @type {string} + */ + HTTPS: "https:", + + /** + * Specifies HTTP Proxy. + * + * @const + * @type {string} + */ + HTTP_PROXY: "HTTP_PROXY", + + /** + * Specifies HTTPS Proxy. + * + * @const + * @type {string} + */ + HTTPS_PROXY: "HTTPS_PROXY", + + /** + * Specifies NO Proxy. + */ + NO_PROXY: "NO_PROXY", + + /** + * Specifies ALL Proxy. + */ + ALL_PROXY: "ALL_PROXY", + + HttpConstants: { + /** + * Http Verbs + * + * @const + * @enum {string} + */ + HttpVerbs: { + PUT: "PUT", + GET: "GET", + DELETE: "DELETE", + POST: "POST", + MERGE: "MERGE", + HEAD: "HEAD", + PATCH: "PATCH", + }, + + StatusCodes: { + TooManyRequests: 429, + }, + }, + + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. 
+ * + * @const + * @type {string} + */ + AUTHORIZATION: "authorization", + + AUTHORIZATION_SCHEME: "Bearer", + + /** + * The Retry-After response-header field can be used with a 503 (Service + * Unavailable) or 349 (Too Many Requests) responses to indicate how long + * the service is expected to be unavailable to the requesting client. + * + * @const + * @type {string} + */ + RETRY_AFTER: "Retry-After", + + /** + * The UserAgent header. + * + * @const + * @type {string} + */ + USER_AGENT: "User-Agent", + }, +}; diff --git a/node_modules/@azure/ms-rest-js/lib/util/utils.ts b/node_modules/@azure/ms-rest-js/lib/util/utils.ts new file mode 100644 index 0000000000..0675100d97 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/util/utils.ts @@ -0,0 +1,288 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { v4 as uuidv4 } from "uuid"; +import { HttpOperationResponse } from "../httpOperationResponse"; +import { RestError } from "../restError"; +import { WebResourceLike } from "../webResource"; +import { Constants } from "./constants"; + +/** + * A constant that indicates whether the environment is node.js or browser based. + */ +export const isNode = + typeof process !== "undefined" && + !!process.version && + !!process.versions && + !!process.versions.node; + +/** + * Checks if a parsed URL is HTTPS + * + * @param {object} urlToCheck The url to check + * @return {boolean} True if the URL is HTTPS; false otherwise. + */ +export function urlIsHTTPS(urlToCheck: { protocol: string }): boolean { + return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; +} + +/** + * Encodes an URI. + * + * @param {string} uri The URI to be encoded. + * @return {string} The encoded URI. + */ +export function encodeUri(uri: string): string { + return encodeURIComponent(uri) + .replace(/!/g, "%21") + .replace(/"/g, "%27") + .replace(/\(/g, "%28") + .replace(/\)/g, "%29") + .replace(/\*/g, "%2A"); +} + +/** + * Returns a stripped version of the Http Response which only contains body, + * headers and the status. + * + * @param {HttpOperationResponse} response The Http Response + * + * @return {object} The stripped version of Http Response. + */ +export function stripResponse(response: HttpOperationResponse): any { + const strippedResponse: any = {}; + strippedResponse.body = response.bodyAsText; + strippedResponse.headers = response.headers; + strippedResponse.status = response.status; + return strippedResponse; +} + +/** + * Returns a stripped version of the Http Request that does not contain the + * Authorization header. + * + * @param {WebResource} request The Http Request object + * + * @return {WebResource} The stripped version of Http Request. + */ +export function stripRequest(request: WebResourceLike): WebResourceLike { + const strippedRequest = request.clone(); + if (strippedRequest.headers) { + strippedRequest.headers.remove("authorization"); + } + return strippedRequest; +} + +/** + * Validates the given uuid as a string + * + * @param {string} uuid The uuid as a string that needs to be validated + * + * @return {boolean} True if the uuid is valid; false otherwise. + */ +export function isValidUuid(uuid: string): boolean { + const validUuidRegex = new RegExp( + "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$", + "ig" + ); + return validUuidRegex.test(uuid); +} + +/** + * Provides an array of values of an object. 
For example + * for a given object { "a": "foo", "b": "bar" }, the method returns ["foo", "bar"]. + * + * @param {object} obj An object whose properties need to be enumerated so that it"s values can be provided as an array + * + * @return {any[]} An array of values of the given object. + */ +export function objectValues(obj: { [key: string]: any }): any[] { + const result: any[] = []; + if (obj && obj instanceof Object) { + for (const key in obj) { + if (obj.hasOwnProperty(key)) { + result.push((obj)[key]); + } + } + } else { + throw new Error( + `The provided object ${JSON.stringify( + obj, + undefined, + 2 + )} is not a valid object that can be ` + `enumerated to provide its values as an array.` + ); + } + return result; +} + +/** + * Generated UUID + * + * @return {string} RFC4122 v4 UUID. + */ +export function generateUuid(): string { + return uuidv4(); +} + +/** + * Executes an array of promises sequentially. Inspiration of this method is here: + * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! + * + * @param {Array} promiseFactories An array of promise factories(A function that return a promise) + * + * @param {any} [kickstart] Input to the first promise that is used to kickstart the promise chain. + * If not provided then the promise chain starts with undefined. + * + * @return A chain of resolved or rejected promises + */ +export function executePromisesSequentially(promiseFactories: Array, kickstart: any) { + let result = Promise.resolve(kickstart); + promiseFactories.forEach((promiseFactory) => { + result = result.then(promiseFactory); + }); + return result; +} + +/** + * Merges source object into the target object + * @param {object} source The object that needs to be merged + * + * @param {object} target The object to be merged into + * + * @returns {object} Returns the merged target object. + */ +export function mergeObjects(source: { [key: string]: any }, target: { [key: string]: any }) { + Object.keys(source).forEach((key) => { + target[key] = source[key]; + }); + return target; +} + +/** + * A wrapper for setTimeout that resolves a promise after t milliseconds. + * @param {number} t The number of milliseconds to be delayed. + * @param {T} value The value to be resolved with after a timeout of t milliseconds. + * @returns {Promise} Resolved promise + */ +export function delay(t: number, value?: T): Promise { + return new Promise((resolve) => setTimeout(() => resolve(value), t)); +} + +/** + * Service callback that is returned for REST requests initiated by the service client. + */ +export interface ServiceCallback { + /** + * A method that will be invoked as a callback to a service function. + * @param {Error | RestError | null} err The error occurred if any, while executing the request; otherwise null. + * @param {TResult} [result] The deserialized response body if an error did not occur. + * @param {WebResourceLike} [request] The raw/actual request sent to the server if an error did not occur. + * @param {HttpOperationResponse} [response] The raw/actual response from the server if an error did not occur. + */ + ( + err: Error | RestError | null, + result?: TResult, + request?: WebResourceLike, + response?: HttpOperationResponse + ): void; +} + +/** + * Converts a Promise to a callback. 
+ * @param {Promise} promise The Promise to be converted to a callback
+ * @returns {Function} A function that takes the callback (cb: Function): void
+ * @deprecated generated code should instead depend on responseToBody
+ */
+export function promiseToCallback(promise: Promise<any>): Function {
+  if (typeof promise.then !== "function") {
+    throw new Error("The provided input is not a Promise.");
+  }
+  return (cb: Function): void => {
+    promise.then(
+      (data: any) => {
+        cb(undefined, data);
+      },
+      (err: Error) => {
+        cb(err);
+      }
+    );
+  };
+}
+
+/**
+ * Converts a Promise to a service callback.
+ * @param {Promise} promise - The Promise of HttpOperationResponse to be converted to a service callback
+ * @returns {Function} A function that takes the service callback (cb: ServiceCallback): void
+ */
+export function promiseToServiceCallback<T>(promise: Promise<HttpOperationResponse>): Function {
+  if (typeof promise.then !== "function") {
+    throw new Error("The provided input is not a Promise.");
+  }
+  return (cb: ServiceCallback<T>): void => {
+    promise.then(
+      (data: HttpOperationResponse) => {
+        process.nextTick(cb, undefined, data.parsedBody as T, data.request, data);
+      },
+      (err: Error) => {
+        process.nextTick(cb, err);
+      }
+    );
+  };
+}
+
+export function prepareXMLRootList(obj: any, elementName: string) {
+  if (!Array.isArray(obj)) {
+    obj = [obj];
+  }
+  return { [elementName]: obj };
+}
+
+/**
+ * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor
+ * @param {object} targetCtor The target object on which the properties need to be applied.
+ * @param {Array} sourceCtors An array of source objects from which the properties need to be taken.
+ */
+export function applyMixins(targetCtor: any, sourceCtors: any[]): void {
+  sourceCtors.forEach((sourceCtors) => {
+    Object.getOwnPropertyNames(sourceCtors.prototype).forEach((name) => {
+      targetCtor.prototype[name] = sourceCtors.prototype[name];
+    });
+  });
+}
+
+const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;
+
+/**
+ * Indicates whether the given string is in ISO 8601 format.
+ * @param {string} value The value to be validated for ISO 8601 duration format.
+ * @return {boolean} `true` if valid, `false` otherwise.
+ */
+export function isDuration(value: string): boolean {
+  return validateISODuration.test(value);
+}
+
+/**
+ * Replace all of the instances of searchValue in value with the provided replaceValue.
+ * @param {string | undefined} value The value to search and replace in.
+ * @param {string} searchValue The value to search for in the value argument.
+ * @param {string} replaceValue The value to replace searchValue with in the value argument.
+ * @returns {string | undefined} The value where each instance of searchValue was replaced with replaceValue.
+ */
+export function replaceAll(
+  value: string | undefined,
+  searchValue: string,
+  replaceValue: string
+): string | undefined {
+  return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || "");
+}
+
+/**
+ * Determines whether the given entity is a basic/primitive type
+ * (string, number, boolean, null, undefined).
+ * @param value Any entity
+ * @return boolean - true if it is a primitive type, false otherwise.
+ */
+export function isPrimitiveType(value: any): boolean {
+  return (typeof value !== "object" && typeof value !== "function") || value === null;
+}
diff --git a/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts b/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts
new file mode 100644
index 0000000000..3ff63768c6
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/lib/util/xml.browser.ts
@@ -0,0 +1,165 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for license information.
+
+const parser = new DOMParser();
+
+// Policy to make our code Trusted Types compliant.
+// https://github.com/w3c/webappsec-trusted-types
+// We are calling DOMParser.parseFromString() to parse XML payload from Azure services.
+// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing
+// according to the spec. There are no HTML/XSS security concerns on the usage of
+// parseFromString() here.
+let ttPolicy: Pick<TrustedTypePolicy, "createHTML"> | undefined;
+if (typeof self.trustedTypes !== "undefined") {
+  ttPolicy = self.trustedTypes.createPolicy("@azure/ms-rest-js#xml.browser", {
+    createHTML: (s) => s,
+  });
+}
+
+export function parseXML(str: string): Promise<any> {
+  try {
+    const dom = parser.parseFromString((ttPolicy?.createHTML(str) ?? str) as string, "application/xml");
+    throwIfError(dom);
+
+    const obj = domToObject(dom.childNodes[0]);
+    return Promise.resolve(obj);
+  } catch (err) {
+    return Promise.reject(err);
+  }
+}
+
+let errorNS = "";
+try {
+  const invalidXML = (ttPolicy?.createHTML("INVALID") ?? "INVALID") as string;
+  errorNS =
+    parser.parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0]
+      .namespaceURI! ?? "";
+} catch (ignored) {
+  // Most browsers will return a document containing <parsererror>, but IE will throw.
+}
+
+function throwIfError(dom: Document) {
+  if (errorNS) {
+    const parserErrors = dom.getElementsByTagNameNS(errorNS, "parsererror");
+    if (parserErrors.length) {
+      throw new Error(parserErrors.item(0)!.innerHTML);
+    }
+  }
+}
+
+function isElement(node: Node): node is Element {
+  return !!(node as Element).attributes;
+}
+
+/**
+ * Get the Element-typed version of the provided Node if the provided node is an element with
+ * attributes. If it isn't, then undefined is returned.
+ */
+function asElementWithAttributes(node: Node): Element | undefined {
+  return isElement(node) && node.hasAttributes() ? node : undefined;
+}
+
+function domToObject(node: Node): any {
+  let result: any = {};
+
+  const childNodeCount: number = node.childNodes.length;
+
+  const firstChildNode: Node = node.childNodes[0];
+  const onlyChildTextValue: string | undefined =
+    (firstChildNode &&
+      childNodeCount === 1 &&
+      firstChildNode.nodeType === Node.TEXT_NODE &&
+      firstChildNode.nodeValue) ||
+    undefined;
+
+  const elementWithAttributes: Element | undefined = asElementWithAttributes(node);
+  if (elementWithAttributes) {
+    result["$"] = {};
+
+    for (let i = 0; i < elementWithAttributes.attributes.length; i++) {
+      const attr = elementWithAttributes.attributes[i];
+      result["$"][attr.nodeName] = attr.nodeValue;
+    }
+
+    if (onlyChildTextValue) {
+      result["_"] = onlyChildTextValue;
+    }
+  } else if (childNodeCount === 0) {
+    result = "";
+  } else if (onlyChildTextValue) {
+    result = onlyChildTextValue;
+  }
+
+  if (!onlyChildTextValue) {
+    for (let i = 0; i < childNodeCount; i++) {
+      const child = node.childNodes[i];
+      // Ignore leading/trailing whitespace nodes
+      if (child.nodeType !== Node.TEXT_NODE) {
+        const childObject: any = domToObject(child);
+        if (!result[child.nodeName]) {
+          result[child.nodeName] = childObject;
+        } else if (Array.isArray(result[child.nodeName])) {
+          result[child.nodeName].push(childObject);
+        } else {
+          result[child.nodeName] = [result[child.nodeName], childObject];
+        }
+      }
+    }
+  }
+
+  return result;
+}
+
+// tslint:disable-next-line:no-null-keyword
+const doc = document.implementation.createDocument(null, null, null);
+const serializer = new XMLSerializer();
+
+export function stringifyXML(obj: any, opts?: { rootName?: string }) {
+  const rootName = (opts && opts.rootName) || "root";
+  const dom = buildNode(obj, rootName)[0];
+  return (
+    '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>' + serializer.serializeToString(dom)
+  );
+}
+
+function buildAttributes(attrs: { [key: string]: { toString(): string } }): Attr[] {
+  const result = [];
+  for (const key of Object.keys(attrs)) {
+    const attr = doc.createAttribute(key);
+    attr.value = attrs[key].toString();
+    result.push(attr);
+  }
+  return result;
+}
+
+function buildNode(obj: any, elementName: string): Node[] {
+  if (typeof obj === "string" || typeof obj === "number" || typeof obj === "boolean") {
+    const elem = doc.createElement(elementName);
+    elem.textContent = obj.toString();
+    return [elem];
+  } else if (Array.isArray(obj)) {
+    const result = [];
+    for (const arrayElem of obj) {
+      for (const child of buildNode(arrayElem, elementName)) {
+        result.push(child);
+      }
+    }
+    return result;
+  } else if (typeof obj === "object") {
+    const elem = doc.createElement(elementName);
+    for (const key of Object.keys(obj)) {
+      if (key === "$") {
+        for (const attr of buildAttributes(obj[key])) {
+          elem.attributes.setNamedItem(attr);
+        }
+      } else {
+        for (const child of buildNode(obj[key], key)) {
+          elem.appendChild(child);
+        }
+      }
+    }
+    return [elem];
+  } else {
+    throw new Error(`Illegal value passed to buildObject: ${obj}`);
+  }
+}
diff --git a/node_modules/@azure/ms-rest-js/lib/util/xml.ts b/node_modules/@azure/ms-rest-js/lib/util/xml.ts
new file mode 100644
index 0000000000..b11ff5cd4e
--- /dev/null
+++ b/node_modules/@azure/ms-rest-js/lib/util/xml.ts
@@ -0,0 +1,35 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License. See License.txt in the project root for license information.
+ +import * as xml2js from "xml2js"; + +export function stringifyXML(obj: any, opts?: { rootName?: string }) { + const builder = new xml2js.Builder({ + rootName: (opts || {}).rootName, + renderOpts: { + pretty: false, + }, + }); + return builder.buildObject(obj); +} + +export function parseXML(str: string): Promise { + const xmlParser = new xml2js.Parser({ + explicitArray: false, + explicitCharkey: false, + explicitRoot: false, + }); + return new Promise((resolve, reject) => { + if (!str) { + reject(new Error("Document is empty")); + } else { + xmlParser.parseString(str, (err: any, res: any) => { + if (err) { + reject(err); + } else { + resolve(res); + } + }); + } + }); +} diff --git a/node_modules/@azure/ms-rest-js/lib/webResource.ts b/node_modules/@azure/ms-rest-js/lib/webResource.ts new file mode 100644 index 0000000000..10bb55fea7 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/webResource.ts @@ -0,0 +1,668 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpHeaders, HttpHeadersLike, isHttpHeadersLike } from "./httpHeaders"; +import { OperationSpec } from "./operationSpec"; +import { Mapper, Serializer } from "./serializer"; +import { generateUuid } from "./util/utils"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { OperationResponse } from "./operationResponse"; +import { AgentSettings, ProxySettings } from "./serviceClient"; + +export type HttpMethods = + | "GET" + | "PUT" + | "POST" + | "DELETE" + | "PATCH" + | "HEAD" + | "OPTIONS" + | "TRACE"; +export type HttpRequestBody = + | Blob + | string + | ArrayBuffer + | ArrayBufferView + | (() => NodeJS.ReadableStream); + +/** + * Fired in response to upload or download progress. + */ +export type TransferProgressEvent = { + /** + * The number of bytes loaded so far. + */ + loadedBytes: number; +}; + +/** + * Allows the request to be aborted upon firing of the "abort" event. + * Compatible with the browser built-in AbortSignal and common polyfills. + */ +export interface AbortSignalLike { + readonly aborted: boolean; + dispatchEvent: (event: Event) => boolean; + onabort: ((this: AbortSignalLike, ev: Event) => any) | null; + addEventListener: ( + type: "abort", + listener: (this: AbortSignalLike, ev: Event) => any, + options?: any + ) => void; + removeEventListener: ( + type: "abort", + listener: (this: AbortSignalLike, ev: Event) => any, + options?: any + ) => void; +} + +/** + * An abstraction over a REST call. + */ +export interface WebResourceLike { + /** + * The URL being accessed by the request. + */ + url: string; + /** + * The HTTP method to use when making the request. + */ + method: HttpMethods; + /** + * The HTTP body contents of the request. + */ + body?: any; + /** + * The HTTP headers to use when making the request. + */ + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. 
+ */ + operationResponseGetter?: ( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse; + formData?: any; + /** + * A query string represented as an object. + */ + query?: { [key: string]: any }; + /** + * Used to parse the response. + */ + operationSpec?: OperationSpec; + /** + * If credentials (cookies) should be sent along during an XHR. + */ + withCredentials: boolean; + /** + * The number of milliseconds a request can take before automatically being terminated. + * If the request is terminated, an `AbortError` is thrown. + */ + timeout: number; + /** + * Proxy configuration. + */ + proxySettings?: ProxySettings; + /** + * HTTP(S) agent configuration. + */ + agentSettings?: AgentSettings; + /** + * If the connection should be reused. + */ + keepAlive?: boolean; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + + /** + * Used to abort the request later. + */ + abortSignal?: AbortSignalLike; + + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void; + + /** + * Sets options on the request. + */ + prepare(options: RequestPrepareOptions): WebResourceLike; + /** + * Clone this request object. + */ + clone(): WebResourceLike; +} + +export function isWebResourceLike(object: any): object is WebResourceLike { + if (typeof object !== "object") { + return false; + } + if ( + typeof object.url === "string" && + typeof object.method === "string" && + typeof object.headers === "object" && + isHttpHeadersLike(object.headers) && + typeof object.validateRequestProperties === "function" && + typeof object.prepare === "function" && + typeof object.clone === "function" + ) { + return true; + } + return false; +} + +/** + * Creates a new WebResource object. + * + * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary + * properties to initiate a request. + * + * @constructor + */ +export class WebResource { + url: string; + method: HttpMethods; + body?: any; + headers: HttpHeadersLike; + /** + * Whether or not the body of the HttpOperationResponse should be treated as a stream. + */ + streamResponseBody?: boolean; + /** + * Whether or not the HttpOperationResponse should be deserialized. If this is undefined, then the + * HttpOperationResponse should be deserialized. + */ + shouldDeserialize?: boolean | ((response: HttpOperationResponse) => boolean); + /** + * A function that returns the proper OperationResponse for the given OperationSpec and + * HttpOperationResponse combination. If this is undefined, then a simple status code lookup will + * be used. 
+ */ + operationResponseGetter?: ( + operationSpec: OperationSpec, + response: HttpOperationResponse + ) => undefined | OperationResponse; + formData?: any; + query?: { [key: string]: any }; + operationSpec?: OperationSpec; + withCredentials: boolean; + timeout: number; + proxySettings?: ProxySettings; + keepAlive?: boolean; + agentSettings?: AgentSettings; + redirectLimit?: number; + + abortSignal?: AbortSignalLike; + + /** Callback which fires upon upload progress. */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** Callback which fires upon download progress. */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + constructor( + url?: string, + method?: HttpMethods, + body?: any, + query?: { [key: string]: any }, + headers?: { [key: string]: any } | HttpHeadersLike, + streamResponseBody?: boolean, + withCredentials?: boolean, + abortSignal?: AbortSignalLike, + timeout?: number, + onUploadProgress?: (progress: TransferProgressEvent) => void, + onDownloadProgress?: (progress: TransferProgressEvent) => void, + proxySettings?: ProxySettings, + keepAlive?: boolean, + agentSettings?: AgentSettings, + redirectLimit?: number + ) { + this.streamResponseBody = streamResponseBody; + this.url = url || ""; + this.method = method || "GET"; + this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); + this.body = body; + this.query = query; + this.formData = undefined; + this.withCredentials = withCredentials || false; + this.abortSignal = abortSignal; + this.timeout = timeout || 0; + this.onUploadProgress = onUploadProgress; + this.onDownloadProgress = onDownloadProgress; + this.proxySettings = proxySettings; + this.keepAlive = keepAlive; + this.agentSettings = agentSettings; + this.redirectLimit = redirectLimit; + } + + /** + * Validates that the required properties such as method, url, headers["Content-Type"], + * headers["accept-language"] are defined. It will throw an error if one of the above + * mentioned properties are not defined. + */ + validateRequestProperties(): void { + if (!this.method) { + throw new Error("WebResource.method is required."); + } + if (!this.url) { + throw new Error("WebResource.url is required."); + } + } + + /** + * Prepares the request. + * @param {RequestPrepareOptions} options Options to provide for preparing the request. + * @returns {WebResource} Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. + */ + prepare(options: RequestPrepareOptions): WebResource { + if (!options) { + throw new Error("options object is required"); + } + + if (options.method == undefined || typeof options.method.valueOf() !== "string") { + throw new Error("options.method must be a string."); + } + + if (options.url && options.pathTemplate) { + throw new Error( + "options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them." + ); + } + + if ( + (options.pathTemplate == undefined || typeof options.pathTemplate.valueOf() !== "string") && + (options.url == undefined || typeof options.url.valueOf() !== "string") + ) { + throw new Error("Please provide exactly one of options.pathTemplate or options.url."); + } + + // set the url if it is provided. 
+ if (options.url) { + if (typeof options.url !== "string") { + throw new Error('options.url must be of type "string".'); + } + this.url = options.url; + } + + // set the method + if (options.method) { + const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; + if (validMethods.indexOf(options.method.toUpperCase()) === -1) { + throw new Error( + 'The provided method "' + + options.method + + '" is invalid. Supported HTTP methods are: ' + + JSON.stringify(validMethods) + ); + } + } + this.method = options.method.toUpperCase() as HttpMethods; + + // construct the url if path template is provided + if (options.pathTemplate) { + const { pathTemplate, pathParameters } = options; + if (typeof pathTemplate !== "string") { + throw new Error('options.pathTemplate must be of type "string".'); + } + if (!options.baseUrl) { + options.baseUrl = "https://management.azure.com"; + } + const baseUrl = options.baseUrl; + let url = + baseUrl + + (baseUrl.endsWith("/") ? "" : "/") + + (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); + const segments = url.match(/({\w*\s*\w*})/gi); + if (segments && segments.length) { + if (!pathParameters) { + throw new Error( + `pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.` + ); + } + segments.forEach(function (item) { + const pathParamName = item.slice(1, -1); + const pathParam = (pathParameters as { [key: string]: any })[pathParamName]; + if ( + pathParam === null || + pathParam === undefined || + !(typeof pathParam === "string" || typeof pathParam === "object") + ) { + throw new Error( + `pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + + ` however, it is not present in ${pathParameters} - ${JSON.stringify( + pathParameters, + undefined, + 2 + )}.` + + `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + + `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.` + ); + } + + if (typeof pathParam.valueOf() === "string") { + url = url.replace(item, encodeURIComponent(pathParam)); + } + + if (typeof pathParam.valueOf() === "object") { + if (!pathParam.value) { + throw new Error( + `options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.` + ); + } + if (pathParam.skipUrlEncoding) { + url = url.replace(item, pathParam.value); + } else { + url = url.replace(item, encodeURIComponent(pathParam.value)); + } + } + }); + } + this.url = url; + } + + // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. + if (options.queryParameters) { + const queryParameters = options.queryParameters; + if (typeof queryParameters !== "object") { + throw new Error( + `options.queryParameters must be of type object. It should be a JSON object ` + + `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + + `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.` + ); + } + // append question mark if it is not present in the url + if (this.url && this.url.indexOf("?") === -1) { + this.url += "?"; + } + // construct queryString + const queryParams = []; + // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
+ this.query = {}; + for (const queryParamName in queryParameters) { + const queryParam: any = queryParameters[queryParamName]; + if (queryParam) { + if (typeof queryParam === "string") { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); + this.query[queryParamName] = encodeURIComponent(queryParam); + } else if (typeof queryParam === "object") { + if (!queryParam.value) { + throw new Error( + `options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.` + ); + } + if (queryParam.skipUrlEncoding) { + queryParams.push(queryParamName + "=" + queryParam.value); + this.query[queryParamName] = queryParam.value; + } else { + queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); + this.query[queryParamName] = encodeURIComponent(queryParam.value); + } + } + } + } // end-of-for + // append the queryString + this.url += queryParams.join("&"); + } + + // add headers to the request if they are provided + if (options.headers) { + const headers = options.headers; + for (const headerName of Object.keys(options.headers)) { + this.headers.set(headerName, headers[headerName]); + } + } + // ensure accept-language is set correctly + if (!this.headers.get("accept-language")) { + this.headers.set("accept-language", "en-US"); + } + // ensure the request-id is set correctly + if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { + this.headers.set("x-ms-client-request-id", generateUuid()); + } + + // default + if (!this.headers.get("Content-Type")) { + this.headers.set("Content-Type", "application/json; charset=utf-8"); + } + + // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicilty + this.body = options.body; + if (options.body != undefined) { + // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. + if (options.bodyIsStream) { + if (!this.headers.get("Transfer-Encoding")) { + this.headers.set("Transfer-Encoding", "chunked"); + } + if (this.headers.get("Content-Type") !== "application/octet-stream") { + this.headers.set("Content-Type", "application/octet-stream"); + } + } else { + if (options.serializationMapper) { + this.body = new Serializer(options.mappers).serialize( + options.serializationMapper, + options.body, + "requestBody" + ); + } + if (!options.disableJsonStringifyOnBody) { + this.body = JSON.stringify(options.body); + } + } + } + + this.abortSignal = options.abortSignal; + this.onDownloadProgress = options.onDownloadProgress; + this.onUploadProgress = options.onUploadProgress; + this.redirectLimit = options.redirectLimit; + this.streamResponseBody = options.streamResponseBody; + + return this; + } + + /** + * Clone this WebResource HTTP request object. + * @returns {WebResource} The clone of this WebResource HTTP request object. 
+ */ + clone(): WebResource { + const result = new WebResource( + this.url, + this.method, + this.body, + this.query, + this.headers && this.headers.clone(), + this.streamResponseBody, + this.withCredentials, + this.abortSignal, + this.timeout, + this.onUploadProgress, + this.onDownloadProgress, + this.proxySettings, + this.keepAlive, + this.agentSettings, + this.redirectLimit + ); + + if (this.formData) { + result.formData = this.formData; + } + + if (this.operationSpec) { + result.operationSpec = this.operationSpec; + } + + if (this.shouldDeserialize) { + result.shouldDeserialize = this.shouldDeserialize; + } + + if (this.operationResponseGetter) { + result.operationResponseGetter = this.operationResponseGetter; + } + + return result; + } +} + +export interface RequestPrepareOptions { + /** + * The HTTP request method. Valid values are "GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", + * or "PATCH". + */ + method: HttpMethods; + /** + * The request url. It may or may not have query parameters in it. Either provide the "url" or + * provide the "pathTemplate" in the options object. Both the options are mutually exclusive. + */ + url?: string; + /** + * A dictionary of query parameters to be appended to the url, where + * the "key" is the "query-parameter-name" and the "value" is the "query-parameter-value". + * The "query-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "query-parameter-value". + * Example: + * - query-parameter-value in "object" format: { "query-parameter-name": { value: "query-parameter-value", skipUrlEncoding: true } } + * - query-parameter-value in "string" format: { "query-parameter-name": "query-parameter-value"}. + * Note: "If options.url already has some query parameters, then the value provided in options.queryParameters will be appended to the url. + */ + queryParameters?: { [key: string]: any | ParameterValue }; + /** + * The path template of the request url. Either provide the "url" or provide the "pathTemplate" in + * the options object. Both the options are mutually exclusive. + * Example: "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}" + */ + pathTemplate?: string; + /** + * The base url of the request. Default value is: "https://management.azure.com". This is + * applicable only with pathTemplate. If you are providing options.url then it is expected that + * you provide the complete url. + */ + baseUrl?: string; + /** + * A dictionary of path parameters that need to be replaced with actual values in the pathTemplate. + * Here the key is the "path-parameter-name" and the value is the "path-parameter-value". + * The "path-parameter-value" can be of type "string" or it can be of type "object". + * The "object" format should be used when you want to skip url encoding. While using the object format, + * the object must have a property named value which provides the "path-parameter-value". + * Example: + * - path-parameter-value in "object" format: { "path-parameter-name": { value: "path-parameter-value", skipUrlEncoding: true } } + * - path-parameter-value in "string" format: { "path-parameter-name": "path-parameter-value" }. 
+ */ + pathParameters?: { [key: string]: any | ParameterValue }; + formData?: { [key: string]: any }; + /** + * A dictionary of request headers that need to be applied to the request. + * Here the key is the "header-name" and the value is the "header-value". The header-value MUST be of type string. + * - ContentType must be provided with the key name as "Content-Type". Default value "application/json; charset=utf-8". + * - "Transfer-Encoding" is set to "chunked" by default if "options.bodyIsStream" is set to true. + * - "Content-Type" is set to "application/octet-stream" by default if "options.bodyIsStream" is set to true. + * - "accept-language" by default is set to "en-US" + * - "x-ms-client-request-id" by default is set to a new Guid. To not generate a guid for the request, please set options.disableClientRequestId to true + */ + headers?: { [key: string]: any }; + /** + * When set to true, instructs the client to not set "x-ms-client-request-id" header to a new Guid(). + */ + disableClientRequestId?: boolean; + /** + * The request body. It can be of any type. This value will be serialized if it is not a stream. + */ + body?: any; + /** + * Provides information on how to serialize the request body. + */ + serializationMapper?: Mapper; + /** + * A dictionary of mappers that may be used while [de]serialization. + */ + mappers?: { [x: string]: any }; + /** + * Provides information on how to deserialize the response body. + */ + deserializationMapper?: object; + /** + * Indicates whether this method should JSON.stringify() the request body. Default value: false. + */ + disableJsonStringifyOnBody?: boolean; + /** + * Indicates whether the request body is a stream (useful for file upload scenarios). + */ + bodyIsStream?: boolean; + abortSignal?: AbortSignalLike; + /** + * Limit the number of redirects followed for this request. If set to 0, redirects will not be followed. + * If left undefined the default redirect behaviour of the underlying node_fetch will apply. + */ + redirectLimit?: number; + + onUploadProgress?: (progress: TransferProgressEvent) => void; + onDownloadProgress?: (progress: TransferProgressEvent) => void; + streamResponseBody?: boolean; +} + +/** + * The Parameter value provided for path or query parameters in RequestPrepareOptions + */ +export interface ParameterValue { + value: any; + skipUrlEncoding: boolean; + [key: string]: any; +} + +/** + * Describes the base structure of the options object that will be used in every operation. + */ +export interface RequestOptionsBase { + /** + * @property {object} [customHeaders] User defined custom request headers that + * will be applied before the request is sent. + */ + customHeaders?: { [key: string]: string }; + + /** + * The signal which can be used to abort requests. + */ + abortSignal?: AbortSignalLike; + + /** + * The number of milliseconds a request can take before automatically being terminated. + */ + timeout?: number; + + /** + * Callback which fires upon upload progress. + */ + onUploadProgress?: (progress: TransferProgressEvent) => void; + + /** + * Callback which fires upon download progress. + */ + onDownloadProgress?: (progress: TransferProgressEvent) => void; + + [key: string]: any; +} diff --git a/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts b/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts new file mode 100644 index 0000000000..698d701d36 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/lib/xhrHttpClient.ts @@ -0,0 +1,178 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License. See License.txt in the project root for license information. + +import { HttpClient } from "./httpClient"; +import { HttpHeaders } from "./httpHeaders"; +import { WebResourceLike, TransferProgressEvent } from "./webResource"; +import { HttpOperationResponse } from "./httpOperationResponse"; +import { RestError } from "./restError"; + +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + */ +export class XhrHttpClient implements HttpClient { + public sendRequest(request: WebResourceLike): Promise { + const xhr = new XMLHttpRequest(); + + if (request.agentSettings) { + throw new Error("HTTP agent settings not supported in browser environment"); + } + + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + + const abortSignal = request.abortSignal; + if (abortSignal) { + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + + if (request.formData) { + const formData = request.formData; + const requestForm = new FormData(); + const appendFormValue = (key: string, value: any) => { + if (value && value.hasOwnProperty("value") && value.hasOwnProperty("options")) { + requestForm.append(key, value.value, value.options); + } else { + requestForm.append(key, value); + } + }; + for (const formKey of Object.keys(formData)) { + const formValue = formData[formKey]; + if (Array.isArray(formValue)) { + for (let j = 0; j < formValue.length; j++) { + appendFormValue(formKey, formValue[j]); + } + } else { + appendFormValue(formKey, formValue); + } + } + + request.body = requestForm; + request.formData = undefined; + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("multipart/form-data") !== -1) { + // browser will automatically apply a suitable content-type header + request.headers.remove("Content-Type"); + } + } + + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const header of request.headers.headersArray()) { + xhr.setRequestHeader(header.name, header.value); + } + xhr.responseType = request.streamResponseBody ? "blob" : "text"; + + // tslint:disable-next-line:no-null-keyword + xhr.send(request.body === undefined ? 
null : request.body); + + if (request.streamResponseBody) { + return new Promise((resolve, reject) => { + xhr.addEventListener("readystatechange", () => { + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + } else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => + resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + }) + ); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} + +function addProgressListener( + xhr: XMLHttpRequestEventTarget, + listener?: (progress: TransferProgressEvent) => void +) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => + listener({ + loadedBytes: rawEvent.loaded, + }) + ); + } +} + +// exported locally for testing +export function parseHeaders(xhr: XMLHttpRequest) { + const responseHeaders = new HttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} + +function rejectOnTerminalEvent( + request: WebResourceLike, + xhr: XMLHttpRequest, + reject: (err: any) => void +) { + xhr.addEventListener("error", () => + reject( + new RestError( + `Failed to send request to ${request.url}`, + RestError.REQUEST_SEND_ERROR, + undefined, + request + ) + ) + ); + xhr.addEventListener("abort", () => + reject( + new RestError("The request was aborted", RestError.REQUEST_ABORTED_ERROR, undefined, request) + ) + ); + xhr.addEventListener("timeout", () => + reject( + new RestError( + `timeout of ${xhr.timeout}ms exceeded`, + RestError.REQUEST_SEND_ERROR, + undefined, + request + ) + ) + ); +} diff --git a/node_modules/@azure/ms-rest-js/package.json b/node_modules/@azure/ms-rest-js/package.json new file mode 100644 index 0000000000..7b38e01470 --- /dev/null +++ b/node_modules/@azure/ms-rest-js/package.json @@ -0,0 +1,182 @@ +{ + "name": "@azure/ms-rest-js", + "author": { + "name": "Microsoft Corporation", + "email": "azsdkteam@microsoft.com", + "url": "https://github.com/Azure/ms-rest-js" + }, + "version": "2.6.2", + "description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest", + "tags": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "keywords": [ + "isomorphic", + "browser", + "javascript", + "node", + "microsoft", + "autorest", + "clientruntime" + ], + "main": "./dist/msRest.node.js", + "module": "./es/lib/msRest.js", + "types": "./es/lib/msRest.d.ts", + "files": [ + "dist/**/*.js", + "dist/**/*.js.map", + "es/lib/**/*.js", + "es/lib/**/*.js.map", + "es/lib/**/*.d.ts", + "es/lib/**/*.d.ts.map", + "lib/**/!(dom.d).ts", + "dom-shim.d.ts", + "LICENSE", + "README.md", + "ThirdPartyNotices.txt" + ], + "browser": { + "./es/lib/policies/msRestUserAgentPolicy.js": "./es/lib/policies/msRestUserAgentPolicy.browser.js", + "./es/lib/policies/agentPolicy.js": 
"./es/lib/policies/agentPolicy.browser.js", + "./es/lib/policies/proxyPolicy.js": "./es/lib/policies/proxyPolicy.browser.js", + "./es/lib/util/base64.js": "./es/lib/util/base64.browser.js", + "./es/lib/util/xml.js": "./es/lib/util/xml.browser.js", + "./es/lib/defaultHttpClient.js": "./es/lib/defaultHttpClient.browser.js" + }, + "license": "MIT", + "dependencies": { + "@azure/core-auth": "^1.1.4", + "abort-controller": "^3.0.0", + "form-data": "^2.5.0", + "node-fetch": "^2.6.7", + "tough-cookie": "^3.0.1", + "tslib": "^1.10.0", + "tunnel": "0.0.6", + "uuid": "^8.3.2", + "xml2js": "^0.4.19" + }, + "devDependencies": { + "@azure/logger-js": "^1.1.0", + "@microsoft/api-extractor": "^7.18.11", + "@ts-common/azure-js-dev-tools": "^19.4.0", + "@types/chai": "^4.1.7", + "@types/express": "^4.17.0", + "@types/fetch-mock": "^7.3.1", + "@types/form-data": "^2.2.1", + "@types/glob": "^7.1.1", + "@types/karma": "^3.0.3", + "@types/mocha": "^5.2.7", + "@types/node": "^12.0.12", + "@types/node-fetch": "^2.3.7", + "@types/semver": "^6.0.1", + "@types/sinon": "^7.0.13", + "@types/tough-cookie": "^2.3.5", + "@types/trusted-types": "^2.0.0", + "@types/tunnel": "0.0.1", + "@types/uuid": "^8.3.2", + "@types/webpack": "^4.4.34", + "@types/webpack-dev-middleware": "^2.0.3", + "@types/xml2js": "^0.4.4", + "abortcontroller-polyfill": "^1.3.0", + "chai": "4.3.4", + "cross-env": "^7.0.3", + "express": "^4.17.1", + "fetch-mock": "^7.3.3", + "glob": "^7.1.4", + "karma": "^4.1.0", + "karma-chai": "^0.1.0", + "karma-chrome-launcher": "^2.2.0", + "karma-firefox-launcher": "^1.1.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0", + "karma-sourcemap-loader": "^0.3.7", + "karma-typescript-es6-transform": "^4.1.1", + "karma-webpack": "^4.0.2", + "mocha": "^6.1.4", + "mocha-chrome": "^2.0.0", + "mocha-junit-reporter": "^1.23.0", + "mocha-multi-reporters": "^1.1.7", + "npm-run-all": "^4.1.5", + "nyc": "^14.1.1", + "prettier": "2.2.1", + "rollup": "^1.16.6", + "rollup-plugin-commonjs": "^10.0.1", + "rollup-plugin-json": "^4.0.0", + "rollup-plugin-multi-entry": "^2.1.0", + "rollup-plugin-node-resolve": "^5.2.0", + "rollup-plugin-resolve": "0.0.1-predev.1", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-visualizer": "^2.4.4", + "semver": "^6.2.0", + "shx": "^0.3.2", + "sinon": "^7.3.2", + "terser": "^4.0.2", + "ts-loader": "^6.0.4", + "ts-node": "^8.3.0", + "tslint": "^5.18.0", + "tslint-eslint-rules": "^5.4.0", + "typescript": "^3.5.2", + "webpack": "^4.35.2", + "webpack-cli": "^3.3.5", + "webpack-dev-middleware": "^3.7.0", + "xhr-mock": "^2.4.1", + "yarn": "^1.16.0" + }, + "homepage": "https://github.com/Azure/ms-rest-js", + "repository": { + "type": "git", + "url": "git@github.com:Azure/ms-rest-js.git" + }, + "bugs": { + "url": "http://github.com/Azure/ms-rest-js/issues" + }, + "scripts": { + "build": "run-p build:scripts build:lib", + "build:scripts": "tsc -p ./.scripts/", + "build:lib": "run-s build:tsc build:rollup build:minify-browser extract-api", + "build:tsc": "tsc -p tsconfig.es.json", + "build:rollup": "rollup -c rollup.config.ts", + "build:minify-browser": "terser -c -m --comments --source-map \"content='./dist/msRest.browser.js.map'\" -o ./dist/msRest.browser.min.js ./dist/msRest.browser.js", + "build:test-browser": "webpack --config webpack.testconfig.ts", + "extract-api": "api-extractor run --local", + "format": "prettier --write \"./**/*.ts\" *.json", + "test": "run-p test:tslint test:unit test:karma", + "test:tslint": "tslint -p .", + "test:unit": "cross-env TS_NODE_FILES=true nyc 
mocha", + "test:karma": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --browsers ChromeNoSecurity --single-run ", + "test:karma:debug": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --log-level debug --browsers ChromeDebugging --debug --auto-watch", + "test:karma:debugff": "npm run build:test-browser && node ./node_modules/karma/bin/karma start karma.conf.ts --log-level debug --browsers FirefoxDebugging --debug --auto-watch", + "dep:autorest.typescript": "npx ts-node .scripts/testDependentProjects.ts autorest.typescript 'gulp build' 'gulp regenerate' 'npm run local'", + "dep:ms-rest-azure-js": "npx ts-node .scripts/testDependentProjects.ts ms-rest-azure-js", + "publish-preview": "mocha --no-colors && shx rm -rf dist/test && node ./.scripts/publish", + "local": "ts-node ./.scripts/local.ts", + "latest": "ts-node ./.scripts/latest.ts", + "prepack": "npm i && npm run build", + "check:packagejsonversion": "ts-node ./.scripts/checkPackageJsonVersion.ts", + "check:foronlycalls": "ts-node ./.scripts/checkForOnlyCalls.ts", + "check:everything": "ts-node ./.scripts/checkEverything.ts" + }, + "sideEffects": false, + "nyc": { + "extension": [ + ".ts" + ], + "exclude": [ + "coverage/**/*", + "**/*.d.ts", + "**/*.js" + ], + "reporter": [ + "text", + "html", + "cobertura" + ], + "all": true + } +} diff --git a/node_modules/@azure/storage-blob/LICENSE b/node_modules/@azure/storage-blob/LICENSE new file mode 100644 index 0000000000..ea8fb15160 --- /dev/null +++ b/node_modules/@azure/storage-blob/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Microsoft + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@azure/storage-blob/README.md b/node_modules/@azure/storage-blob/README.md new file mode 100644 index 0000000000..93f5754c45 --- /dev/null +++ b/node_modules/@azure/storage-blob/README.md @@ -0,0 +1,533 @@ +# Azure Storage Blob client library for JavaScript + +Azure Storage Blob is Microsoft's object storage solution for the cloud. Blob storage is optimized for storing massive amounts of unstructured data. Unstructured data is data that does not adhere to a particular data model or definition, such as text or binary data. + +This project provides a client library in JavaScript that makes it easy to consume Microsoft Azure Storage Blob service. 
+ +Use the client libraries in this package to: + +- Get/Set Blob Service Properties +- Create/List/Delete Containers +- Create/Read/List/Update/Delete Block Blobs +- Create/Read/List/Update/Delete Page Blobs +- Create/Read/List/Update/Delete Append Blobs + +Key links + +- [Source code](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob) +- [Package (npm)](https://www.npmjs.com/package/@azure/storage-blob/) +- [API Reference Documentation](https://docs.microsoft.com/javascript/api/@azure/storage-blob) +- [Product documentation](https://docs.microsoft.com/azure/storage/blobs/storage-blobs-overview) +- [Samples](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples) +- [Azure Storage Blob REST APIs](https://docs.microsoft.com/rest/api/storageservices/blob-service-rest-api) + +## Getting started + +### Currently supported environments + +- [LTS versions of Node.js](https://nodejs.org/about/releases/) +- Latest versions of Safari, Chrome, Edge, and Firefox. + +See our [support policy](https://github.com/Azure/azure-sdk-for-js/blob/main/SUPPORT.md) for more details. + +### Prerequisites + +- An [Azure subscription](https://azure.microsoft.com/free/) +- A [Storage Account](https://docs.microsoft.com/azure/storage/blobs/storage-quickstart-blobs-portal) + +### Install the package + +The preferred way to install the Azure Storage Blob client library for JavaScript is to use the npm package manager. Type the following into a terminal window: + +```bash +npm install @azure/storage-blob +``` + +### Authenticate the client + +Azure Storage supports several ways to authenticate. In order to interact with the Azure Blob Storage service you'll need to create an instance of a Storage client - `BlobServiceClient`, `ContainerClient`, or `BlobClient` for example. See [samples for creating the `BlobServiceClient`](#create-the-blob-service-client) to learn more about authentication. + +- [Azure Active Directory](#with-defaultazurecredential-from-azureidentity-package) +- [Shared Key](#with-storagesharedkeycredential) +- [Shared access signatures](#with-sas-token) + +#### Azure Active Directory + +The Azure Blob Storage service supports the use of Azure Active Directory to authenticate requests to its APIs. The [`@azure/identity`](https://www.npmjs.com/package/@azure/identity) package provides a variety of credential types that your application can use to do this. Please see the [README for `@azure/identity`](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/identity/identity/README.md) for more details and samples to get you started. + +### Compatibility + +This library is compatible with Node.js and browsers, and validated against LTS Node.js versions (>=8.16.0) and latest versions of Chrome, Firefox and Edge. + +#### Web Workers + +This library requires certain DOM objects to be globally available when used in the browser, which web workers do not make available by default. You will need to polyfill these to make this library work in web workers. 
+ +For more information please refer to our [documentation for using Azure SDK for JS in Web Workers](https://aka.ms/azsdk/js/web-workers) + +This library depends on following DOM APIs which need external polyfills loaded when used in web workers: + +- [`document`](https://developer.mozilla.org/docs/Web/API/Document) +- [`DOMParser`](https://developer.mozilla.org/docs/Web/API/DOMParser) +- [`Node`](https://developer.mozilla.org/docs/Web/API/Node) +- [`XMLSerializer`](https://developer.mozilla.org/docs/Web/API/XMLSerializer) + +#### Differences between Node.js and browsers + +There are differences between Node.js and browsers runtime. When getting started with this library, pay attention to APIs or classes marked with _"ONLY AVAILABLE IN NODE.JS RUNTIME"_ or _"ONLY AVAILABLE IN BROWSERS"_. + +- If a blob holds compressed data in `gzip` or `deflate` format and its content encoding is set accordingly, downloading behavior is different between Node.js and browsers. In Node.js storage clients will download the blob in its compressed format, while in browsers the data will be downloaded in de-compressed format. + +##### Features, interfaces, classes or functions only available in Node.js + +- Shared Key Authorization based on account name and account key + - `StorageSharedKeyCredential` +- Shared Access Signature(SAS) generation + - `generateAccountSASQueryParameters()` + - `generateBlobSASQueryParameters()` +- Parallel uploading and downloading. Note that `BlockBlobClient.uploadData()` is available in both Node.js and browsers. + - `BlockBlobClient.uploadFile()` + - `BlockBlobClient.uploadStream()` + - `BlobClient.downloadToBuffer()` + - `BlobClient.downloadToFile()` + +##### Features, interfaces, classes or functions only available in browsers + +- Parallel uploading and downloading + - `BlockBlobClient.uploadBrowserData()` + +### JavaScript Bundle + +To use this client library in the browser, first you need to use a bundler. For details on how to do this, please refer to our [bundling documentation](https://aka.ms/AzureSDKBundling). + +### CORS + +You need to set up [Cross-Origin Resource Sharing (CORS)](https://docs.microsoft.com/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services) rules for your storage account if you need to develop for browsers. Go to Azure portal and Azure Storage Explorer, find your storage account, create new CORS rules for blob/queue/file/table service(s). + +For example, you can create following CORS settings for debugging. But please customize the settings carefully according to your requirements in production environment. + +- Allowed origins: \* +- Allowed verbs: DELETE,GET,HEAD,MERGE,POST,OPTIONS,PUT +- Allowed headers: \* +- Exposed headers: \* +- Maximum age (seconds): 86400 + +## Key concepts + +Blob storage is designed for: + +- Serving images or documents directly to a browser. +- Storing files for distributed access. +- Streaming video and audio. +- Writing to log files. +- Storing data for backup and restore, disaster recovery, and archiving. +- Storing data for analysis by an on-premises or Azure-hosted service. 
+
+Blob storage offers three types of resources:
+
+- The _storage account_ used via `BlobServiceClient`
+- A _container_ in the storage account used via `ContainerClient`
+- A _blob_ in a container used via `BlobClient`
+
+## Examples
+
+- [Import the package](#import-the-package)
+- [Create the blob service client](#create-the-blob-service-client)
+- [Create a new container](#create-a-new-container)
+- [List the containers](#list-the-containers)
+- [Create a blob by uploading data](#create-a-blob-by-uploading-data)
+- [List blobs inside a container](#list-blobs-inside-a-container)
+- [Download a blob and convert it to a string (Node.js)](#download-a-blob-and-convert-it-to-a-string-nodejs)
+- [Download a blob and convert it to a string (Browsers)](#download-a-blob-and-convert-it-to-a-string-browsers)
+
+### Import the package
+
+To use the clients, import the package into your file:
+
+```javascript
+const AzureStorageBlob = require("@azure/storage-blob");
+```
+
+Alternatively, selectively import only the types you need:
+
+```javascript
+const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
+```
+
+### Create the blob service client
+
+The `BlobServiceClient` requires a URL to the blob service and an access credential. It also optionally accepts some settings in the `options` parameter.
+
+#### with `DefaultAzureCredential` from `@azure/identity` package
+
+**Recommended way to instantiate a `BlobServiceClient`**
+
+Setup: see [Authorize access to blobs and queues with Azure Active Directory from a client application](https://docs.microsoft.com/azure/storage/common/storage-auth-aad-app).
+
+- Register a new AAD application and give it permission to access Azure Storage on behalf of the signed-in user
+
+  - Register a new application in Azure Active Directory (in the Azure portal) - https://docs.microsoft.com/azure/active-directory/develop/quickstart-register-app
+  - In the `API permissions` section, select `Add a permission` and choose `Microsoft APIs`.
+  - Pick `Azure Storage`, select the checkbox next to `user_impersonation`, and then click `Add permissions`. This allows the application to access Azure Storage on behalf of the signed-in user.
+
+- Grant access to Azure Blob data with RBAC in the Azure portal
+
+  - RBAC roles for blobs and queues - https://docs.microsoft.com/azure/storage/common/storage-auth-aad-rbac-portal.
+  - In the Azure portal, go to your storage account and assign the **Storage Blob Data Contributor** role to the registered AAD application from the `Access control (IAM)` tab (in the left navigation bar of your storage account in the Azure portal).
+
+- Environment setup for the sample
+  - From the overview page of your AAD application, note down the `CLIENT ID` and `TENANT ID`. In the "Certificates & Secrets" tab, create a secret and note it down.
+  - Make sure the AZURE_TENANT_ID, AZURE_CLIENT_ID and AZURE_CLIENT_SECRET environment variables are set before running the sample (the sample can read them via process.env).
+
+```javascript
+const { DefaultAzureCredential } = require("@azure/identity");
+const { BlobServiceClient } = require("@azure/storage-blob");
+
+// Enter your storage account name
+const account = "";
+const defaultAzureCredential = new DefaultAzureCredential();
+
+const blobServiceClient = new BlobServiceClient(
+  `https://${account}.blob.core.windows.net`,
+  defaultAzureCredential
+);
+```
+
+See the [Azure AD Auth sample](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/javascript/azureAdAuth.js) for a complete example using this method.
+
+(Note: the steps above apply only to Node.js.)
+
+#### using connection string
+
+Alternatively, you can instantiate a `BlobServiceClient` using the `fromConnectionString()` static method with the full connection string as the argument. (The connection string can be obtained from the Azure portal.) [ONLY AVAILABLE IN NODE.JS RUNTIME]
+
+```javascript
+const { BlobServiceClient } = require("@azure/storage-blob");
+
+const connStr = "";
+
+const blobServiceClient = BlobServiceClient.fromConnectionString(connStr);
+```
+
+#### with `StorageSharedKeyCredential`
+
+Alternatively, you can instantiate a `BlobServiceClient` with a `StorageSharedKeyCredential` by passing the account name and account key as arguments. (The account name and account key can be obtained from the Azure portal.)
+[ONLY AVAILABLE IN NODE.JS RUNTIME]
+
+```javascript
+const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");
+
+// Enter your storage account name and shared key
+const account = "";
+const accountKey = "";
+
+// Use StorageSharedKeyCredential with storage account and account key
+// StorageSharedKeyCredential is only available in Node.js runtime, not in browsers
+const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
+const blobServiceClient = new BlobServiceClient(
+  `https://${account}.blob.core.windows.net`,
+  sharedKeyCredential
+);
+```
+
+#### with SAS Token
+
+You can also instantiate a `BlobServiceClient` with a shared access signature (SAS). You can get the SAS token from the Azure portal or generate one using `generateAccountSASQueryParameters()`; a sketch of generating one in code follows the example below.
+
+```javascript
+const { BlobServiceClient } = require("@azure/storage-blob");
+
+const account = "";
+const sas = "";
+
+const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`);
+```
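+
+If you want to generate the account SAS in code rather than copy it from the Azure portal, the following is a minimal sketch (Node.js only, since it relies on `StorageSharedKeyCredential`). The `ACCOUNT_NAME`/`ACCOUNT_KEY` environment variables and the one-hour, read-only scope are illustrative assumptions; adjust the permissions, services and expiry to your needs.
+
+```javascript
+const {
+  AccountSASPermissions,
+  AccountSASResourceTypes,
+  AccountSASServices,
+  generateAccountSASQueryParameters,
+  StorageSharedKeyCredential,
+} = require("@azure/storage-blob");
+
+// Assumption: account name and key are provided via environment variables.
+const account = process.env.ACCOUNT_NAME;
+const accountKey = process.env.ACCOUNT_KEY;
+const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
+
+// Build a read-only account SAS for the blob service that expires in one hour.
+const sasQueryParameters = generateAccountSASQueryParameters(
+  {
+    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
+    permissions: AccountSASPermissions.parse("r"),
+    resourceTypes: AccountSASResourceTypes.parse("sco").toString(),
+    services: AccountSASServices.parse("b").toString(),
+  },
+  sharedKeyCredential
+);
+
+// Prefix with "?" so it can be appended directly to the service URL, as in the example above.
+const sas = `?${sasQueryParameters.toString()}`;
+```
+
+### Create a new container
+
+Use `BlobServiceClient.getContainerClient()` to get a container client instance, then create a new container resource.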
+ +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + // Create a container + const containerName = `newcontainer${new Date().getTime()}`; + const containerClient = blobServiceClient.getContainerClient(containerName); + const createContainerResponse = await containerClient.create(); + console.log(`Create container ${containerName} successfully`, createContainerResponse.requestId); +} + +main(); +``` + +### List the containers + +Use `BlobServiceClient.listContainers()` function to iterate the containers, +with the new `for-await-of` syntax: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + let containers = blobServiceClient.listContainers(); + for await (const container of containers) { + console.log(`Container ${i++}: ${container.name}`); + } +} + +main(); +``` + +Alternatively without using `for-await-of`: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + let iter = blobServiceClient.listContainers(); + let containerItem = await iter.next(); + while (!containerItem.done) { + console.log(`Container ${i++}: ${containerItem.value.name}`); + containerItem = await iter.next(); + } +} + +main(); +``` + +In addition, pagination is supported for listing too via `byPage()`: + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +async function main() { + let i = 1; + for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + if (response.containerItems) { + for (const container of response.containerItems) { + console.log(`Container ${i++}: ${container.name}`); + } + } + } +} + +main(); +``` + +For a complete sample on iterating containers please see [samples/v12/typescript/src/listContainers.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/listContainers.ts). 
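+
+If you need to resume listing across separate calls (for example, between requests of a web app), the page returned by `byPage()` carries a `continuationToken` that can be passed back in. A minimal sketch, using the same client setup as the examples above:
+
+```javascript
+const { DefaultAzureCredential } = require("@azure/identity");
+const { BlobServiceClient } = require("@azure/storage-blob");
+
+const account = "";
+const defaultAzureCredential = new DefaultAzureCredential();
+
+const blobServiceClient = new BlobServiceClient(
+  `https://${account}.blob.core.windows.net`,
+  defaultAzureCredential
+);
+
+async function main() {
+  // Fetch the first page of up to 5 containers and remember where it stopped.
+  let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 5 });
+  let response = (await iterator.next()).value;
+  for (const container of response.containerItems) {
+    console.log(`First page: ${container.name}`);
+  }
+
+  // Later (even in a different call), resume from the saved continuation token.
+  if (response.continuationToken) {
+    iterator = blobServiceClient
+      .listContainers()
+      .byPage({ continuationToken: response.continuationToken, maxPageSize: 5 });
+    response = (await iterator.next()).value;
+    for (const container of response.containerItems) {
+      console.log(`Next page: ${container.name}`);
+    }
+  }
+}
+
+main();
+```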
+ +### Create a blob by uploading data + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + + const content = "Hello world!"; + const blobName = "newblob" + new Date().getTime(); + const blockBlobClient = containerClient.getBlockBlobClient(blobName); + const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + console.log(`Upload block blob ${blobName} successfully`, uploadBlobResponse.requestId); +} + +main(); +``` + +### List blobs inside a container + +Similar to listing containers. + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + + let i = 1; + let blobs = containerClient.listBlobsFlat(); + for await (const blob of blobs) { + console.log(`Blob ${i++}: ${blob.name}`); + } +} + +main(); +``` + +For a complete sample on iterating blobs please see [samples/v12/typescript/src/listBlobsFlat.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/listBlobsFlat.ts). + +### Download a blob and convert it to a string (Node.js) + +```javascript +const { DefaultAzureCredential } = require("@azure/identity"); +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const defaultAzureCredential = new DefaultAzureCredential(); + +const blobServiceClient = new BlobServiceClient( + `https://${account}.blob.core.windows.net`, + defaultAzureCredential +); + +const containerName = ""; +const blobName = ""; + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + const blobClient = containerClient.getBlobClient(blobName); + + // Get blob content from position 0 to the end + // In Node.js, get downloaded data by accessing downloadBlockBlobResponse.readableStreamBody + const downloadBlockBlobResponse = await blobClient.download(); + const downloaded = ( + await streamToBuffer(downloadBlockBlobResponse.readableStreamBody) + ).toString(); + console.log("Downloaded blob content:", downloaded); + + // [Node.js only] A helper method used to read a Node.js readable stream into a Buffer + async function streamToBuffer(readableStream) { + return new Promise((resolve, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + }); + readableStream.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); + }); + } +} + +main(); +``` + +### Download a blob and convert it to a string (Browsers). + +Please refer to the [JavaScript Bundle](#javascript-bundle) section for more information on using this library in the browser. 
+ +```javascript +const { BlobServiceClient } = require("@azure/storage-blob"); + +const account = ""; +const sas = ""; +const containerName = ""; +const blobName = ""; + +const blobServiceClient = new BlobServiceClient(`https://${account}.blob.core.windows.net${sas}`); + +async function main() { + const containerClient = blobServiceClient.getContainerClient(containerName); + const blobClient = containerClient.getBlobClient(blobName); + + // Get blob content from position 0 to the end + // In browsers, get downloaded data by accessing downloadBlockBlobResponse.blobBody + const downloadBlockBlobResponse = await blobClient.download(); + const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + console.log("Downloaded blob content", downloaded); + + // [Browsers only] A helper method used to convert a browser Blob into string. + async function blobToString(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsText(blob); + }); + } +} + +main(); +``` + +A complete example of simple scenarios is at [samples/v12/typescript/src/sharedKeyAuth.ts](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/samples/v12/typescript/src/sharedKeyAuth.ts). + +## Troubleshooting + +Enabling logging may help uncover useful information about failures. In order to see a log of HTTP requests and responses, set the `AZURE_LOG_LEVEL` environment variable to `info`. Alternatively, logging can be enabled at runtime by calling `setLogLevel` in the `@azure/logger`: + +```javascript +const { setLogLevel } = require("@azure/logger"); + +setLogLevel("info"); +``` + +## Next steps + +More code samples: + +- [Blob Storage Samples (JavaScript)](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples/v12/javascript) +- [Blob Storage Samples (TypeScript)](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/samples/v12/typescript) +- [Blob Storage Test Cases](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/storage/storage-blob/test/) + +## Contributing + +If you'd like to contribute to this library, please read the [contributing guide](https://github.com/Azure/azure-sdk-for-js/blob/main/CONTRIBUTING.md) to learn more about how to build and test the code. + +Also refer to [Storage specific guide](https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/CONTRIBUTING.md) for additional information on setting up the test environment for storage libraries. + +![Impressions](https://azure-sdk-impressions.azurewebsites.net/api/impressions/azure-sdk-for-js%2Fsdk%2Fstorage%2Fstorage-blob%2FREADME.png) diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js new file mode 100644 index 0000000000..bd2fea4019 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=BatchResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map new file mode 100644 index 0000000000..43638fcd3a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchResponse.js","sourceRoot":"","sources":["../../../src/BatchResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { HttpHeaders } from \"@azure/core-http\";\n\n/**\n * The response data associated with a single request within a batch operation.\n */\nexport interface BatchSubResponse {\n /**\n * The status code of the sub operation.\n */\n status: number;\n\n /**\n * The status message of the sub operation.\n */\n statusMessage: string;\n\n /**\n * The error code of the sub operation, if the sub operation failed.\n */\n errorCode?: string;\n\n /**\n * The HTTP response headers.\n */\n headers: HttpHeaders;\n\n /**\n * The body as text.\n */\n bodyAsText?: string;\n\n /**\n * The batch sub request corresponding to the sub response.\n */\n _request: BatchSubRequest;\n}\n\n/**\n * The multipart/mixed response which contains the response for each subrequest.\n */\nexport interface ParsedBatchResponse {\n /**\n * The parsed sub responses.\n */\n subResponses: BatchSubResponse[];\n\n /**\n * The succeeded executed sub responses' count;\n */\n subResponsesSucceededCount: number;\n\n /**\n * The failed executed sub responses' count;\n */\n subResponsesFailedCount: number;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js new file mode 100644 index 0000000000..1d083adca5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js @@ -0,0 +1,136 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders } from "@azure/core-http"; +import { HTTP_VERSION_1_1, HTTP_LINE_ENDING, HeaderConstants, HTTPURLConnection, } from "./utils/constants"; +import { getBodyAsText } from "./BatchUtils"; +import { logger } from "./log"; +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +export class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. 
+ throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. 
+ if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. + if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} +//# sourceMappingURL=BatchResponseParser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map new file mode 100644 index 0000000000..818e7e9b71 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BatchResponseParser.js","sourceRoot":"","sources":["../../../src/BatchResponseParser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAG/C,OAAO,EACL,gBAAgB,EAChB,gBAAgB,EAChB,eAAe,EACf,iBAAiB,GAClB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,aAAa,EAAE,MAAM,cAAc,CAAC;AAG7C,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAE/B,MAAM,qBAAqB,GAAG,IAAI,CAAC;AACnC,MAAM,eAAe,GAAG,GAAG,CAAC;AAC5B,MAAM,SAAS,GAAG,CAAC,CAAC,CAAC;AAErB;;GAEG;AACH,MAAM,OAAO,mBAAmB;IAO9B,YACE,aAA8C,EAC9C,WAAyC;QAEzC,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE;YAChD,+FAA+F;YAC/F,MAAM,IAAI,UAAU,CAAC,mEAAmE,CAAC,CAAC;SAC3F;QAED,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,IAAI,KAAK,CAAC,EAAE;YAC1C,wCAAwC;YACxC,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;SAClF;QAED,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,aAAa,CAAC,WAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,iBAAiB,GAAG,KAAK,IAAI,CAAC,qBAAqB,GAAG,gBAAgB,EAAE,CAAC;QAC9E,IAAI,CAAC,mBAAmB,GAAG,KAAK,IAAI,CAAC,qBAAqB,IAAI,CAAC;IACjE,CAAC;IAED,yHAAyH;IAClH,KAAK,CAAC,kBAAkB;QAC7B,uGAAuG;QACvG,0BAA0B;QAC1B,IAAI,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,KAAK,iBAAiB,CAAC,aAAa,EAAE;YAC3E,MAAM,IAAI,KAAK,CACb,qDAAqD,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,IAAI,CAC7F,CAAC;SACH;QAED,MAAM,kBAAkB,GAAG,MAAM,aAAa,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnE,MAAM,YAAY,GAAG,kBAAkB;aACpC,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC,CAAC,iCAAiC;aACpE,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;aAC7B,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,mDAAmD;QAChE,MAAM,gBAAgB,GAAG,YAAY,CAAC,MAAM,CAAC;QAE7C,uDAAuD;QACvD,4EAA4E;QAC5E,+FAA+F;QAC/F,uGAAuG;QACvG,IAAI,gBAAgB,KAAK,IAAI,CAAC,WAAW,CAAC,IAAI,IAAI,gBAAgB,KAAK,CAAC,EAAE;YACxE,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;SAC7F;QAED,MAAM,wBAAwB,GAA4B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC;QACtF,IAAI,0BAA0B,GAAW,CAAC,CAAC;QAC3C,IAAI,uBAAuB,GAAW,CAAC,CAAC;QAExC,0BAA0B;QAC1B,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,gBAAgB,EAAE,KAAK,EAAE,EAAE;YACrD,MAAM,WAAW,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;YACxC,MAAM,uBAAuB,GAAG,EAAsB,CAAC;YACvD,uBAAuB,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;YAEpD,MAAM,aAAa,GAAG,WAAW,CAAC,KAAK,CAAC,GAAG,gBAAgB,EAAE,CAAC,CAAC;YAC/D,IAAI,uBAAuB,GAAG,KAAK,CAAC;YACpC,IAAI,qBAAqB,GAAG,KAAK,CAAC;YAClC,IAAI,aAAa,GAAG,KAAK,CAAC;YAC1B,IAAI,SAAS,GAAG,SAAS,CAAC;YAE1B,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAI,CAAC,uBAAuB,EAAE;oBAC5B,yCAAyC;oBACzC,IAAI,YAAY,CAAC,UAAU,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;wBACvD,SAAS,GAAG,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;qBACpE;oBAED,oFAAoF;oBACpF,iCAAiC;oBACjC,IAAI,YAAY,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;wBAC7C,uBAAuB,GAAG,IAAI,CAAC;wBAE/B,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;wBACnD,uBAAuB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;wBACrD,uBAAuB,CAAC,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;qBAC/E;oBAED,SAAS,CAAC,iHAAiH;iBAC5H;gBAED,IAAI,YAAY,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;oBAC9B,4GAA4G;oBAC5G,IAAI,CAAC,qBAAqB,EAAE;wBAC1B,qBAAqB,GAAG,IAAI,CAAC;qBAC9B;oBAED,SAAS,CAAC,kBAAkB;iBAC7B;gBAED,2EAA2E;gBAC3E,IAAI,CAAC,qBAAqB,EAAE;oBAC1B,IAAI,YAAY,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;wBACtD,2DAA2D;wBAC3D,MAAM,IAAI,KAAK,CACb,uCAAuC,YAAY,oCAAoC,qBAAqB,IAAI,CACjH,CAAC;qBACH;oBAED,iCAAiC;oBACjC,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;oBACzD,uBAAuB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,eAAe,CAAC,eAAe,EAAE;wBACjD,uBAAuB,CAAC,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;wBAC9C,aAAa,GAAG,IAAI,CAAC;qBACtB;iBACF;qBAAM;oBACL,iCAAiC;oBAC
jC,IAAI,CAAC,uBAAuB,CAAC,UAAU,EAAE;wBACvC,uBAAuB,CAAC,UAAU,GAAG,EAAE,CAAC;qBACzC;oBAED,uBAAuB,CAAC,UAAU,IAAI,YAAY,CAAC;iBACpD;aACF,CAAC,gBAAgB;YAElB,kHAAkH;YAClH,uHAAuH;YACvH,oHAAoH;YACpH,uHAAuH;YACvH,IACE,SAAS,KAAK,SAAS;gBACvB,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC;gBAC3B,SAAS,IAAI,CAAC;gBACd,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI;gBACjC,wBAAwB,CAAC,SAAS,CAAC,KAAK,SAAS,EACjD;gBACA,uBAAuB,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;gBACpE,wBAAwB,CAAC,SAAS,CAAC,GAAG,uBAAuB,CAAC;aAC/D;iBAAM;gBACL,MAAM,CAAC,KAAK,CACV,gBAAgB,KAAK,uEAAuE,SAAS,EAAE,CACxG,CAAC;aACH;YAED,IAAI,aAAa,EAAE;gBACjB,uBAAuB,EAAE,CAAC;aAC3B;iBAAM;gBACL,0BAA0B,EAAE,CAAC;aAC9B;SACF;QAED,OAAO;YACL,YAAY,EAAE,wBAAwB;YACtC,0BAA0B,EAAE,0BAA0B;YACtD,uBAAuB,EAAE,uBAAuB;SACjD,CAAC;IACJ,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"@azure/core-http\";\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport {\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n HeaderConstants,\n HTTPURLConnection,\n} from \"./utils/constants\";\nimport { getBodyAsText } from \"./BatchUtils\";\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { BatchSubResponse, ParsedBatchResponse } from \"./BatchResponse\";\nimport { logger } from \"./log\";\n\nconst HTTP_HEADER_DELIMITER = \": \";\nconst SPACE_DELIMITER = \" \";\nconst NOT_FOUND = -1;\n\n/**\n * Util class for parsing batch response.\n */\nexport class BatchResponseParser {\n private readonly batchResponse: ServiceSubmitBatchResponseModel;\n private readonly responseBatchBoundary: string;\n private readonly perResponsePrefix: string;\n private readonly batchResponseEnding: string;\n private readonly subRequests: Map;\n\n constructor(\n batchResponse: ServiceSubmitBatchResponseModel,\n subRequests: Map\n ) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n }\n\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = this.batchResponse.contentType!.split(\"=\")[1];\n this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;\n this.batchResponseEnding = `--${this.responseBatchBoundary}--`;\n }\n\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n public async parseBatchResponse(): Promise {\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\n `Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`\n );\n }\n\n const responseBodyAsText = await getBodyAsText(this.batchResponse);\n\n const subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1); // string before first response boundary is useless\n const subResponseCount = subResponses.length;\n\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub 
request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n\n const deserializedSubResponses: Array = new Array(subResponseCount);\n let subResponsesSucceededCount: number = 0;\n let subResponsesFailedCount: number = 0;\n\n // Parse sub subResponses.\n for (let index = 0; index < subResponseCount; index++) {\n const subResponse = subResponses[index];\n const deserializedSubResponse = {} as BatchSubResponse;\n deserializedSubResponse.headers = new HttpHeaders();\n\n const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);\n let subRespHeaderStartFound = false;\n let subRespHeaderEndFound = false;\n let subRespFailed = false;\n let contentId = NOT_FOUND;\n\n for (const responseLine of responseLines) {\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n\n const tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n\n continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: *\n }\n\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n\n continue; // Skip empty line\n }\n\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\n `Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`\n );\n }\n\n // Parse headers of sub response.\n const tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n } else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. 
If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (\n contentId !== NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined\n ) {\n deserializedSubResponse._request = this.subRequests.get(contentId)!;\n deserializedSubResponses[contentId] = deserializedSubResponse;\n } else {\n logger.error(\n `subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`\n );\n }\n\n if (subRespFailed) {\n subResponsesFailedCount++;\n } else {\n subResponsesSucceededCount++;\n }\n }\n\n return {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount,\n };\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js new file mode 100644 index 0000000000..1a3926a1ac --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { blobToString } from "./utils/utils.browser"; +export async function getBodyAsText(batchResponse) { + const blob = (await batchResponse.blobBody); + return blobToString(blob); +} +export function utf8ByteLength(str) { + return new Blob([str]).size; +} +//# sourceMappingURL=BatchUtils.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map new file mode 100644 index 0000000000..3d84ba60bc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchUtils.browser.js","sourceRoot":"","sources":["../../../src/BatchUtils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAErD,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,MAAM,IAAI,GAAG,CAAC,MAAM,aAAa,CAAC,QAAQ,CAAS,CAAC;IACpD,OAAO,YAAY,CAAC,IAAI,CAAC,CAAC;AAC5B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,IAAI,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC;AAC9B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { blobToString } from \"./utils/utils.browser\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n const blob = (await batchResponse.blobBody) as Blob;\n return blobToString(blob);\n}\n\nexport function utf8ByteLength(str: string): number {\n return new Blob([str]).size;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js new file mode 100644 index 0000000000..35be3b10b8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { streamToBuffer2 } from "./utils/utils.node"; +import { BATCH_MAX_PAYLOAD_IN_BYTES } from "./utils/constants"; +export async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +export function utf8ByteLength(str) { + return Buffer.byteLength(str); +} +//# sourceMappingURL=BatchUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map new file mode 100644 index 0000000000..959a5a1e3e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchUtils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BatchUtils.js","sourceRoot":"","sources":["../../../src/BatchUtils.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAE/D,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,aAA8C;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;IAEF,6CAA6C;IAC7C,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;IAEzC,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAED,MAAM,UAAU,cAAc,CAAC,GAAW;IACxC,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js new file mode 100644 index 0000000000..6de406bf98 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js @@ -0,0 +1,282 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { BaseRequestPolicy, deserializationPolicy, generateUuid, HttpHeaders, WebResource, isTokenCredential, bearerTokenAuthenticationPolicy, isNode, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { BlobClient } from "./Clients"; +import { Mutex } from "./utils/Mutex"; +import { Pipeline } from "./Pipeline"; +import { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from "./utils/utils.common"; +import { HeaderConstants, BATCH_MAX_REQUEST, HTTP_VERSION_1_1, HTTP_LINE_ENDING, StorageOAuthScopes, } from "./utils/constants"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { createSpan } from "./utils/tracing"; +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = generateUuid(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
+ */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + const factories = new Array(policyFactoryLength); + factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer + factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers + if (!isAnonymousCreds) { + factories[2] = isTokenCredential(credential) + ? attachCredential(bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +class BatchRequestAssemblePolicy extends BaseRequestPolicy { + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new WebResource(), + status: 200, + headers: new HttpHeaders(), + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; // Intercept request from going to wire + } +} +class BatchRequestAssemblePolicyFactory { + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } +} +class BatchHeaderFilterPolicy extends BaseRequestPolicy { + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return this._nextPolicy.sendRequest(request); + } +} +class BatchHeaderFilterPolicyFactory { + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=BlobBatch.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map new file mode 100644 index 0000000000..a6351fa939 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobBatch.js","sourceRoot":"","sources":["../../../src/BlobBatch.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,qBAAqB,EACrB,YAAY,EACZ,WAAW,EAKX,WAAW,EAEX,iBAAiB,EACjB,+BAA+B,EAC/B,MAAM,GACP,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,UAAU,EAAyC,MAAM,WAAW,CAAC;AAE9E,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAC;AACtC,OAAO,EAAE,gBAAgB,EAAE,UAAU,EAAE,kBAAkB,EAAE,MAAM,EAAE,MAAM,sBAAsB,CAAC;AAChG,OAAO,EACL,eAAe,EACf,iBAAiB,EACjB,gBAAgB,EAChB,gBAAgB,EAChB,kBAAkB,GACnB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAmB7C;;;GAGG;AACH,MAAM,OAAO,SAAS;IAKpB;QAHiB,UAAK,GAAW,OAAO,CAAC;QAIvC,IAAI,CAAC,YAAY,GAAG,IAAI,iBAAiB,EAAE,CAAC;IAC9C,CAAC;IAED;;;;OAIG;IACI,uBAAuB;QAC5B,OAAO,IAAI,CAAC,YAAY,CAAC,uBAAuB,EAAE,CAAC;IACrD,CAAC;IAED;;OAEG;IACI,kBAAkB;QACvB,OAAO,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,CAAC;IAChD,CAAC;IAED;;OAEG;IACI,cAAc;QACnB,OAAO,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,CAAC;IAC5C,CAAC;IAEO,KAAK,CAAC,qBAAqB,CACjC,UAA2B,EAC3B,sBAA2C;QAE3C,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE7B,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAC;YAC/C,MAAM,sBAAsB,EAAE,CAAC;YAC/B,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC;SACjD;gBAAS;YACR,MAAM,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;SAChC;IACH,CAAC;IAEO,YAAY,CAAC,SAAqC;QACxD,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;SAC5B;QACD,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,EAAE;YAChC,MAAM,IAAI,UAAU,CAClB,yFAAyF,IAAI,CAAC,SAAS,cAAc,CACtH,CAAC;SACH;IACH,CAAC;IAqCM,KAAK,CAAC,UAAU,CACrB,eAAoC,EACpC,mBAKa,EACb,OAA2B;QAE3B,IAAI,GAAW,CAAC;QAChB,IAAI,UAA8E,CAAC;QAEnF,IACE,OAAO,eAAe,KAAK,QAAQ;YACnC,CAAC,CAAC,MAAM,IAAI,mBAAmB,YAAY,0BAA0B,CAAC;gBACpE,mBAAmB,YAAY,mBAAmB;gBAClD,iBAAiB,CAAC,mBAAmB,CAAC,CAAC,EACzC;YACA,iBAAiB;YACjB,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,mBAAmB,CAAC;SAClC;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;YAChD,kBAAkB;YAClB,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;YAC1B,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,OAAO,GAAG,mBAAwC,CAAC;SACpD;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;SACH;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;YAC5B,MAAM,IAAI,CAAC,qBAAqB,CAC9B;gBACE,GAAG,EAAE,GAAG;gBACR,UAAU,EAAE,UAAU;aACvB,EACD,KAAK,IAAI,EAAE;gBACT,MAAM,IA
AI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAC5E,cAAc,CACf,CAAC;YACJ,CAAC,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAgDM,KAAK,CAAC,iBAAiB,CAC5B,eAAoC,EACpC,gBAIc,EACd,aAA+C,EAC/C,OAA4B;QAE5B,IAAI,GAAW,CAAC;QAChB,IAAI,UAA8E,CAAC;QACnF,IAAI,IAAgB,CAAC;QAErB,IACE,OAAO,eAAe,KAAK,QAAQ;YACnC,CAAC,CAAC,MAAM,IAAI,gBAAgB,YAAY,0BAA0B,CAAC;gBACjE,gBAAgB,YAAY,mBAAmB;gBAC/C,iBAAiB,CAAC,gBAAgB,CAAC,CAAC,EACtC;YACA,iBAAiB;YACjB,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,gBAGM,CAAC;YACpB,IAAI,GAAG,aAA2B,CAAC;SACpC;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;YAChD,kBAAkB;YAClB,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;YAC1B,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,IAAI,GAAG,gBAA8B,CAAC;YACtC,OAAO,GAAG,aAAmC,CAAC;SAC/C;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;SACH;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI;YACF,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC;YACnC,MAAM,IAAI,CAAC,qBAAqB,CAC9B;gBACE,GAAG,EAAE,GAAG;gBACR,UAAU,EAAE,UAAU;aACvB,EACD,KAAK,IAAI,EAAE;gBACT,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,aAAa,CACnF,IAAI,EACJ,cAAc,CACf,CAAC;YACJ,CAAC,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAED;;;GAGG;AACH,MAAM,iBAAiB;IASrB;QACE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;QACxB,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;QAEf,MAAM,QAAQ,GAAG,YAAY,EAAE,CAAC;QAEhC,kBAAkB;QAClB,IAAI,CAAC,QAAQ,GAAG,SAAS,QAAQ,EAAE,CAAC;QACpC,oBAAoB;QACpB,iCAAiC;QACjC,oCAAoC;QACpC,IAAI,CAAC,gBAAgB,GAAG,KAAK,IAAI,CAAC,QAAQ,GAAG,gBAAgB,GAAG,eAAe,CAAC,YAAY,qBAAqB,gBAAgB,GAAG,eAAe,CAAC,yBAAyB,UAAU,CAAC;QACxL,4CAA4C;QAC5C,IAAI,CAAC,oBAAoB,GAAG,6BAA6B,IAAI,CAAC,QAAQ,EAAE,CAAC;QACzE,sBAAsB;QACtB,IAAI,CAAC,kBAAkB,GAAG,KAAK,IAAI,CAAC,QAAQ,IAAI,CAAC;QAEjD,IAAI,CAAC,WAAW,GAAG,IAAI,GAAG,EAAE,CAAC;IAC/B,CAAC;IAED;;;;;;OAMG;IACI,cAAc,CACnB,UAA8E;QAE9E,MAAM,gBAAgB,GAAG,UAAU,YAAY,mBAAmB,CAAC;QACnE,MAAM,mBAAmB,GAAG,CAAC,GAAG,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,mHAAmH;QAC/K,MAAM,SAAS,GAA2B,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAEzE,SAAS,CAAC,CAAC,CAAC,GAAG,qBAAqB,EAAE,CAAC,CAAC,8DAA8D;QACtG,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,8BAA8B,EAAE,CAAC,CAAC,gEAAgE;QACrH,IAAI,CAAC,gBAAgB,EAAE;YACrB,SAAS,CAAC,CAAC,CAAC,GAAG,iBAAiB,CAAC,UAAU,CAAC;gBAC1C,CAAC,CAAC,gBAAgB,CACd,+BAA+B,CAAC,UAAU,EAAE,kBAAkB,CAAC,EAC/D,UAAU,CACX;gBACH,CAAC,CAAC,UAAU,CAAC;SAChB;QACD,SAAS,CAAC,mBAAmB,GAAG,CAAC,CAAC,GAAG,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC,CAAC,yFAAyF;QAE3K,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;IACrC,CAAC;IAEM,sBAAsB,CAAC,OAAoB;QAChD,gCAAgC;QAChC,IAAI,CAAC,IAAI,IAAI;YACX,IAAI,CAAC,gBAAgB;YACrB,GAAG,eAAe,CAAC,UAAU,KAAK,IAAI,CAAC,cAAc,EAAE;YACvD,EAAE;YACF,GAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,kBAAkB,CAChD,OAAO,CAAC,GAAG,CACZ,IAAI,gBAAgB,GAAG,gBAAgB,EAAE,EAAE,qCAAqC;SAClF,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAEzB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,CAAC,IAAI,IAAI,GAAG,MAAM,CAAC,IAAI,KAAK,MAAM,CAAC,KAAK,GAAG,gBAAgB,EAAE,CAAC;SACnE;QAED,IAAI,CAAC,IAAI,IAAI,gBAAgB,CAAC,CAAC,0DAA0D;QACzF,wDAAwD;QACxD,8BAA8B;IAChC,CAAC;IAEM,gBAAgB,CAAC,UAA2B;QACjD,IAAI,IAAI,CAAC,cAAc,IAAI,iBAAiB,EAAE;YAC5C,MAAM,IAAI,UAAU,CAAC,iBAAiB,iBAAiB,iCAAiC,CAAC,CAAC;SAC3F;QAED,8CAA8C;QAC9C,MAAM,IAAI,GAAG,UA
AU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;QACxC,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACxB,MAAM,IAAI,UAAU,CAAC,iCAAiC,UAAU,CAAC,GAAG,GAAG,CAAC,CAAC;SAC1E;IACH,CAAC;IAEM,iBAAiB,CAAC,UAA2B;QAClD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;QACtD,IAAI,CAAC,cAAc,EAAE,CAAC;IACxB,CAAC;IAED,wFAAwF;IACjF,kBAAkB;QACvB,OAAO,GAAG,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,kBAAkB,GAAG,gBAAgB,EAAE,CAAC;IACrE,CAAC;IAEM,uBAAuB;QAC5B,OAAO,IAAI,CAAC,oBAAoB,CAAC;IACnC,CAAC;IAEM,cAAc;QACnB,OAAO,IAAI,CAAC,WAAW,CAAC;IAC1B,CAAC;CACF;AAED,MAAM,0BAA2B,SAAQ,iBAAiB;IAQxD,YACE,YAA+B,EAC/B,UAAyB,EACzB,OAA6B;QAE7B,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAXZ,kBAAa,GAA0B;YACtD,OAAO,EAAE,IAAI,WAAW,EAAE;YAC1B,MAAM,EAAE,GAAG;YACX,OAAO,EAAE,IAAI,WAAW,EAAE;SAC3B,CAAC;QASA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,MAAM,IAAI,CAAC,YAAY,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC;QAExD,OAAO,IAAI,CAAC,aAAa,CAAC,CAAC,uCAAuC;IACpE,CAAC;CACF;AAED,MAAM,iCAAiC;IAGrC,YAAY,YAA+B;QACzC,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,0BAA0B,CAAC,IAAI,CAAC,YAAY,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;IAChF,CAAC;CACF;AAED,MAAM,uBAAwB,SAAQ,iBAAiB;IACrD,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,aAAa,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,eAAe,CAAC,YAAY,CAAC,EAAE;gBACrD,aAAa,GAAG,MAAM,CAAC,IAAI,CAAC;aAC7B;SACF;QAED,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC,CAAC,2DAA2D;SACnG;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF;AAED,MAAM,8BAA8B;IAC3B,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,uBAAuB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC1D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n generateUuid,\n HttpHeaders,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n WebResource,\n TokenCredential,\n isTokenCredential,\n bearerTokenAuthenticationPolicy,\n isNode,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobClient, BlobDeleteOptions, BlobSetTierOptions } from \"./Clients\";\nimport { AccessTier } from \"./generatedModels\";\nimport { Mutex } from \"./utils/Mutex\";\nimport { Pipeline } from \"./Pipeline\";\nimport { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from \"./utils/utils.common\";\nimport {\n HeaderConstants,\n BATCH_MAX_REQUEST,\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n StorageOAuthScopes,\n} from \"./utils/constants\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { createSpan } from \"./utils/tracing\";\n\n/**\n * A request associated with a batch operation.\n */\nexport interface BatchSubRequest {\n /**\n * The URL of the resource to request operation.\n */\n url: string;\n\n /**\n * The credential used for sub request.\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service.\n * You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n}\n\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n */\nexport class BlobBatch {\n private batchRequest: InnerBatchRequest;\n private readonly batch: string = \"batch\";\n private batchType: \"delete\" | \"setAccessTier\" | undefined;\n\n constructor() {\n this.batchRequest = new InnerBatchRequest();\n }\n\n /**\n * Get the value of Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n public getMultiPartContentType(): string {\n return this.batchRequest.getMultipartContentType();\n }\n\n /**\n * Get assembled HTTP request body for sub requests.\n */\n public getHttpRequestBody(): string {\n return this.batchRequest.getHttpRequestBody();\n }\n\n /**\n * Get sub requests that are added into the batch request.\n */\n public getSubRequests(): Map {\n return this.batchRequest.getSubRequests();\n }\n\n private async addSubRequestInternal(\n subRequest: BatchSubRequest,\n assembleSubRequestFunc: () => Promise\n ): Promise {\n await Mutex.lock(this.batch);\n\n try {\n this.batchRequest.preAddSubRequest(subRequest);\n await assembleSubRequestFunc();\n this.batchRequest.postAddSubRequest(subRequest);\n } finally {\n await Mutex.unlock(this.batch);\n }\n }\n\n private setBatchType(batchType: \"delete\" | \"setAccessTier\"): void {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\n `BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`\n );\n }\n }\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlob(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. 
See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param options -\n */\n public async deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise;\n\n public async deleteBlob(\n urlOrBlobClient: string | BlobClient,\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n options?: BlobDeleteOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n isTokenCredential(credentialOrOptions))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions as BlobDeleteOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchDeleteRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"delete\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n blobClient: BlobClient,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobAccessTier(\n urlOrBlobClient: string | BlobClient,\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n options?: BlobSetTierOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n let tier: AccessTier;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n isTokenCredential(credentialOrTier))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier as\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential;\n tier = tierOrOptions as AccessTier;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier as AccessTier;\n options = tierOrOptions as BlobSetTierOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchSetTierRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"setAccessTier\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(\n tier,\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nclass InnerBatchRequest {\n private operationCount: number;\n private body: string;\n private subRequests: Map;\n private readonly boundary: string;\n private readonly subRequestPrefix: string;\n private readonly multipartContentType: string;\n private readonly batchRequestEnding: string;\n\n constructor() {\n this.operationCount = 0;\n this.body = \"\";\n\n const tempGuid = generateUuid();\n\n // batch_{batchid}\n this.boundary = `batch_${tempGuid}`;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;\n // --batch_{batchid}--\n this.batchRequestEnding = `--${this.boundary}--`;\n\n this.subRequests = new Map();\n }\n\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public createPipeline(\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential\n ): Pipeline {\n const isAnonymousCreds = credential instanceof AnonymousCredential;\n const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n const factories: RequestPolicyFactory[] = new Array(policyFactoryLength);\n\n factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = isTokenCredential(credential)\n ? 
attachCredential(\n bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes),\n credential\n )\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n\n return new Pipeline(factories, {});\n }\n\n public appendSubRequestToBody(request: WebResource) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix, // sub request constant prefix\n `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID\n \"\", // empty line after sub request's content ID\n `${request.method.toString()} ${getURLPathAndQuery(\n request.url\n )} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n\n for (const header of request.headers.headersArray()) {\n this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`;\n }\n\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n }\n\n public preAddSubRequest(subRequest: BatchSubRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`);\n }\n\n // Fast fail if url for sub request is invalid\n const path = getURLPath(subRequest.url);\n if (!path || path === \"\") {\n throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`);\n }\n }\n\n public postAddSubRequest(subRequest: BatchSubRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n }\n\n // Return the http request body with assembling the ending line to the sub request body.\n public getHttpRequestBody(): string {\n return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`;\n }\n\n public getMultipartContentType(): string {\n return this.multipartContentType;\n }\n\n public getSubRequests(): Map {\n return this.subRequests;\n }\n}\n\nclass BatchRequestAssemblePolicy extends BaseRequestPolicy {\n private batchRequest: InnerBatchRequest;\n private readonly dummyResponse: HttpOperationResponse = {\n request: new WebResource(),\n status: 200,\n headers: new HttpHeaders(),\n };\n\n constructor(\n batchRequest: InnerBatchRequest,\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ) {\n super(nextPolicy, options);\n\n this.batchRequest = batchRequest;\n }\n\n public async sendRequest(request: WebResource): Promise {\n await this.batchRequest.appendSubRequestToBody(request);\n\n return this.dummyResponse; // Intercept request from going to wire\n }\n}\n\nclass BatchRequestAssemblePolicyFactory implements RequestPolicyFactory {\n private batchRequest: InnerBatchRequest;\n\n constructor(batchRequest: InnerBatchRequest) {\n this.batchRequest = batchRequest;\n }\n\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): BatchRequestAssemblePolicy {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n }\n}\n\nclass BatchHeaderFilterPolicy extends BaseRequestPolicy {\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(request: WebResource): Promise {\n let xMsHeaderName = \"\";\n\n for (const header of request.headers.headersArray()) {\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n\nclass BatchHeaderFilterPolicyFactory implements RequestPolicyFactory {\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): BatchHeaderFilterPolicy {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js new file mode 100644 index 0000000000..f15f53dac4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BatchResponseParser } from "./BatchResponseParser"; +import { utf8ByteLength } from "./BatchUtils"; +import { BlobBatch } from "./BlobBatch"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { Service, Container } from "./generated/src/operations"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageClientContext } from "./generated/src/storageClientContext"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { getURLPath } from "./utils/utils.common"; +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = new Container(storageClientContext); + } + else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + try { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). 
+ const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=BlobBatchClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map new file mode 100644 index 0000000000..5d47ea9154 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobBatchClient.js","sourceRoot":"","sources":["../../../src/BlobBatchClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AASlC,OAAO,EAAE,mBAAmB,EAAE,MAAM,uBAAuB,CAAC;AAC5D,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,EAAE,SAAS,EAAE,MAAM,aAAa,CAAC;AACxC,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAEjF,OAAO,EAAE,OAAO,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAEhE,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AAExE,OAAO,EAAE,oBAAoB,EAAE,MAAM,sCAAsC,CAAC;AAC5E,OAAO,EAAwC,WAAW,EAAE,cAAc,EAAE,MAAM,YAAY,CAAC;AAC/F,OAAO,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AAiClD;;;;GAIG;AACH,MAAM,OAAO,eAAe;IA8B1B,YACE,GAAW,EACX,oBAIgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;SACjC;aAAM,IAAI,CAAC,oBAAoB,EAAE;YAChC,yBAAyB;YACzB,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM;YACL,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;SACvD;QAED,MAAM,oBAAoB,GAAG,IAAI,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,sBAAsB,EAAE,CAAC,CAAC;QAE9F,MAAM,IAAI,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;QAC7B,IAAI,IAAI,IAAI,IAAI,KAAK,GAAG,EAAE;YACxB,oBAAoB;YACpB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC;SACtE;aAAM;YACL,IAAI,CAAC,yBAAyB,GAAG,IAAI,OAAO,CAAC,oBAAoB,CAAC,CAAC;SACpE;IACH,CAAC;IAED;;;OAGG;IACI,WAAW;QAChB,OAAO,IAAI,SAAS,EAAE,CAAC;IACzB,CAAC;IAsCM,KAAK,CAAC,WAAW,CACtB,iBAA0C,EAC1C,mBAKa;IACb,mFAAmF;IACnF,gEAAgE;IAChE,OAA2B;QAE3B,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;QAC9B,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;YAC/C,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAsC,EAAE,OAAO,CAAC,CAAC;aAC1F;iBAAM;gBACL,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAwC,CAAC,CAAC;aACnF;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAkDM,KAAK,CAAC,kBAAkB,CAC7B,iBAA0C,EAC1C,gBAIc,EACd,aAA+C;IAC/C,mFAAmF;IACnF,gEAAgE;IAChE,OAA4B;QAE5B,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;QAC9B,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;YAC/C,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAAmC,EACnC,aAA2B,EAC3B,OAAO,CACR,CAAC;aACH;iBAAM;gBACL,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAA8B,EAC9B,aAAmC,CACpC,CAAC;aACH;SACF;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACjC,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAkCG;IACI,KAAK,CAAC,WAAW,CACtB,YAAuB,EACvB,UAA8C,EAAE;QAEhD,IAAI,C
AAC,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE;YAC7D,MAAM,IAAI,UAAU,CAAC,wDAAwD,CAAC,CAAC;SAChF;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,MAAM,gBAAgB,GAAG,YAAY,CAAC,kBAAkB,EAAE,CAAC;YAE3D,2FAA2F;YAC3F,MAAM,gBAAgB,GACpB,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC9C,cAAc,CAAC,gBAAgB,CAAC,EAChC,YAAY,CAAC,uBAAuB,EAAE,EACtC,gBAAgB,kCAEX,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YAEJ,gHAAgH;YAChH,MAAM,mBAAmB,GAAG,IAAI,mBAAmB,CACjD,gBAAgB,EAChB,YAAY,CAAC,cAAc,EAAE,CAC9B,CAAC;YACF,MAAM,eAAe,GAAG,MAAM,mBAAmB,CAAC,kBAAkB,EAAE,CAAC;YAEvE,MAAM,GAAG,GAAiC;gBACxC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,WAAW,EAAE,gBAAgB,CAAC,WAAW;gBACzC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,eAAe,EAAE,gBAAgB,CAAC,eAAe;gBACjD,OAAO,EAAE,gBAAgB,CAAC,OAAO;gBACjC,YAAY,EAAE,eAAe,CAAC,YAAY;gBAC1C,0BAA0B,EAAE,eAAe,CAAC,0BAA0B;gBACtE,uBAAuB,EAAE,eAAe,CAAC,uBAAuB;aACjE,CAAC;YAEF,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AccessTier,\n ServiceSubmitBatchHeaders,\n ServiceSubmitBatchOptionalParamsModel,\n ServiceSubmitBatchResponseModel,\n} from \"./generatedModels\";\nimport { ParsedBatchResponse } from \"./BatchResponse\";\nimport { BatchResponseParser } from \"./BatchResponseParser\";\nimport { utf8ByteLength } from \"./BatchUtils\";\nimport { BlobBatch } from \"./BlobBatch\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { HttpResponse, TokenCredential } from \"@azure/core-http\";\nimport { Service, Container } from \"./generated/src/operations\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobDeleteOptions, BlobClient, BlobSetTierOptions } from \"./Clients\";\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike, StoragePipelineOptions, newPipeline, isPipelineLike } from \"./Pipeline\";\nimport { getURLPath } from \"./utils/utils.common\";\n\n/**\n * Options to configure the Service - Submit Batch Optional Params.\n */\nexport interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel {}\n\n/**\n * Contains response data for blob batch operations.\n */\nexport declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse &\n ServiceSubmitBatchHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n };\n\n/**\n * Contains response data for the {@link deleteBlobs} operation.\n */\nexport declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * Contains response data for the {@link setBlobsAccessTier} operation.\n */\nexport declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nexport class BlobBatchClient {\n private serviceOrContainerContext: Service | Container;\n\n /**\n * Creates an 
instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n\n const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n\n const path = getURLPath(url);\n if (path && path !== \"/\") {\n // Container scoped.\n this.serviceOrContainerContext = new Container(storageClientContext);\n } else {\n this.serviceOrContainerContext = new Service(storageClientContext);\n }\n }\n\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n public createBatch(): BlobBatch {\n return new BlobBatch();\n }\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operations will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resources to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlobs(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation(subrequest) will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs to delete.\n * @param options -\n */\n public async deleteBlobs(\n blobClients: BlobClient[],\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n public async deleteBlobs(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as TokenCredential, options);\n } else {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as BlobDeleteOptions);\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs which should have a new tier set.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n blobClients: BlobClient[],\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobsAccessTier(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as TokenCredential,\n tierOrOptions as AccessTier,\n options\n );\n } else {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as AccessTier,\n tierOrOptions as BlobSetTierOptions\n );\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param batchRequest - A set of Delete or SetTier operations.\n * @param options -\n */\n public async submitBatch(\n batchRequest: BlobBatch,\n options: BlobBatchSubmitBatchOptionalParams = {}\n ): Promise {\n if (!batchRequest 
|| batchRequest.getSubRequests().size === 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n\n const { span, updatedOptions } = createSpan(\"BlobBatchClient-submitBatch\", options);\n try {\n const batchRequestBody = batchRequest.getHttpRequestBody();\n\n // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.\n const rawBatchResponse: ServiceSubmitBatchResponseModel =\n await this.serviceOrContainerContext.submitBatch(\n utf8ByteLength(batchRequestBody),\n batchRequest.getMultiPartContentType(),\n batchRequestBody,\n {\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202).\n const batchResponseParser = new BatchResponseParser(\n rawBatchResponse,\n batchRequest.getSubRequests()\n );\n const responseSummary = await batchResponseParser.parseBatchResponse();\n\n const res: BlobBatchSubmitBatchResponse = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js new file mode 100644 index 0000000000..567242bdb6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// This file is used as a shim of "BlobDownloadResponse" for some browser bundlers +// when trying to bundle "BlobDownloadResponse" +// "BlobDownloadResponse" class is only available in Node.js runtime +export const BlobDownloadResponse = 1; +//# sourceMappingURL=BlobDownloadResponse.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map new file mode 100644 index 0000000000..f1a82af1c9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobDownloadResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,kFAAkF;AAClF,+CAA+C;AAC/C,oEAAoE;AACpE,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BlobDownloadResponse\" for some browser bundlers\n// when trying to bundle \"BlobDownloadResponse\"\n// \"BlobDownloadResponse\" class is only available in Node.js runtime\nexport const BlobDownloadResponse = 1;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js new file mode 100644 index 0000000000..858c3b6eed --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js @@ -0,0 +1,455 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode } from "@azure/core-http"; +import { RetriableReadableStream, } from "./utils/RetriableReadableStream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) + * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +export class BlobDownloadResponse { + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. 
+ * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. + * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. 
+ */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobDownloadResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map new file mode 100644 index 0000000000..d5a70d401d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobDownloadResponse.js","sourceRoot":"","sources":["../../../src/BlobDownloadResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAgB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAYxD,OAAO,EAEL,uBAAuB,GAExB,MAAM,iCAAiC,CAAC;AAEzC;;;;;;;;;GASG;AACH,MAAM,OAAO,oBAAoB;IA6d/B;;;;;;;;OAQG;IACH,YACE,gBAA4C,EAC5C,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,UAA0C,EAAE;QAE5C,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,uBAAuB,CACnD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,MAAM,EACN,MAAM,EACN,KAAK,EACL,OAAO,CACR,CAAC;IACJ,CAAC;IApfD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QAChC,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI
,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,IAAW,gBAAgB;QACzB,OAAO,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC;IAChD,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,IAAW,oCAAoC;QAC7C,OAAO,IAAI,CAAC,gBAAgB,CAAC,oCAAoC,CAAC;IACpE,CAAC;IAED;;;;OAIG;IACH,IAAW,iCAAiC;QAC1C,OAAO,IAAI,CAAC,gBAAgB,CAAC,iCAAiC,CAAC;IACjE,CAAC;IAED;;;;OAIG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,IAAW,2BAA2B;QACpC,OAAO,IAAI,CAAC,gBAAgB,CAAC,2BAA2B,CAAC;IAC3D,CAAC;IAED;;;;OAIG;IACH,IAAW,sBAAsB;QAC/B,OAAO,IAAI,CAAC,gBAAgB,CAAC,sBAAsB,CAAC;IACtD,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,kBAAkB;QAC3B,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC;IACtD,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CA8BF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { HttpResponse, isNode } from \"@azure/core-http\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\n\nimport {\n BlobDownloadHeaders,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n} from \"./generatedModels\";\nimport { BlobDownloadResponseParsed, Metadata, ObjectReplicationPolicy } from \"./models\";\nimport {\n ReadableStreamGetter,\n RetriableReadableStream,\n RetriableReadableStreamOptions,\n} from \"./utils/RetriableReadableStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. 
(This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n */\nexport class BlobDownloadResponse implements BlobDownloadResponseParsed {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. 
This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return this.originalResponse.copyCompletedOn;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. 
This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n */\n public get tagCount(): number | undefined {\n return this.originalResponse.tagCount;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n */\n public get lastAccessed(): Date | undefined {\n return this.originalResponse.lastAccessed;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n */\n public get versionId(): string | undefined {\n return this.originalResponse.versionId;\n }\n\n /**\n * Indicates whether version of this blob is a current version.\n *\n * @readonly\n */\n public get isCurrentVersion(): boolean | undefined {\n return this.originalResponse.isCurrentVersion;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. 
This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n */\n public get objectReplicationDestinationPolicyId(): string | undefined {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n }\n\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n */\n public get objectReplicationSourceProperties(): ObjectReplicationPolicy[] | undefined {\n return this.originalResponse.objectReplicationSourceProperties;\n }\n\n /**\n * If this blob has been sealed.\n *\n * @readonly\n */\n public get isSealed(): boolean | undefined {\n return this.originalResponse.isSealed;\n }\n\n /**\n * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire.\n *\n * @readonly\n */\n public get immutabilityPolicyExpiresOn(): Date | undefined {\n return this.originalResponse.immutabilityPolicyExpiresOn;\n }\n\n /**\n * Indicates immutability policy mode.\n *\n * @readonly\n */\n public get immutabilityPolicyMode(): BlobImmutabilityPolicyMode | undefined {\n return this.originalResponse.immutabilityPolicyMode;\n }\n\n /**\n * Indicates if a legal hold is present on the blob.\n *\n * @readonly\n */\n public get legalHold(): boolean | undefined {\n return this.originalResponse.legalHold;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get contentAsBlob(): Promise | undefined {\n return this.originalResponse.blobBody;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobDownloadResponseParsed;\n private blobDownloadStream?: RetriableReadableStream;\n\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param originalResponse -\n * @param getter -\n * @param offset -\n * @param count -\n * @param options -\n */\n public constructor(\n originalResponse: BlobDownloadResponseParsed,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(\n this.originalResponse.readableStreamBody!,\n getter,\n offset,\n count,\n options\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js new file mode 100644 index 0000000000..796c15b78e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { generateUuid } from "@azure/core-http"; +import { StorageClientContext } from "./generated/src/index"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { Blob as StorageBlob, Container } from "./generated/src/operations"; +import { ETagNone } from "./utils/constants"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export class BlobLeaseClient { + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } + else { + this._isContainer = false; + this._containerOrBlobOperation = new StorageBlob(clientContext); + } + if (!leaseId) { + leaseId = generateUuid(); + } + this._leaseId = leaseId; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId; + return response; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. 
+ */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. 
+ */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=BlobLeaseClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map new file mode 100644 index 0000000000..6ab89f4031 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobLeaseClient.js","sourceRoot":"","sources":["../../../src/BlobLeaseClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,YAAY,EAAgB,MAAM,kBAAkB,CAAC;AAC9D,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAG7D,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,IAAI,IAAI,WAAW,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAG5E,OAAO,EAAE,QAAQ,EAAE,MAAM,mBAAmB,CAAC;AAC7C,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAqFjF;;GAEG;AACH,MAAM,OAAO,eAAe;IAwB1B;;;;OAIG;IACH,YAAY,MAAoC,EAAE,OAAgB;QAChE,MAAM,aAAa,GAAG,IAAI,oBAAoB,CAC5C,MAAM,CAAC,GAAG,EACT,MAAc,CAAC,QAAQ,CAAC,sBAAsB,EAAE,CAClD,CAAC;QACF,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC;QAEvB,IAAK,MAAqB,CAAC,IAAI,KAAK,SAAS,EAAE;YAC7C,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;YACzB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,aAAa,CAAC,CAAC;SAC/D;aAAM;YACL,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;YAC1B,IAAI,CAAC,yBAAyB,GAAG,IAAI,WAAW,CAAC,aAAa,CAAC,CAAC;SACjE;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,YAAY,EAAE,CAAC;SAC1B;QACD,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;IAC1B,CAAC;IA1CD;;;;OAIG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,QAAQ,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,IAAW,GAAG;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;IACnB,CAAC;IA4BD;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,YAAY,CACvB,QAAgB,EAChB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EACR,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EAAE,IAAI,CAAC,QAAQ,IAC3B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,WAAW,CACtB,eAAuB,EACvB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC/D,IAAI,CAAC,QAAQ,EACb,eAAe,kBAEb,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YACF,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC;YAChC,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,YAAY,CAAC,UAAiC,EAAE;;QAC3D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CA
C1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,kBACpE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,UAAU,CAAC,UAAiC,EAAE;;QACzD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,kBAClE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,WAAmB,EACnB,UAAiC,EAAE;;QAEnC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;YACjB,CAAC,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ,CAAC;gBACxE,CAAC,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,MAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;iBACjF,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;YACA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;SACH;QAED,IAAI;YACF,MAAM,gBAAgB,mBACpB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EACX,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,CACtD,CAAC;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;SAC1E;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { generateUuid, HttpResponse } from \"@azure/core-http\";\nimport { StorageClientContext } from \"./generated/src/index\";\nimport { ContainerBreakLeaseOptionalParams } from \"./generatedModels\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Blob as StorageBlob, Container } from \"./generated/src/operations\";\nimport { ModifiedAccessConditions } from \"./models\";\nimport { CommonOptions } from \"./StorageClient\";\nimport { ETagNone } from \"./utils/constants\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobClient } from \"./Clients\";\nimport { ContainerClient } from \"./ContainerClient\";\n\n/**\n * The details for a specific lease.\n */\nexport interface Lease {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally. If the request version is 2011-08-18 or\n * newer, the ETag value will be in quotes.\n */\n etag?: string;\n /**\n * Returns the date and time the container was\n * last modified. 
Any operation that modifies the blob, including an update\n * of the blob's metadata or properties, changes the last-modified time of\n * the blob.\n */\n lastModified?: Date;\n /**\n * Uniquely identifies a container's lease\n */\n leaseId?: string;\n /**\n * Approximate time remaining in the lease\n * period, in seconds.\n */\n leaseTime?: number;\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n */\n requestId?: string;\n /**\n * Indicates the version of the Blob service used\n * to execute the request. This header is returned for requests made against\n * version 2009-09-19 and above.\n */\n version?: string;\n /**\n * UTC date/time value generated by the service that\n * indicates the time at which the response was initiated\n */\n date?: Date;\n /**\n * Error code if any associated with the response that returned\n * the Lease information.\n */\n errorCode?: string;\n}\n\n/**\n * Contains the response data for operations that create, modify, or delete a lease.\n *\n * See {@link BlobLeaseClient}.\n */\nexport type LeaseOperationResponse = Lease & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: Lease;\n };\n};\n\n/**\n * Configures lease operations.\n */\nexport interface LeaseOperationOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n */\nexport class BlobLeaseClient {\n private _leaseId: string;\n private _url: string;\n private _containerOrBlobOperation: Container | StorageBlob;\n private _isContainer: boolean;\n\n /**\n * Gets the lease Id.\n *\n * @readonly\n */\n public get leaseId(): string {\n return this._leaseId;\n }\n\n /**\n * Gets the url.\n *\n * @readonly\n */\n public get url(): string {\n return this._url;\n }\n\n /**\n * Creates an instance of BlobLeaseClient.\n * @param client - The client to make the lease operation requests.\n * @param leaseId - Initial proposed lease id.\n */\n constructor(client: ContainerClient | BlobClient, leaseId?: string) {\n const clientContext = new StorageClientContext(\n client.url,\n (client as any).pipeline.toServiceClientOptions()\n );\n this._url = client.url;\n\n if ((client as BlobClient).name === undefined) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n } else {\n this._isContainer = false;\n this._containerOrBlobOperation = new StorageBlob(clientContext);\n }\n\n if (!leaseId) {\n leaseId = generateUuid();\n }\n this._leaseId = leaseId;\n }\n\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param duration - Must be between 15 to 60 seconds, or infinite (-1)\n * @param options - option to configure lease management operations.\n * @returns Response data for acquire lease operation.\n */\n public 
async acquireLease(\n duration: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-acquireLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.acquireLease({\n abortSignal: options.abortSignal,\n duration,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n proposedLeaseId: this._leaseId,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param proposedLeaseId - the proposed new lease Id.\n * @param options - option to configure lease management operations.\n * @returns Response data for change lease operation.\n */\n public async changeLease(\n proposedLeaseId: string,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-changeLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const response = await this._containerOrBlobOperation.changeLease(\n this._leaseId,\n proposedLeaseId,\n {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n this._leaseId = proposedLeaseId;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - option to configure lease management operations.\n * @returns Response data for release lease operation.\n */\n public async releaseLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-releaseLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.releaseLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - Optional option to configure lease management operations.\n * @returns Response data for renew lease operation.\n */\n public async renewLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-renewLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.renewLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param breakPeriod - Break period\n * @param options - Optional options to configure lease management operations.\n * @returns Response data for break lease operation.\n */\n public async breakLease(\n breakPeriod: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-breakLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const operationOptions: ContainerBreakLeaseOptionalParams = {\n abortSignal: options.abortSignal,\n breakPeriod,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n };\n return await this._containerOrBlobOperation.breakLease(operationOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js new file mode 100644 index 0000000000..f78c5b50c2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js @@ -0,0 +1,362 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN BROWSER RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in browser runtime it will + * parse avor data returned by blob query. + */ +export class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, _options = {}) { + this.originalResponse = originalResponse; + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. 
+ * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. 
+ * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + throw Error(`Quick query in browser is not supported yet.`); + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * @readonly + */ + get readableStreamBody() { + return undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobQueryResponse.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map new file mode 100644 index 0000000000..6d4069e736 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobQueryResponse.browser.js","sourceRoot":"","sources":["../../../src/BlobQueryResponse.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAiBlC;;;;;GAKG;AACH,MAAM,OAAO,iBAAiB;IA+X5B;;;;;OAKG;IACH,YACE,gBAAwC,EACxC,WAAwC,EAAE;QAE1C,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;IAC3C,CAAC;IAzYD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;
QAChC,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,MAAM,KAAK,CAAC,8CAA8C,CAAC,CAAC;IAC9D,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CAgBF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN BROWSER RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in browser runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. 
Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. 
This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n throw Error(`Quick query in browser is not supported yet.`);\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n _options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js new file mode 100644 index 0000000000..91dac64350 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js @@ -0,0 +1,367 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode } from "@azure/core-http"; +import { BlobQuickQueryStream } from "./utils/BlobQuickQueryStream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +export class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. 
+ * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. 
Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. 
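For context on the Node.js `BlobQueryResponse` defined in this file: it is normally reached through `BlockBlobClient.query()`, whose result exposes the parsed query output on `readableStreamBody`. A minimal sketch follows; the connection-string environment variable, container name, blob name, and SQL expression are illustrative assumptions rather than values from this change.

```js
// Minimal sketch (Node.js only): query() wraps its result in BlobQueryResponse,
// so readableStreamBody yields parsed query output rather than raw Avro frames.
const { BlobServiceClient } = require("@azure/storage-blob");

async function streamToString(readable) {
  const chunks = [];
  for await (const chunk of readable) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  return Buffer.concat(chunks).toString("utf8");
}

async function main() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING // assumed environment variable
  );
  const blockBlobClient = serviceClient
    .getContainerClient("my-container") // assumed container
    .getBlockBlobClient("data.csv"); // assumed CSV blob
  const response = await blockBlobClient.query("SELECT * from BlobStorage");
  console.log(await streamToString(response.readableStreamBody));
}

main().catch(console.error);
```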
+ * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} +//# sourceMappingURL=BlobQueryResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map new file mode 100644 index 0000000000..44f63e6014 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobQueryResponse.js","sourceRoot":"","sources":["../../../src/BlobQueryResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAgB,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAaxD,OAAO,EAAE,oBAAoB,EAA+B,MAAM,8BAA8B,CAAC;AAEjG;;;;;GAKG;AACH,MAAM,OAAO,iBAAiB;IAkY5B;;;;;OAKG;IACH,YACE,gBAAwC,EACxC,UAAuC,EAAE;QAEzC,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,oBAAoB,CAChD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,OAAO,CACR,CAAC;IACJ,CAAC;IAhZD;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,kBAAkB;QAC3B,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;;;;;;;OAWG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,eAAe;QACxB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;OAKG;IACH,IAAW,MAAM;QACf,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;IACtC,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,qBAAqB;QAC9B,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;IACrD,CAAC;IAED;;;;;;OAMG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,IAAW,UAAU;QACnB,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;IAC1C,CAAC;IAED;;;;;OAK
G;IACH,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;;OAKG;IACH,IAAW,uBAAuB;QAChC,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;IACvD,CAAC;IAED;;;;;OAKG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;IACjD,CAAC;IAED;;;;;;;;OAQG;IACH,IAAW,cAAc;QACvB,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;IAC9C,CAAC;IAED;;;;;;OAMG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;IACxC,CAAC;IAED;;;;;OAKG;IACH,IAAW,SAAS;QAClB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,IAAW,eAAe;QACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;IAC/C,CAAC;IAED;;;;;OAKG;IACH,IAAW,OAAO;QAChB,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,IAAW,mBAAmB;QAC5B,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACH,IAAW,YAAY;QACrB,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;IAC5C,CAAC;IAED;;;;;OAKG;IACH,IAAW,QAAQ;QACjB,OAAO,SAAS,CAAC;IACnB,CAAC;IAED;;;;;;;OAOG;IACH,IAAW,kBAAkB;QAC3B,OAAO,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC;IACtD,CAAC;IAED;;OAEG;IACH,IAAW,SAAS;QAGlB,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;IACzC,CAAC;CAqBF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse, isNode } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStream, BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. 
This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. 
Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n return undefined;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n private blobDownloadStream?: BlobQuickQueryStream;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(\n this.originalResponse.readableStreamBody!,\n options\n );\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js new file mode 100644 index 0000000000..327cf23d07 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js @@ -0,0 +1,786 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { isTokenCredential, isNode, getDefaultProxySettings, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { Container, Service } from "./generated/src/operations"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { ContainerClient, } from "./ContainerClient"; +import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, toTags, } from "./utils/utils.common"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import "@azure/core-paging"; +import { truncatedISO8061Date } from "./utils/utils.common"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { BlobBatchClient } from "./BlobBatchClient"; +import { StorageClient } from "./StorageClient"; +import { AccountSASPermissions } from "./sas/AccountSASPermissions"; +import { generateAccountSASQueryParameters } from "./sas/AccountSASSignatureValues"; +import { AccountSASServices } from "./sas/AccountSASServices"; +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. 
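As a usage sketch for the construction paths and container helpers above (`fromConnectionString`, `getContainerClient`, `createContainer`, `deleteContainer`): the account name, key placeholder, and container name below are assumptions for illustration only.

```js
const { BlobServiceClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

async function main() {
  // Shared-key construction (Node.js only), matching the credential branch above.
  const credential = new StorageSharedKeyCredential("myaccount", "<account-key>");
  const serviceClient = new BlobServiceClient(
    "https://myaccount.blob.core.windows.net",
    credential
  );
  // Equivalent construction from a connection string:
  // const serviceClient = BlobServiceClient.fromConnectionString(process.env.AZURE_STORAGE_CONNECTION_STRING);

  const { containerClient, containerCreateResponse } = await serviceClient.createContainer(
    "my-container"
  );
  console.log(`Created container, requestId: ${containerCreateResponse.requestId}`);

  // A ContainerClient for an existing container can also be obtained directly.
  console.log(await serviceClient.getContainerClient("my-container").exists());

  await serviceClient.deleteContainer("my-container");
}

main().catch(console.error);
```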
+ */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, + deletedContainerVersion }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. 
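A small sketch of the properties round trip described above, enabling a blob soft-delete retention policy through `setProperties`; the 7-day window is an arbitrary assumption.

```js
// Read current service properties, then update only the delete retention policy.
// Per the service documentation, elements omitted from the payload are preserved.
async function enableSoftDelete(blobServiceClient) {
  const properties = await blobServiceClient.getProperties();
  console.log("Current delete retention policy:", properties.deleteRetentionPolicy);

  await blobServiceClient.setProperties({
    deleteRetentionPolicy: { enabled: true, days: 7 }, // assumed retention window
  });
}
```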
+ */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? 
[options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. 
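The tag-filtering helpers here and below only return blobs that actually carry matching tags, so a complementary sketch of attaching tags at upload time may help; the blob name and tag values are assumptions chosen to match the `"tagkey='tagvalue'"` filter used in the examples further down.

```js
// Upload a blob with tags so that findBlobsByTags("tagkey='tagvalue'") can find it.
async function uploadTaggedBlob(containerClient) {
  const content = "hello";
  const blockBlobClient = containerClient.getBlockBlobClient("tagged.txt"); // assumed blob name
  await blockBlobClient.upload(content, Buffer.byteLength(content), {
    tags: { tagkey: "tagvalue" },
  });
  // Tags can also be replaced later:
  // await blockBlobClient.setTags({ tagkey: "othervalue" });
}
```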
+ * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield __await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield __await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return __asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield __await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield __await(yield __await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return __asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. 
+ * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + try { + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. 
The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } +} +//# sourceMappingURL=BlobServiceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map new file mode 100644 index 0000000000..16008dfdd8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobServiceClient.js","sourceRoot":"","sources":["../../../src/BlobServiceClient.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAEL,iBAAiB,EACjB,MAAM,EAEN,uBAAuB,GACxB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAsBrD,OAAO,EAAE,SAAS,EAAE,OAAO,EAAE,MAAM,4BAA4B,CAAC;AAChE,OAAO,EAAE,WAAW,EAAwC,cAAc,EAAE,MAAM,YAAY,CAAC;AAC/F,OAAO,EACL,eAAe,GAGhB,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,4BAA4B,EAC5B,MAAM,GACP,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,oBAAoB,CAAC;AAE5B,OAAO,EAAE,oBAAoB,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,qBAAqB,EAAE,MAAM,6BAA6B,CAAC;AAGpE,OAAO,EAAE,iCAAiC,EAAE,MAAM,iCAAiC,CAAC;AACpF,OAAO,EAAE,kBAAkB,EAAE,MAAM,0BAA0B,CAAC;AA4S9D;;;GAGG;AACH,MAAM,OAAO,iBAAkB,SAAQ,aAAa;IAuGlD,YACE,GAAW,EACX,oBAIgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;SACjC;aAAM,IACL,CAAC,MAAM,IAAI,oBAAoB,YAAY,0BAA0B,CAAC;YACtE,oBAAoB,YAAY,mBAAmB;YACnD,iBAAiB,CAAC,oBAAoB,CAAC,EACvC;YACA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;SACvD;aAAM;YACL,8DAA8D;YAC9D,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IAC/D,CAAC;IA3HD;;;;;;;;;;;OAWG;IACI,MAAM,CAAC,oBAAoB,CAChC,gBAAwB;IACxB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,MAAM,cAAc,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QACtE,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;YAC/C,IAAI,MAAM,EAAE;gBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;gBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;oBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;iBACzE;gBAED,MAAM,QAAQ,GAAG,WAAW
,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;gBAC3D,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;aACvF;SACF;aAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;YAClD,MAAM,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;YACjE,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,GAAG,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;SAC9F;aAAM;YACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;SACH;IACH,CAAC;IAiFD;;;;;;;;;;;OAWG;IACI,kBAAkB,CAAC,aAAqB;QAC7C,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EAC5D,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,eAAe,CAC1B,aAAqB,EACrB,UAAkC,EAAE;QAKpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,MAAM,uBAAuB,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC7E,OAAO;gBACL,eAAe;gBACf,uBAAuB;aACxB,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,eAAe,CAC1B,aAAqB,EACrB,UAAwC,EAAE;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,OAAO,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;SACrD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,iBAAiB,CAC5B,oBAA4B,EAC5B,uBAA+B,EAC/B,UAA2C,EAAE;QAK7C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAC7C,OAAO,CAAC,wBAAwB,IAAI,oBAAoB,CACzD,CAAC;YACF,qCAAqC;YACrC,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,yBAAyB,GAAG,MAAM,gBAAgB,CAAC,OAAO,iBAC9D,oBAAoB;gBACpB,uBAAuB,IACpB,cAAc,EACjB,CAAC;YACH,OAAO,EAAE,eAAe,EAAE,yBAAyB,EAAE,CAAC;SACvD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACH,gEAAgE;IAChE,8HAA8H;IACtH,KAAK,CAAC,eAAe,CAC3B,mBAA2B,EAC3B,wBAAgC,EAChC,UAAyC,EAAE;;QAK3C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,wBAAwB,CAAC,CAAC;YAC1E,qCAAqC;YACrC,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,uBAAuB,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,mBAAmB,kCAC5E,cAAc,KACjB,aAAa,EAAE,MAAA,OAAO,CAAC,eAAe,0CAAE,OAAO,IAC/C,CAAC;YACH,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,CAAC;SACrD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,aAAa,CACxB,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,UAAiC,EACjC,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CA
AC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,CAAC,UAAU,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,cAAc,CACzB,UAAwC,EAAE;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,cAAc,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACK,KAAK,CAAC,qBAAqB,CACjC,MAAe,EACf,UAA+C,EAAE;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAEhG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,qBAAqB,6CACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,IACH,OAAO,KACV,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,KAC/E,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;OAiBG;IACK,KAAK,CAAC,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QAEF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,WAAW,iBACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,MAAA,IAAI,CAAC,IAAI,0CAAE,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;qBAC1C;oBACD,uCAAY,IAAI,KAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,IAAG;gBACxD,CAAC,CAAC,GACH,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACY,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE;;YAElD,IAAI,QAAQ,CAAC;YACb,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,QAAQ,GAAG,cAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;oBACtC,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,oBAAM,QAAQ,CAAA,CAAC;iBAChB,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACY,oBAAoB,CACjC,sBAA8B,EAC9B,UAAgD,EAAE;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA
,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;iBACtB;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA8EG;IACI,eAAe,CACpB,sBAA8B,EAC9B,UAAwC,EAAE;QAE1C,8CAA8C;QAC9C,MAAM,kBAAkB,qBACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,kBACpF,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACY,YAAY,CACzB,MAAe,EACf,UAA+C,EAAE;;YAEjD,IAAI,6BAA6B,CAAC;YAClC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,6BAA6B,GAAG,cAAM,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBAClF,6BAA6B,CAAC,cAAc;wBAC1C,6BAA6B,CAAC,cAAc,IAAI,EAAE,CAAC;oBACrD,MAAM,GAAG,6BAA6B,CAAC,iBAAiB,CAAC;oBACzD,oBAAM,cAAM,6BAA6B,CAAA,CAAA,CAAC;iBAC3C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;OAIG;IACY,SAAS,CACtB,UAA+C,EAAE;;;YAEjD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,IAAA;oBAAnD,MAAM,OAAO,WAAA,CAAA;oBACtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAAO,CAAC,cAAc,CAAA,CAAA,CAAA,CAAC;iBAC/B;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAyEG;IACI,cAAc,CACnB,UAAwC,EAAE;QAE1C,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,OAAO,GAAgC,EAAE,CAAC;QAChD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,aAAa,EAAE;YACzB,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;SACxB;QAED,mDAAmD;QACnD,MAAM,kBAAkB,mCACnB,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CAC3C,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC;QAChD,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,kBACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,oBAAoB,CAC/B,QAAc,EACd,SAAe,EACf,UAA8C,EAAE;QAEhD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAC/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,oBAAoB,CAC7D;gBACE,QAAQ,EAAE,oBAAoB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBAC/C,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE,KAAK,CAAC;aAClD,kBAEC,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;YAEF,MAAM,iBAAiB,GAAG;gBACxB,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC;gBACjD,eAAe,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC;gBACnD,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,KAAK,EAAE,QAAQ,CAAC,KAAK;aACtB,CAAC;YAEF,MAAM,GAAG,mBACP,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe,EACzC,OAAO,EAAE,QAAQ,CAAC,OAAO,EACzB,IAAI,EAAE,QAAQ,CAAC,IAAI,EACnB,SAAS,EAAE,QAAQ,CAAC,SAAS,IAC1B,iBAAiB,CACrB,CAAC;YAEF,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GA
AG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,qBAAqB,CAC1B,SAAgB,EAChB,cAAqC,qBAAqB,CAAC,KAAK,CAAC,GAAG,CAAC,EACrE,gBAAwB,KAAK,EAC7B,UAA+C,EAAE;QAEjD,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;YAC5D,MAAM,UAAU,CACd,+FAA+F,CAChG,CAAC;SACH;QAED,IAAI,SAAS,KAAK,SAAS,EAAE;YAC3B,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;YACvB,SAAS,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC;SACnD;QAED,MAAM,GAAG,GAAG,iCAAiC,iBAEzC,WAAW;YACX,SAAS;YACT,aAAa,EACb,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,IAC/C,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;QAEb,OAAO,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IACzC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport {\n TokenCredential,\n isTokenCredential,\n isNode,\n HttpResponse,\n getDefaultProxySettings,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n ServiceGetUserDelegationKeyHeaders,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ServiceGetPropertiesResponse,\n BlobServiceProperties,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentResponse,\n ContainerItem,\n UserDelegationKeyModel,\n ContainerUndeleteResponse,\n FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n ContainerRenameResponse,\n LeaseAccessConditions,\n FilterBlobSegment,\n FilterBlobItem,\n} from \"./generatedModels\";\nimport { Container, Service } from \"./generated/src/operations\";\nimport { newPipeline, StoragePipelineOptions, PipelineLike, isPipelineLike } from \"./Pipeline\";\nimport {\n ContainerClient,\n ContainerCreateOptions,\n ContainerDeleteMethodOptions,\n} from \"./ContainerClient\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n toTags,\n} from \"./utils/utils.common\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport \"@azure/core-paging\";\nimport { PageSettings, PagedAsyncIterableIterator } from \"@azure/core-paging\";\nimport { truncatedISO8061Date } from \"./utils/utils.common\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { AccountSASPermissions } from \"./sas/AccountSASPermissions\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateAccountSASQueryParameters } from \"./sas/AccountSASSignatureValues\";\nimport { AccountSASServices } from \"./sas/AccountSASServices\";\nimport { ListContainersIncludeType } from \"./generated/src\";\n\n/**\n * Options to configure the {@link BlobServiceClient.getProperties} operation.\n */\nexport interface ServiceGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.setProperties} operation.\n */\nexport interface ServiceSetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to 
signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getAccountInfo} operation.\n */\nexport interface ServiceGetAccountInfoOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getStatistics} operation.\n */\nexport interface ServiceGetStatisticsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the Service - Get User Delegation Key.\n */\nexport interface ServiceGetUserDelegationKeyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainerSegment} operation.\n */\ninterface ServiceListContainersSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify that the container's metadata be returned as part of the response\n * body. 
Possible values include: 'metadata'\n */\n include?: ListContainersIncludeType | ListContainersIncludeType[];\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainers} operation.\n */\nexport interface ServiceListContainersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies whether the container's metadata\n * should be returned as part of the response body.\n */\n includeMetadata?: boolean;\n\n /**\n * Specifies whether soft deleted containers should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether system containers should be included in the response.\n */\n includeSystem?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTagsSegment} operation.\n */\ninterface ServiceFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ServiceFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ServiceFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A user delegation key.\n */\nexport interface UserDelegationKey {\n /**\n * The Azure Active Directory object ID in GUID format.\n */\n signedObjectId: string;\n /**\n * The Azure Active Directory tenant ID in GUID format.\n */\n signedTenantId: string;\n /**\n * The date-time the key is active.\n */\n signedStartsOn: Date;\n /**\n * The date-time the key expires.\n */\n signedExpiresOn: Date;\n /**\n * Abbreviation of the Azure Storage service that accepts the key.\n */\n signedService: string;\n /**\n * The service version that created the key.\n */\n signedVersion: string;\n /**\n * The key as a base64 string.\n */\n value: string;\n}\n\n/**\n * 
Contains response data for the {@link getUserDelegationKey} operation.\n */\nexport declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey &\n ServiceGetUserDelegationKeyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: UserDelegationKeyModel;\n };\n };\n\n/**\n * Options to configure {@link BlobServiceClient.undeleteContainer} operation.\n */\nexport interface ServiceUndeleteContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies the new name of the restored container.\n * Will use its original name if this is not specified.\n * @deprecated Restore container to a different name is not supported by service anymore.\n */\n destinationContainerName?: string;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.renameContainer} operation.\n */\nexport interface ServiceRenameContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Condition to meet for the source container.\n */\n sourceCondition?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation.\n */\nexport interface ServiceGenerateAccountSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n */\nexport class BlobServiceClient extends StorageClient {\n /**\n * serviceContext provided by protocol layer.\n */\n private serviceContext: Service;\n\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n public static fromConnectionString(\n connectionString: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ): BlobServiceClient {\n options = options || {};\n const extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n const pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n const pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n }\n\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n *\n * Example using DefaultAzureCredential from `@azure/identity`:\n *\n * ```js\n * const account = \"\";\n *\n * const defaultAzureCredential = new DefaultAzureCredential();\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * defaultAzureCredential\n * );\n * ```\n *\n * Example using an account name/key:\n *\n * ```js\n * const account = \"\"\n * const sharedKeyCredential = new StorageSharedKeyCredential(account, \"\");\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * sharedKeyCredential\n * );\n * ```\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (\n (isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipeline)\n ) {\n pipeline = newPipeline(credentialOrPipeline, options);\n } else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n super(url, pipeline);\n this.serviceContext = new Service(this.storageClientContext);\n }\n\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param containerName - A container name\n * @returns A new ContainerClient object for the given container name.\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n public getContainerClient(containerName: string): ContainerClient {\n return new ContainerClient(\n appendToURLPath(this.url, encodeURIComponent(containerName)),\n this.pipeline\n );\n }\n\n /**\n * Create a Blob container.\n *\n * @param containerName - Name of the container to create.\n * @param options - Options to configure Container Create operation.\n * @returns Container creation response and the corresponding container client.\n */\n public async createContainer(\n containerName: string,\n options: ContainerCreateOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerCreateResponse: ContainerCreateResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-createContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n const containerCreateResponse = await containerClient.create(updatedOptions);\n return {\n containerClient,\n containerCreateResponse,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Deletes a Blob container.\n *\n * @param containerName - Name of the container to delete.\n * @param options - Options to configure Container Delete operation.\n * @returns Container deletion response.\n */\n public async deleteContainer(\n containerName: string,\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-deleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n return await containerClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.\n *\n * @param deletedContainerName - Name 
of the previously deleted container.\n * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.\n * @param options - Options to configure Container Restore operation.\n * @returns Container deletion response.\n */\n public async undeleteContainer(\n deletedContainerName: string,\n deletedContainerVersion: string,\n options: ServiceUndeleteContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerUndeleteResponse: ContainerUndeleteResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-undeleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(\n options.destinationContainerName || deletedContainerName\n );\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerUndeleteResponse = await containerContext.restore({\n deletedContainerName,\n deletedContainerVersion,\n ...updatedOptions,\n });\n return { containerClient, containerUndeleteResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Rename an existing Blob Container.\n *\n * @param sourceContainerName - The name of the source container.\n * @param destinationContainerName - The new name of the container.\n * @param options - Options to configure Container Rename operation.\n */\n /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */\n // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.\n private async renameContainer(\n sourceContainerName: string,\n destinationContainerName: string,\n options: ServiceRenameContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerRenameResponse: ContainerRenameResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-renameContainer\", options);\n try {\n const containerClient = this.getContainerClient(destinationContainerName);\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerRenameResponse = await containerContext.rename(sourceContainerName, {\n ...updatedOptions,\n sourceLeaseId: options.sourceCondition?.leaseId,\n });\n return { containerClient, containerRenameResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param options - Options to the Service Get Properties operation.\n * @returns Response data for the Service Get Properties operation.\n */\n public async getProperties(\n options: ServiceGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getProperties\", options);\n try {\n return await this.serviceContext.getProperties({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets properties for a 
storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties\n *\n * @param properties -\n * @param options - Options to the Service Set Properties operation.\n * @returns Response data for the Service Set Properties operation.\n */\n public async setProperties(\n properties: BlobServiceProperties,\n options: ServiceSetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-setProperties\", options);\n try {\n return await this.serviceContext.setProperties(properties, {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats\n *\n * @param options - Options to the Service Get Statistics operation.\n * @returns Response data for the Service Get Statistics operation.\n */\n public async getStatistics(\n options: ServiceGetStatisticsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getStatistics\", options);\n try {\n return await this.serviceContext.getStatistics({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param options - Options to the Service Get Account Info operation.\n * @returns Response data for the Service Get Account Info operation.\n */\n public async getAccountInfo(\n options: ServiceGetAccountInfoOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getAccountInfo\", options);\n try {\n return await this.serviceContext.getAccountInfo({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to the Service List Container Segment operation.\n * @returns Response data for the Service List Container Segment operation.\n */\n private async listContainersSegment(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-listContainersSegment\", options);\n\n try {\n return await this.serviceContext.listContainersSegment({\n abortSignal: options.abortSignal,\n marker,\n ...options,\n include: typeof options.include === \"string\" ? [options.include] : options.include,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"BlobServiceClient-findBlobsByTagsSegment\",\n options\n );\n\n try {\n const response = await this.serviceContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ServiceFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter 
enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ServiceFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list containers operation.\n */\n private async *listSegments(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let listContainersSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listContainersSegmentResponse = await this.listContainersSegment(marker, options);\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n yield await listContainersSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @param options - Options to list containers operation.\n */\n private async *listItems(\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.listSegments(marker, options)) {\n yield* segment.containerItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param options - Options to list containers.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listContainers(\n options: ServiceListContainersOptions = {}\n ): PagedAsyncIterableIterator {\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const include: ListContainersIncludeType[] = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSystem) {\n 
include.push(\"system\");\n }\n\n // AsyncIterableIterator to iterate over containers\n const listSegmentOptions: ServiceListContainersSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include } : {}),\n };\n\n const iter = this.listItems(listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time\n */\n public async getUserDelegationKey(\n startsOn: Date,\n expiresOn: Date,\n options: ServiceGetUserDelegationKeyOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getUserDelegationKey\", options);\n try {\n const response = await this.serviceContext.getUserDelegationKey(\n {\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false),\n },\n {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n const userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value,\n };\n\n const res: ServiceGetUserDelegationKeyResponse = {\n _response: response._response,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n version: response.version,\n date: response.date,\n errorCode: response.errorCode,\n ...userDelegationKey,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this service.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param permissions - Specifies the list of permissions to be associated with the SAS.\n * @param resourceTypes - Specifies the resource types associated with the shared access signature.\n * @param options - Optional parameters.\n * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateAccountSasUrl(\n expiresOn?: Date,\n permissions: AccountSASPermissions = AccountSASPermissions.parse(\"r\"),\n resourceTypes: string = \"sco\",\n options: ServiceGenerateAccountSasUrlOptions = {}\n ): string {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\n \"Can only generate the account SAS when the client is initialized with a shared key credential\"\n );\n }\n\n if (expiresOn === undefined) {\n const now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n\n const sas = generateAccountSASQueryParameters(\n {\n permissions,\n expiresOn,\n resourceTypes,\n services: AccountSASServices.parse(\"b\").toString(),\n ...options,\n },\n this.credential\n ).toString();\n\n return appendToURLQuery(this.url, sas);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js new file mode 100644 index 0000000000..438cb33702 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js @@ -0,0 +1,2751 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +import { generateUuid, getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { BlobDownloadResponse } from "./BlobDownloadResponse"; +import { BlobQueryResponse } from "./BlobQueryResponse"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from "./generated/src/operations"; +import { ensureCpkIfSpecified, toAccessTier, } from "./models"; +import { rangeResponseFromModel, } from "./PageBlobRangeResponse"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { BlobBeginCopyFromUrlPoller, } from "./pollers/BlobStartCopyFromUrlPoller"; +import { rangeToString } from "./Range"; +import { StorageClient } from "./StorageClient"; +import { Batch } from "./utils/Batch"; +import { BufferScheduler } from "../../storage-common/src"; +import { BlobDoesNotUseCustomerSpecifiedEncryption, BlobUsesCustomerSpecifiedEncryptionMsg, BLOCK_BLOB_MAX_BLOCKS, BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES, BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES, DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES, DEFAULT_BLOCK_BUFFER_SIZE_BYTES, DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS, ETagAny, URLConstants, } from "./utils/constants"; +import { createSpan, convertTracingToRequestOptionsBase } from "./utils/tracing"; +import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, ExtractPageRangeInfoItems, generateBlockID, getURLParameter, httpAuthorizationToString, isIpEndpointStyle, parseObjectReplicationRecord, setURLParameter, toBlobTags, toBlobTagsString, toQuerySerialization, toTags, } from "./utils/utils.common"; +import { fsCreateReadStream, fsStat, readStreamToLocalFile, streamToBuffer, } from "./utils/utils.node"; +import { 
generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; +import { BlobLeaseClient } from "./BlobLeaseClient"; +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export class BlobClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" 
+ + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new StorageBlob(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
+ */ + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + try { + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + async beginCopyFromURL(copySource, options = {}) { + const client = { + abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), + getProperties: (...args) => this.getProperties(...args), + startCopyFromURL: (...args) => this.startCopyFromURL(...args), + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options, + }); + // Trigger the startCopyFromURL call by calling poll. + // Any errors from this method should be surfaced to the user. + await poller.poll(); + return poller; + } + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. 
+ */ + async abortCopyFromURL(copyId, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
+ */ + async setAccessTier(tier, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. 
+ response.blobDownloadStream = undefined; + return response; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
+ */ + async startCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. 
+ */ + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new StorageBlob(this.storageClientContext); + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
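+ *
+ * Illustrative example (a sketch, not taken from the vendored source; `blockBlobClient`
+ * and `sourceBlobURL` are assumed placeholders, and the source blob must be public or
+ * carry a SAS):
+ *
+ * ```js
+ * // Stage two 4 MB blocks from a source blob URL, then commit them in order.
+ * // Block IDs must be base64-encoded strings of equal length.
+ * const blockSize = 4 * 1024 * 1024;
+ * const blockIds = [0, 1].map((i) =>
+ *   Buffer.from(String(i).padStart(6, "0")).toString("base64")
+ * );
+ * await blockBlobClient.stageBlockFromURL(blockIds[0], sourceBlobURL, 0, blockSize);
+ * await blockBlobClient.stageBlockFromURL(blockIds[1], sourceBlobURL, blockSize, blockSize);
+ * await blockBlobClient.commitBlockList(blockIds);
+ * ```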
+ */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. 
+ */ + async getBlockList(listType, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + options.blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
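+ *
+ * Illustrative example (a sketch, not taken from the vendored source; the file path and
+ * `blockBlobClient` are assumed placeholders):
+ *
+ * ```js
+ * const fs = require("fs");
+ * // Stream a local file in 4 MB blocks with up to 20 buffers in flight.
+ * await blockBlobClient.uploadStream(
+ *   fs.createReadStream("./data.bin"),
+ *   4 * 1024 * 1024,
+ *   20,
+ *   { onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`) }
+ * );
+ * ```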
+ */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = generateUuid(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
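+ *
+ * Illustrative example (a sketch, not taken from the vendored source; assumes a
+ * `containerClient` and uses placeholder names):
+ *
+ * ```js
+ * // Page blob offsets and lengths must be 512-byte aligned.
+ * const pageBlobClient = containerClient.getPageBlobClient("pages.bin");
+ * await pageBlobClient.create(1024); // total size, a multiple of 512
+ * const page = Buffer.alloc(512, "a");
+ * await pageBlobClient.uploadPages(page, 0, page.length); // write the first page
+ * await pageBlobClient.clearPages(0, 512); // then free it again
+ * ```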
+ */ + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext + .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + return __asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield __await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield __await(yield __await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems(offset = 0, count, options = {}) { + return __asyncGenerator(this, arguments, function* listPageRangeItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const blob of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset = 0, count, options = {}) { + options.conditions = options.conditions || {}; + // AsyncIterableIterator to iterate over blobs + const iter = this.listPageRangeItems(offset, count, options); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. 
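+ *
+ * Illustrative example (a sketch, not taken from the vendored source; assumes an existing
+ * `pageBlobClient` of at least 1024 bytes):
+ *
+ * ```js
+ * // Snapshot the blob, write more pages, then ask only for the ranges that changed.
+ * const { snapshot } = await pageBlobClient.createSnapshot();
+ * await pageBlobClient.uploadPages(Buffer.alloc(512, "b"), 512, 512);
+ * const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshot);
+ * for (const range of diff.pageRange || []) {
+ *   console.log(`Changed range: ${range.start} - ${range.end}`);
+ * }
+ * ```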
+ */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); + try { + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset: offset, + count: count, + }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. 
+ * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return __asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield __await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield __await(yield __await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return __asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+ options.conditions = options.conditions || {};
+ // AsyncIterableIterator to iterate over page ranges
+ const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options));
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: (settings = {}) => {
+ return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ },
+ };
+ }
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); + try { + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + try { + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. 
+ * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + async startCopyIncremental(copySource, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); + try { + return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +//# sourceMappingURL=Clients.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map new file mode 100644 index 0000000000..0f8b3e7a97 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"Clients.js","sourceRoot":"","sources":["../../../src/Clients.ts"],"names":[],"mappings":";AAGA,OAAO,EACL,YAAY,EACZ,uBAAuB,EAGvB,MAAM,EACN,iBAAiB,EAGjB,UAAU,GACX,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AAGrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAC9D,OAAO,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAC;AACxD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,UAAU,EAAE,IAAI,IAAI,WAAW,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,4BAA4B,CAAC;AA8ClG,OAAO,EAKL,oBAAoB,EAMpB,YAAY,GAQb,MAAM,UAAU,CAAC;AAClB,OAAO,EAGL,sBAAsB,GACvB,MAAM,yBAAyB,CAAC;AACjC,OAAO,EAAE,WAAW,EAAgB,cAAc,EAA0B,MAAM,YAAY,CAAC;AAC/F,OAAO,EACL,0BAA0B,GAG3B,MAAM,sCAAsC,CAAC;AAC9C,OAAO,EAAS,aAAa,EAAE,MAAM,SAAS,CAAC;AAC/C,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,KAAK,EAAE,MAAM,eAAe,CAAC;AACtC,OAAO,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3D,OAAO,EACL,yCAAyC,EACzC,sCAAsC,EACtC,qBAAqB,EACrB,gCAAgC,EAChC,gCAAgC,EAChC,iCAAiC,EACjC,+BAA+B,EAC/B,mCAAmC,EACnC,OAAO,EACP,YAAY,GACb,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,UAAU,EAAE,kCAAkC,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,4BAA4B,EAC5B,yBAAyB,EACzB,eAAe,EACf,eAAe,EACf,yBAAyB,EACzB,iBAAiB,EACjB,4BAA4B,EAC5B,eAAe,EACf,UAAU,EACV,gBAAgB,EAChB,oBAAoB,EACpB,MAAM,GACP,MAAM,sBAAsB,CAAC;AAC9B,OAAO,EACL,kBAAkB,EAClB,MAAM,EACN,qBAAqB,EACrB,cAAc,GACf,MAAM,oBAAoB,CAAC;AAG5B,OAAO,EAAE,8BAA8B,EAAE,MAAM,8BAA8B,CAAC;AAE9E,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAssBpD;;;GAGG;AACH,MAAM,OAAO,UAAW,SAAQ,aAAa;IAqF3C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QAED,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,aAAa,EAAE,IAAI,CAAC,cAAc,EAAE;YAC3D,IAAI,CAAC,+BAA+B,EAAE,CAAC,CAAC;QAC1C,IAAI,CAAC,WAAW,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QAE9D,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CA
AC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAW,CAAC;QACvF,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAW,CAAC;IAC3F,CAAC;IArKD;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAED;;OAEG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;IAC7B,CAAC;IA2JD;;;;;;OAMG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;OAMG;IACI,WAAW,CAAC,SAAiB;QAClC,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,SAAS,EACjC,SAAS,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAC/C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;OAGG;IACI,mBAAmB;QACxB,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACvD,CAAC;IAED;;;OAGG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;OAGG;IACI,iBAAiB;QACtB,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACrD,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA0DG;IACI,KAAK,CAAC,QAAQ,CACnB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA+B,EAAE;;QAEjC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEhE,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAE5E,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACzC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,kBAAkB,EAAE,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,OAAO,CAAC,UAAU,EAAE,+DAA+D;iBAC7H,EACD,KAAK,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAC5E,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB,EAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,UAAU,mCACX,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,GAC5F,CAAC;YACF,sCAAsC;YACtC,IAAI,CAAC,MAAM,EAAE;gBACX,OAAO,UAAU,CAAC;aACnB;YAED,8EAA8E;YAC9E,uEAAuE;YACvE,uEAAuE;YACvE,sGAAsG;YACtG,gDAAgD;YAChD,IAAI,OAAO,CAAC,gBAAgB,KAAK,SAAS,IAAI,OAAO,CAAC,gBAAgB,GAAG,CAAC,EAAE;gBAC1E,uDAAuD;gBACvD,OAAO,CAAC,gBAAgB,GAAG,mCAAmC,CAAC;aAChE;YAED,IAAI,GAAG,CAAC,aAAa,KAAK,SAAS,EAAE;gBACnC,MAAM,IAAI,UAAU,CAAC,oEAAoE,CAAC,CAAC;aAC5F;YAED,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;gBACb,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;aAClF;YAED,OAAO,IAAI,oBAAoB,CAC7B,UAAU,EACV,KAAK,EAAE,KAAa,EAAkC,EAAE;;gBACtD,MAAM,sBAAsB,GAA+B;oBACzD,qBAAqB,EAAE,OAAO,CAAC,UAAU;oBACzC,wBAAwB,EAAE;wBACxB,OAAO,EAAE,OAAO,CAAC,UAAW,CAAC,OAAO,IAAI,GAAG,CAAC,IAAI;wBAChD,eAAe,EAAE,OAAO,CAAC,UAAW,CAAC,eAAe;wBACpD,WAAW,EAAE,OAAO,CAAC,UAAW,CAAC,WAAW;wBAC5C,iBAAiB,EAAE,OAAO,CAAC,UAAW,CAAC,iBAAiB;wBACxD,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa;qBAC1C;oBACD,KAAK,EAAE,aAAa,CAAC;wBACnB,KAAK,EAAE,MAAM,GAAG,GAAG,CAAC,aAAc,GAAG,KAAK;wBAC1C,MAAM,EAAE,KAAK;qBACd,CAAC;oBACF,kBAAkB,EAAE,OAAO,CAAC,kBAAkB;oBAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB;oBAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ;oBAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB;iBACrC,CAAC;gBAEF,qBAAqB;gBACrB,eAAe;gBACf,0CAA0C;gBAC1C,2BAA2B;gBAC3B,mDAAmD;gBACnD,KAAK;gBAEL,OAAO,CACL,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBAC7B,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,sBAAsB,EACzB,CACH,CAAC,kBAAmB,CAAC;YACxB,CAAC,EACD,MAAM,EACN,GAAG,CAAC,aAAc,
EAClB;gBACE,gBAAgB,EAAE,OAAO,CAAC,gBAAgB;gBAC1C,UAAU,EAAE,OAAO,CAAC,UAAU;aAC/B,CACF,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAA6B,EAAE;QACjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;gBAChD,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,cAAc,EAAE,cAAc,CAAC,cAAc;aAC9C,CAAC,CAAC;YACH,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,kDAAkD;gBAClD,OAAO,KAAK,CAAC;aACd;iBAAM,IACL,CAAC,CAAC,UAAU,KAAK,GAAG;gBACpB,CAAC,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,sCAAsC;oBAC7D,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,yCAAyC,CAAC,EACpE;gBACA,kDAAkD;gBAClD,OAAO,IAAI,CAAC;aACb;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,aAAa,CACxB,UAAoC,EAAE;;QAEtC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,aAAa,iBAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,uCACK,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,IAC3F;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAA6B,EAAE;;QACjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,iBAClC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,cAAc,CACzB,UAA6B,EAAE;;QAE/B,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,cAAc,EAAE;gBAC3C,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,wEAAwE;iBAClF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,QAAQ,CAAC,UAA+B,EAAE;QACrD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAA
C,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAC5E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACpC,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACI,KAAK,CAAC,cAAc,CACzB,eAAiC,EACjC,UAAqC,EAAE;;QAEvC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,eAAe,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAGxC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,WAAW,CACtB,QAAmB,EACnB,UAAkC,EAAE;;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,iBACvC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,OAAO,CAAC,IAAU,EAAE,UAA8B,EAAE;;QAC/D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,+BACnC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,KACrD,IAAI,EAAE,UAAU,CAAC,IAAI,CAAC,IACtB,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,OAAO,CAAC,UAA8B,EAAE;;QACnD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,IAAI,EAAE,MAAM,CAAC,EAAE,UAAU,EAAE,QAAQ,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE,GACxD,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,kBAAkB,CAAC,cAAuB;QAC/C,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACnD,CAAC;IAED;;;;;OAKG;IACI,KAAK,CAAC,cAAc,CACzB,UAAqC,EAAE;;QAEvC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OA
AO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAuEG;IACI,KAAK,CAAC,gBAAgB,CAC3B,UAAkB,EAClB,UAAuC,EAAE;QAIzC,MAAM,MAAM,GAAyB;YACnC,gBAAgB,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;YAC7D,aAAa,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;YACvD,gBAAgB,EAAE,CAAC,GAAG,IAAI,EAAE,EAAE,CAAC,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;SAC9D,CAAC;QACF,MAAM,MAAM,GAAG,IAAI,0BAA0B,CAAC;YAC5C,UAAU,EAAE,MAAM;YAClB,UAAU;YACV,YAAY,EAAE,OAAO,CAAC,YAAY;YAClC,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,uBAAuB,EAAE,OAAO;SACjC,CAAC,CAAC;QAEH,qDAAqD;QACrD,8DAA8D;QAC9D,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;QAEpB,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,gBAAgB,CAC3B,MAAc,EACd,UAAuC,EAAE;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,MAAM,kBACnD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,IACtC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IACI,KAAK,CAAC,eAAe,CAC1B,UAAkB,EAClB,UAAsC,EAAE;;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,kBAClD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,cAAc,EAAE,OAAO,CAAC,cAAc,IACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,aAAa,CACxB,IAAkD,EAClD,UAA8B,EAAE;;QAEhC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,YAAY,CAAC,IAAI,CAAE,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI
,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IA8CM,KAAK,CAAC,gBAAgB,CAC3B,MAAwB,EACxB,MAAe,EACf,MAA6C,EAC7C,SAAsC,EAAE;QAExC,IAAI,MAA0B,CAAC;QAC/B,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,MAAM,CAAC;QACrB,IAAI,MAAM,YAAY,MAAM,EAAE;YAC5B,MAAM,GAAG,MAAM,CAAC;YAChB,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;YACrB,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;SACjD;aAAM;YACL,MAAM,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;YACjD,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;YAChD,OAAO,GAAI,MAAsC,IAAI,EAAE,CAAC;SACzD;QACD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;YACF,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;gBACtB,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;aACvB;YACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,EAAE;gBACzB,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC;aACvD;YACD,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;gBAC3B,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;aACvD;YAED,IAAI,MAAM,GAAG,CAAC,EAAE;gBACd,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;aACpD;YAED,IAAI,KAAK,IAAI,KAAK,IAAI,CAAC,EAAE;gBACvB,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC;aAC7D;YAED,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;gBACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;aACzB;YAED,0CAA0C;YAC1C,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,iCACpC,OAAO,KACV,cAAc,kCACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;gBACH,KAAK,GAAG,QAAQ,CAAC,aAAc,GAAG,MAAM,CAAC;gBACzC,IAAI,KAAK,GAAG,CAAC,EAAE;oBACb,MAAM,IAAI,UAAU,CAClB,UAAU,MAAM,uCAAuC,QAAQ,CAAC,aAAc,EAAE,CACjF,CAAC;iBACH;aACF;YAED,oEAAoE;YACpE,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI;oBACF,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;iBAC9B;gBAAC,OAAO,KAAU,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,0CAA0C,KAAK,qJAAqJ,KAAK,CAAC,OAAO,EAAE,CACpN,CAAC;iBACH;aACF;YAED,IAAI,MAAM,CAAC,MAAM,GAAG,KAAK,EAAE;gBACzB,MAAM,IAAI,UAAU,CAClB,mFAAmF,KAAK,EAAE,CAC3F,CAAC;aACH;YAED,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,GAAG,GAAG,MAAM,EAAE,GAAG,GAAG,MAAM,GAAG,KAAK,EAAE,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC,SAAS,EAAE;gBAC1E,KAAK,CAAC,YAAY,CAAC,KAAK,IAAI,EAAE;oBAC5B,+BAA+B;oBAC/B,IAAI,QAAQ,GAAG,MAAM,GAAG,KAAM,CAAC;oBAC/B,IAAI,GAAG,GAAG,OAAO,CAAC,SAAU,GAAG,QAAQ,EAAE;wBACvC,QAAQ,GAAG,GAAG,GAAG,OAAO,CAAC,SAAU,CAAC;qBACrC;oBACD,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,QAAQ,GAAG,GAAG,EAAE;wBACxD,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,gBAAgB,EAAE,OAAO,CAAC,wBAAwB;wBAClD,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;wBAChD,cAAc,kCACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,CACtD;qBACF,CAAC,CAAC;oBACH,MAAM,MAAM,GAAG,QAAQ,CAAC,kBAAmB,CAAC;oBAC5C,MAAM,cAAc,CAAC,MAAM,EAAE,MAAO,EAAE,GAAG,GAAG,MAAM,EAAE,QAAQ,GAAG,MAAM,CAAC,CAAC;oBACvE,qEAAqE;oBACrE,sEAAsE;oBACtE,oDAAoD;oBACpD,gBAAgB,IAAI,QAAQ,GAAG,GAAG,CAAC;oBACnC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;qBACvD;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YACjB,OAAO,MAAM,CAAC;SACf;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,cAAc,CACzB,QAAgB,EAChB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA+B,EAAE;QAEjC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,kCAC7C,OAAO,KACV,cAAc,kCACT,OA
AO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;YACH,IAAI,QAAQ,CAAC,kBAAkB,EAAE;gBAC/B,MAAM,qBAAqB,CAAC,QAAQ,CAAC,kBAAkB,EAAE,QAAQ,CAAC,CAAC;aACpE;YAED,iEAAiE;YAChE,QAAgB,CAAC,kBAAkB,GAAG,SAAS,CAAC;YACjD,OAAO,QAAQ,CAAC;SACjB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAEO,+BAA+B;QACrC,IAAI,aAAa,CAAC;QAClB,IAAI,QAAQ,CAAC;QACb,IAAI;YACF,mCAAmC;YACnC,wEAAwE;YACxE,8DAA8D;YAC9D,8EAA8E;YAC9E,oEAAoE;YACpE,oGAAoG;YACpG,6DAA6D;YAE7D,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAE7C,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;gBACjD,gEAAgE;gBAChE,oCAAoC;gBACpC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;gBACtE,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;iBAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;gBACvC,mGAAmG;gBACnG,6HAA6H;gBAC7H,qDAAqD;gBACrD,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;gBAC9E,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;iBAAM;gBACL,iDAAiD;gBACjD,oCAAoC;gBACpC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;gBACtE,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;gBACnC,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;aAC/B;YAED,+GAA+G;YAC/G,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAClD,QAAQ,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;YAExC,mEAAmE;YACnE,0GAA0G;YAC1G,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;YAExC,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YAED,OAAO,EAAE,QAAQ,EAAE,aAAa,EAAE,CAAC;SACpC;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;SAC5F;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACK,KAAK,CAAC,gBAAgB,CAC5B,UAAkB,EAClB,UAAuC,EAAE;;QAEzC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,UAAU,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;oBACnE,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,aAAa;iBACrD,EACD,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAC5C,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,QAAQ,EAAE,OAAO,CAAC,QAAQ,IACvB,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,cAAc,CAAC,OAAkC;QACtD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;YAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;gBAC5D,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;aACH;YAED,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,EAClC,QAAQ,EAAE,IAAI,CAAC,KAAK,EACpB,YAAY,EAAE,IAAI,CAAC,SAAS,EAC5B,SAAS,EAAE,IAAI,CAAC,UAAU,IACvB,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;IACL,CAAC;IA
ED;;;;OAIG;IACI,KAAK,CAAC,wBAAwB,CACnC,OAA6C;QAE7C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,wBAAwB,iBACpD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,qBAAqB,CAChC,kBAA0C,EAC1C,OAA0C;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,qBAAqB,iBACjD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACjC,wBAAwB,EAAE,kBAAkB,CAAC,UAAU,EACvD,sBAAsB,EAAE,kBAAkB,CAAC,UAAU,EACrD,wBAAwB,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,uBAAuB,IACvD,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,YAAY,CACvB,gBAAyB,EACzB,OAAiC;QAEjC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,gBAAgB,kBACzD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AA4ND;;GAEG;AACH,MAAM,OAAO,gBAAiB,SAAQ,UAAU;IAsE9C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,qKAAqK;YACrK,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,+DAA+D;YAC/D,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,oBAAoB,CAA
C,CAAC;IACrE,CAAC;IAED;;;;;;;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,gBAAgB,CACzB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,KAAK,CAAC,MAAM,CAAC,UAAmC,EAAE;;QACvD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,kBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,iBAAiB,CAC5B,UAA8C,EAAE;;QAEhD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oCAAoC,EAAE,OAAO,CAAC,CAAC;QAC3F,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;QAC5C,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,iCACxB,cAAc,KACjB,UAAU,IACV,CAAC;YACH,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,4EAA4E;iBACtF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,IAAI,CAAC,UAAiC,EAAE;;QACnD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,iBACtC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;OAuBG;IACI,KAAK,CAAC,WAAW,CACtB,IAAqB,EACrB,aAAqB,EACrB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,CAAC,aAAa,EAAE,IAAI,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IA
AI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACI,KAAK,CAAC,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,KAAa,EACb,UAA+C,EAAE;;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC3D,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAmkBD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,UAAU;IA8E7C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACjE,CAAC;IAED;;;;;;
;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,eAAe,CACxB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA6BG;IACI,KAAK,CAAC,KAAK,CAChB,KAAa,EACb,UAAiC,EAAE;;QAEnC,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEhE,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAE9E,IAAI;YACF,IAAI,CAAC,MAAM,EAAE;gBACX,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;aAC3E;YACD,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,YAAY,EAAE;oBACZ,SAAS,EAAE,KAAK;oBAChB,UAAU,EAAE,KAAK;oBACjB,kBAAkB,EAAE,oBAAoB,CAAC,OAAO,CAAC,sBAAsB,CAAC;oBACxE,mBAAmB,EAAE,oBAAoB,CAAC,OAAO,CAAC,uBAAuB,CAAC;iBAC3E,EACD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,IACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,OAAO,IAAI,iBAAiB,CAAC,QAAQ,EAAE;gBACrC,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,OAAO,EAAE,OAAO,CAAC,OAAO;aACzB,CAAC,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;OA0BG;IACI,KAAK,CAAC,MAAM,CACjB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE;;QAEpC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,kBAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;OAiBG;IAEI,KAAK,CAAC,iBAAiB,CAC5B,SAAiB,EACjB,UAA6C,EAAE;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EAAE,SAAS,gDACzD,OAAO,KACV,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,aAAa,KAE1C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,OAAO;oBAChD,qBAAqB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,eAAe;oBAChE,iBAAiB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,WAAW;oBACxD,uBAAuB,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,iBAAiB;oBACpE,YAAY,EAAE,MAAA,OAAO,CAAC,gBAAgB,0CAAE,aAAa;iBACtD,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,cAAc,EAAE,OAAO,CA
AC,cAAc,KACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,OAAe,EACf,IAAqB,EACrB,aAAqB,EACrB,UAAsC,EAAE;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,EAAE,aAAa,EAAE,IAAI,kBACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;;;OAoBG;IACI,KAAK,CAAC,iBAAiB,CAC5B,OAAe,EACf,SAAiB,EACjB,SAAiB,CAAC,EAClB,KAAc,EACd,UAA6C,EAAE;QAE/C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,OAAO,EAAE,CAAC,EAAE,SAAS,kBACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,WAAW,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAClF,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAC5E,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,eAAe,CAC1B,MAAgB,EAChB,UAA2C,EAAE;;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,EAAE,MAAM,EAAE,MAAM,EAAE,kBAEhB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,YAAY,CACvB,QAAuB,EACvB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,QAAQ,kBAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,IAAI,CAAC,GAAG,CAAC,
eAAe,EAAE;gBACxB,GAAG,CAAC,eAAe,GAAG,EAAE,CAAC;aAC1B;YAED,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE;gBAC1B,GAAG,CAAC,iBAAiB,GAAG,EAAE,CAAC;aAC5B;YAED,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED,uBAAuB;IAEvB;;;;;;;;;;;;;;OAcG;IACI,KAAK,CAAC,UAAU,CACrB,IAAmD,EACnD,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,MAAM,EAAE;gBACV,IAAI,MAAc,CAAC;gBACnB,IAAI,IAAI,YAAY,MAAM,EAAE;oBAC1B,MAAM,GAAG,IAAI,CAAC;iBACf;qBAAM,IAAI,IAAI,YAAY,WAAW,EAAE;oBACtC,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBAC5B;qBAAM;oBACL,IAAI,GAAG,IAAuB,CAAC;oBAC/B,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;iBACrE;gBAED,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,EAAU,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAC7E,MAAM,CAAC,UAAU,EACjB,cAAc,CACf,CAAC;aACH;iBAAM;gBACL,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;gBACrC,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,EAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;aACH;SACF;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;;;OAkBG;IACI,KAAK,CAAC,iBAAiB,CAC5B,WAAiD,EACjD,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;YAC5C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAc,EAAE,IAAY,EAAQ,EAAE,CAAC,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACK,KAAK,CAAC,sBAAsB,CAClC,WAA8D,EAC9D,IAAY,EACZ,UAA0C,EAAE;QAE5C,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;YACtB,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,GAAG,gCAAgC,EAAE;YACjF,MAAM,IAAI,UAAU,CAClB,wCAAwC,gCAAgC,EAAE,CAC3E,CAAC;SACH;QAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;YACjE,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,CAAC;SAC9D;QACD,IACE,OAAO,CAAC,iBAAiB,GAAG,CAAC;YAC7B,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,EAC5D;YACA,MAAM,IAAI,UAAU,CAClB,gDAAgD,gCAAgC,EAAE,CACnF,CAAC;SACH;QAED,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;YAC3B,IAAI,IAAI,GAAG,gCAAgC,GAAG,qBAAqB,EAAE;gBACnE,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,2CAA2C,CAAC,CAAC;aAC1E;YACD,IAAI,IAAI,GAAG,OAAO,CAAC,iBAAiB,EAAE;gBACpC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC,CAAC;gBAC5D,IAAI,OAAO,CAAC,SAAS,GAAG,iCAAiC,EAAE;oBACzD,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;iBACvD;aACF;SACF;QACD,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;YAC5B,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;SAC9B;QACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;YACF,IAAI,IAAI,IAAI,OAAO,CAAC,iBAAiB,EAAE;gBACrC,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,CAAC;aACtE;YAED,MAAM,SAAS,GAAW,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;YACzE,IAAI,SAAS,GAAG,qBAAqB,EAAE;gBACrC,MAAM,IAAI,UAAU,CAClB,6DAA6D;oBAC3D,m
CAAmC,qBAAqB,EAAE,CAC7D,CAAC;aACH;YAED,MAAM,SAAS,GAAa,EAAE,CAAC;YAC/B,MAAM,aAAa,GAAG,YAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YAEjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;gBAClC,KAAK,CAAC,YAAY,CAAC,KAAK,IAAkB,EAAE;oBAC1C,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC;oBAClD,MAAM,KAAK,GAAG,OAAO,CAAC,SAAU,GAAG,CAAC,CAAC;oBACrC,MAAM,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,OAAO,CAAC,SAAU,CAAC;oBACpE,MAAM,aAAa,GAAG,GAAG,GAAG,KAAK,CAAC;oBAClC,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;oBACxB,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAE,aAAa,EAAE;wBAC/E,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;wBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;qBAC9C,CAAC,CAAC;oBACH,0FAA0F;oBAC1F,kEAAkE;oBAClE,gBAAgB,IAAI,aAAa,CAAC;oBAClC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAW,CAAC;4BAClB,WAAW,EAAE,gBAAgB;yBAC9B,CAAC,CAAC;qBACJ;gBACH,CAAC,CAAC,CAAC;aACJ;YACD,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YAEjB,OAAO,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;SACxD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACI,KAAK,CAAC,UAAU,CACrB,QAAgB,EAChB,UAA0C,EAAE;QAE5C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,MAAM,IAAI,GAAG,CAAC,MAAM,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC;YAC3C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAM,EAAE,KAAK,EAAE,EAAE;gBAChB,OAAO,GAAG,EAAE,CACV,kBAAkB,CAAC,QAAQ,EAAE;oBAC3B,SAAS,EAAE,IAAI;oBACf,GAAG,EAAE,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ;oBAC1C,KAAK,EAAE,MAAM;iBACd,CAAC,CAAC;YACP,CAAC,EACD,IAAI,kCAEC,OAAO,KACV,cAAc,kCACT,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAG1D,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,YAAY,CACvB,MAAgB,EAChB,aAAqB,+BAA+B,EACpD,iBAAyB,CAAC,EAC1B,UAAwC,EAAE;QAE1C,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;YAC5B,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;SAC9B;QACD,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IAAI;YACF,IAAI,QAAQ,GAAG,CAAC,CAAC;YACjB,MAAM,aAAa,GAAG,YAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,SAAS,GAAa,EAAE,CAAC;YAE/B,MAAM,SAAS,GAAG,IAAI,eAAe,CACnC,MAAM,EACN,UAAU,EACV,cAAc,EACd,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE;gBACrB,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;gBACzD,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;gBACxB,QAAQ,EAAE,CAAC;gBAEX,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE;oBAC3C,UAAU,EAAE,OAAO,CAAC,UAAU;oBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;oBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;iBAC9C,CAAC,CAAC;gBAEH,0FAA0F;gBAC1F,gBAAgB,IAAI,MAAM,CAAC;gBAC3B,IAAI,OAAO,CAAC,UAAU,EAAE;oBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;iBACvD;YACH,CAAC;YACD,kFAAkF;YAClF,2EAA2E;YAC3E,iDAAiD;YACjD,qCAAqC;YACrC,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,CACpC,CAAC;YACF,MAAM,SAAS,CAAC,EAAE,EAAE,CAAC;YAErB,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,SAAS,kCACtC,OAAO,KACV,cAAc,kCACT,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OA
AO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF;AAoaD;;GAEG;AACH,MAAM,OAAO,cAAe,SAAQ,UAAU;IA8D5C,YACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;IACnD,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,kHAAkH;QAClH,wFAAwF;QACxF,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;YACtD,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;YAEnC,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;oBAEF,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;SAC1F;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,eAAe,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACjE,CAAC;IAED;;;;;;;OAOG;IACI,YAAY,CAAC,QAAgB;QAClC,OAAO,IAAI,cAAc,CACvB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,kBAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAChE,sBAAsB,EAAE,MAAA,OAAO,CAAC,kBAAkB,0CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,IAC3C,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,iBAAiB,CAC5B,IAAY,EACZ,UAA4C,EAAE;;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC
,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;YAC5C,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,kCAC7B,OAAO,KACV,UAAU,EACV,cAAc,EAAE,cAAc,CAAC,cAAc,IAC7C,CAAC;YACH,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,4EAA4E;iBACtF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,WAAW,CACtB,IAAqB,EACrB,MAAc,EACd,KAAa,EACb,UAAsC,EAAE;;QAExC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,UAAkB,EAClB,KAAa,EACb,UAA6C,EAAE;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAClD,SAAS,EACT,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC9C,CAAC,EACD,aAAa,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC,kBAE1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,8BAA8B,EAAE;oBAC9B,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;oBAC/C,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;oBAC/D,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;oBACvD,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,IAC5E,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,UAAU,CACrB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAqC,EAAE;;QAEvC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,kBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aA
Aa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,aAAa,CACxB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAwC,EAAE;;QAE1C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,aAAa,iBACZ,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACK,KAAK,CAAC,qBAAqB,CACjC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,UAAgD,EAAE;;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,aAAa,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IACD;;;;;;;;;;;;;OAaG;IACY,yBAAyB,CACtC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,UAAgD,EAAE;;YAElD,IAAI,gCAAgC,CAAC;YACrC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,gCAAgC,GAAG,cAAM,IAAI,CAAC,qBAAqB,CACjE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;oBAC5D,oBAAM,cAAM,gCAAgC,CAAA,CAAA,CAAC;iBAC9C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;OAMG;IACY,kBAAkB,CAC/B,SAAiB,CAAC,EAClB,KAAc,EACd,UAAgD,EAAE;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAAyC,IAAA,KAAA,cAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBALU,MAAM,oBAAoB,WAAA,CAAA;oBAMnC,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;iBACxD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAsEG;IACI,cAAc,CACnB,SAAiB,CAAC,EAClB,KAAc,EACd,UAAyC,EAAE;QAE3C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,iBAAiB,kBAC7E,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,OAAO,EACV,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,iBAAiB,CAC5B,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA4C,EAAE;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,iBAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,
qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,YAAY,EAAE,YAAY,EAC1B,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;OAaG;IACK,KAAK,CAAC,yBAAyB,CACrC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD;;QAElD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAChG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,iBAAiB,iBACjD,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,EACjC,qBAAqB,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,EAC1C,wBAAwB,kCACnB,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,KACtB,MAAM,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,UAAU,0CAAE,aAAa,KAE5C,YAAY,EAAE,iBAAiB,EAC/B,KAAK,EAAE,aAAa,CAAC;oBACnB,MAAM,EAAE,MAAM;oBACd,KAAK,EAAE,KAAK;iBACb,CAAC,EACF,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,WAAW,IAC9B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IACD;;;;;;;;;;;;;;;OAeG;IACY,6BAA6B,CAC1C,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD;;YAElD,IAAI,gCAAgC,CAAC;YACrC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,gCAAgC,GAAG,cAAM,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;oBAC5D,oBAAM,cAAM,gCAAgC,CAAA,CAAA,CAAC;iBAC9C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;OAOG;IACY,sBAAsB,CACnC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,OAAkD;;;YAElD,IAAI,MAA0B,CAAC;;gBAC/B,KAAyC,IAAA,KAAA,cAAA,IAAI,CAAC,6BAA6B,CACzE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBANU,MAAM,oBAAoB,WAAA,CAAA;oBAOnC,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;iBACxD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAuEG;IACI,kBAAkB,CACvB,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA6C,EAAE;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAE9C,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,KAAK,EAAE,YAAY,oBAC/D,OAAO,EACV,CAAC;QACH,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,6BAA6B,CACvC,MAAM,EACN,KAAK,EACL,YAAY,EACZ,QAAQ,CAAC,iBAAiB,kBAExB,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,OAAO,EAEb,CAAC;YACJ,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;OASG;IACI,KAAK,CAAC,gCAAgC,CAC3C,MAAc,EACd,KAAa,EACb,eAAuB,EACvB,UAA4C,EAAE;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,iDAAiD,EACjD,OAAO,CACR,CAAC;QAEF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,iBAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EACf,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,EACrD;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;SACjC;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;OAOG;IAC
I,KAAK,CAAC,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,IAAI,kBAC3C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,KAE3C,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,oBAAoB,CAC/B,oBAA8C,EAC9C,cAAuB,EACvB,UAA+C,EAAE;;QAEjD,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,oBAAoB,CAAC,oBAAoB,kBACzE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,kBAAkB,EAAE,cAAc,EAClC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;OAYG;IACI,KAAK,CAAC,oBAAoB,CAC/B,UAAkB,EAClB,UAA+C,EAAE;;QAEjD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,UAAU,kBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n generateUuid,\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n TransferProgressEvent,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PollerLike, PollOperationState } from \"@azure/core-lro\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Readable } from \"stream\";\n\nimport { BlobDownloadResponse } from \"./BlobDownloadResponse\";\nimport { BlobQueryResponse } from \"./BlobQueryResponse\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from \"./generated/src/operations\";\nimport {\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobCreateResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobCreateSnapshotResponse,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobDownloadResponseModel,\n BlobGetPropertiesResponseModel,\n BlobGetTagsHeaders,\n BlobSetHTTPHeadersResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobSetTierResponse,\n BlobStartCopyFromURLResponse,\n BlobTags,\n BlobUndeleteResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobStageBlockResponse,\n BlockBlobUploadHeaders,\n 
BlockBlobUploadResponse,\n BlockListType,\n CpkInfo,\n DeleteSnapshotsOptionType,\n LeaseAccessConditions,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalResponse,\n PageBlobCreateResponse,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobUploadPagesResponse,\n RehydratePriority,\n SequenceNumberActionType,\n BlockBlobPutBlobFromUrlResponse,\n BlobHTTPHeaders,\n PageBlobGetPageRangesResponseModel,\n PageRangeInfo,\n PageBlobGetPageRangesDiffResponseModel,\n BlobCopySourceTags,\n} from \"./generatedModels\";\nimport {\n AppendBlobRequestConditions,\n BlobDownloadResponseParsed,\n BlobRequestConditions,\n BlockBlobTier,\n ensureCpkIfSpecified,\n Metadata,\n ObjectReplicationPolicy,\n PageBlobRequestConditions,\n PremiumPageBlobTier,\n Tags,\n toAccessTier,\n TagConditions,\n MatchConditions,\n ModificationConditions,\n ModifiedAccessConditions,\n BlobQueryArrowField,\n BlobImmutabilityPolicy,\n HttpAuthorization,\n} from \"./models\";\nimport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n rangeResponseFromModel,\n} from \"./PageBlobRangeResponse\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport {\n BlobBeginCopyFromUrlPoller,\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\nimport { Range, rangeToString } from \"./Range\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { Batch } from \"./utils/Batch\";\nimport { BufferScheduler } from \"../../storage-common/src\";\nimport {\n BlobDoesNotUseCustomerSpecifiedEncryption,\n BlobUsesCustomerSpecifiedEncryptionMsg,\n BLOCK_BLOB_MAX_BLOCKS,\n BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES,\n BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES,\n DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES,\n DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS,\n ETagAny,\n URLConstants,\n} from \"./utils/constants\";\nimport { createSpan, convertTracingToRequestOptionsBase } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n ExtractPageRangeInfoItems,\n generateBlockID,\n getURLParameter,\n httpAuthorizationToString,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n setURLParameter,\n toBlobTags,\n toBlobTagsString,\n toQuerySerialization,\n toTags,\n} from \"./utils/utils.common\";\nimport {\n fsCreateReadStream,\n fsStat,\n readStreamToLocalFile,\n streamToBuffer,\n} from \"./utils/utils.node\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobSASPermissions } from \"./sas/BlobSASPermissions\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldResponse,\n} from \"./generatedModels\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions {\n /**\n * The amount of time in milliseconds the poller should wait between\n * calls to the service to determine the status of the Blob copy.\n * Defaults to 15 seconds.\n */\n intervalInMs?: number;\n /**\n * Callback to receive the state of the copy progress.\n */\n onProgress?: (state: 
BlobBeginCopyFromUrlPollState) => void;\n /**\n * Serialized poller state that can be used to resume polling from.\n * This may be useful when starting a copy on one process or thread\n * and you wish to continue polling on another process or thread.\n *\n * To get serialized poller state, call `poller.toString()` on an existing\n * poller.\n */\n resumeFrom?: string;\n}\n\n/**\n * Contains response data for the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse {}\n\n/**\n * Options to configure the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve.\n */\n snapshot?: string;\n /**\n * When this is set to true and download range of blob, the service returns the MD5 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentMD5?: boolean;\n /**\n * When this is set to true and download range of blob, the service returns the CRC64 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentCrc64?: boolean;\n /**\n * Conditions to meet when downloading blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Call back to receive events on the progress of download operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. 
ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original body download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional `FileClient.download()` request will be made\n * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached.\n *\n * Default value is 5, please set a larger value when loading large files in poor network.\n */\n maxRetryRequests?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.exists} operation.\n */\nexport interface BlobExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Conditions to meet.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting blob properties.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.delete} operation.\n */\nexport interface BlobDeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies options to delete blobs that have associated snapshots.\n * - `include`: Delete the base blob and all of its snapshots.\n * - `only`: Delete only the blob's snapshots and not the blob itself.\n */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.undelete} operation.\n */\nexport interface BlobUndeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setHTTPHeaders} operation.\n */\nexport interface BlobSetHTTPHeadersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob HTTP headers.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: 
CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setMetadata} operation.\n */\nexport interface BlobSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob metadata.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.setTags} operation.\n */\nexport interface BlobSetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getTags} operation.\n */\nexport interface BlobGetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getTags} operation.\n */\nexport type BlobGetTagsResponse = { tags: Tags } & BlobGetTagsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: BlobGetTagsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: BlobTags;\n };\n };\n\n/**\n * Options to configure Blob - Acquire Lease operation.\n */\nexport interface BlobAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Release Lease operation.\n */\nexport interface BlobReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Renew Lease operation.\n */\nexport interface BlobRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the 
request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Change Lease operation.\n */\nexport interface BlobChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Break Lease operation.\n */\nexport interface BlobBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.createSnapshot} operation.\n */\nexport interface BlobCreateSnapshotOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet when creating blob snapshots.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobStartCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the blob that are being copied.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n /**\n * Optional. 
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Overrides the sealed state of the destination blob. Default true.\n */\n sealBlob?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobClient.abortCopyFromURL} operation.\n */\nexport interface BlobAbortCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.syncCopyFromURL} operation.\n */\nexport interface BlobSyncCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Default 'REPLACE'. 
Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Options to configure the {@link BlobClient.setAccessTier} operation.\n */\nexport interface BlobSetTierOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n}\n\n/**\n * Option interface for the {@link BlobClient.downloadToBuffer} operation.\n */\nexport interface BlobDownloadToBufferOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * blockSize is the data every request trying to download.\n * Must be greater than or equal to 0.\n * If set to 0 or undefined, blockSize will automatically calculated according to the blob size.\n */\n blockSize?: number;\n\n /**\n * Optional. ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original block download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional FileClient.download() request will be made\n * from the broken point, until the requested block has been successfully downloaded or\n * maxRetryRequestsPerBlock is reached.\n *\n * Default value is 5, please set a larger value when in poor network.\n */\n maxRetryRequestsPerBlock?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel download.\n */\n concurrency?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Contains response data for the {@link BlobClient.deleteIfExists} operation.\n */\nexport interface BlobDeleteIfExistsResponse extends BlobDeleteResponse {\n /**\n * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}.\n */\nexport interface CommonGenerateSasUrlOptions {\n /**\n * The version of the service this SAS will target. 
If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n}\n\n/**\n * Options to configure {@link BlobClient.generateSasUrl} operation.\n */\nexport interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: BlobSASPermissions;\n}\n\n/**\n * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation.\n */\nexport interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation.\n */\nexport interface BlobSetImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n modifiedAccessCondition?: ModificationConditions;\n}\n\n/**\n * Options for setting legal hold {@link BlobClient.setLegalHold} operation.\n */\nexport interface BlobSetLegalHoldOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n */\nexport class BlobClient extends StorageClient {\n /**\n * blobContext provided by protocol layer.\n */\n private blobContext: StorageBlob;\n\n private _name: string;\n private _containerName: string;\n\n private _versionId?: string;\n private _snapshot?: string;\n\n /**\n * The name of the blob.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * The name of the storage container the blob is associated with.\n */\n public get containerName(): string {\n return 
this._containerName;\n }\n\n /**\n *\n * Creates an instance of BlobClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n options = options || {};\n let pipeline: PipelineLike;\n let url: string;\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n\n super(url, pipeline);\n ({ blobName: this._name, containerName: this._containerName } =\n this.getBlobAndContainerNamesFromUrl());\n this.blobContext = new StorageBlob(this.storageClientContext);\n\n this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT) as string;\n this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID) as string;\n }\n\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp\n */\n public withSnapshot(snapshot: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param versionId - The versionId.\n * @returns A new BlobClient object pointing to the version of this blob.\n */\n public withVersion(versionId: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.VERSIONID,\n versionId.length === 0 ? 
undefined : versionId\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a AppendBlobClient object.\n *\n */\n public getAppendBlobClient(): AppendBlobClient {\n return new AppendBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a BlockBlobClient object.\n *\n */\n public getBlockBlobClient(): BlockBlobClient {\n return new BlockBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a PageBlobClient object.\n *\n */\n public getPageBlobClient(): PageBlobClient {\n return new PageBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Optional options to Blob Download operation.\n *\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n public async download(\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlobClient-download\", options);\n\n try {\n const res = await this.blobContext.download({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream\n },\n range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedRes = {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n // Return browser response immediately\n if (!isNode) {\n return wrappedRes;\n }\n\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n\n if (res.contentLength === undefined) {\n throw new RangeError(`File download response doesn't contain valid content length header`);\n }\n\n if (!res.etag) {\n throw new RangeError(`File download response doesn't contain valid etag header`);\n }\n\n return new BlobDownloadResponse(\n wrappedRes,\n async (start: number): Promise => {\n const updatedDownloadOptions: BlobDownloadOptionalParams = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions!.ifMatch || res.etag,\n ifModifiedSince: options.conditions!.ifModifiedSince,\n ifNoneMatch: options.conditions!.ifNoneMatch,\n ifUnmodifiedSince: options.conditions!.ifUnmodifiedSince,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({\n count: offset + res.contentLength! - start,\n offset: start,\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n };\n\n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n\n return (\n await this.blobContext.download({\n abortSignal: options.abortSignal,\n ...updatedDownloadOptions,\n })\n ).readableStreamBody!;\n },\n offset,\n res.contentLength!,\n {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress,\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. 
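// --- Illustrative usage sketch (not part of the vendored SDK source) ---
// Shows a ranged, retriable download with BlobClient.download() as implemented
// above (Node.js only). The connection string, container name, and blob name
// are hypothetical placeholders; replace them before running.
import { Readable } from "stream";
import { BlobServiceClient } from "@azure/storage-blob";

async function downloadFirstKilobyte(): Promise<Buffer> {
  const blobClient = BlobServiceClient.fromConnectionString(
    "<storage-connection-string>" // hypothetical placeholder
  )
    .getContainerClient("my-container")
    .getBlobClient("my-blob.txt");

  // Download bytes [0, 1024) and allow up to 5 retries if the stream breaks.
  const response = await blobClient.download(0, 1024, { maxRetryRequests: 5 });

  // In Node.js the body is a readable stream; collect it into a Buffer.
  const chunks: Buffer[] = [];
  for await (const chunk of response.readableStreamBody! as Readable) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  return Buffer.concat(chunks);
}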
Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param options - options to Exists operation.\n */\n public async exists(options: BlobExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-exists\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n await this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n // Expected exception when checking blob existence\n return false;\n } else if (\n e.statusCode === 409 &&\n (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg ||\n e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)\n ) {\n // Expected exception when checking blob existence\n return true;\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Optional options to Get Properties operation.\n */\n public async getProperties(\n options: BlobGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getProperties\", options);\n try {\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const res = await this.blobContext.getProperties({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n return {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
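// --- Illustrative usage sketch (not part of the vendored SDK source) ---
// Combines exists() and getProperties() from above. Names are hypothetical
// placeholders. Note that metadata keys come back lowercased, as documented
// for getProperties().
import { BlobServiceClient } from "@azure/storage-blob";

async function describeBlobIfPresent(): Promise<void> {
  const blobClient = BlobServiceClient.fromConnectionString(
    "<storage-connection-string>" // hypothetical placeholder
  )
    .getContainerClient("my-container")
    .getBlobClient("my-blob.txt");

  if (!(await blobClient.exists())) {
    console.log("Blob not found (it may have been deleted concurrently).");
    return;
  }

  const props = await blobClient.getProperties();
  console.log(`Size: ${props.contentLength} bytes, type: ${props.contentType}`);
  console.log("Metadata (keys lowercased):", props.metadata);
}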
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async delete(options: BlobDeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-delete\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.blobContext.delete({\n abortSignal: options.abortSignal,\n deleteSnapshots: options.deleteSnapshots,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async deleteIfExists(\n options: BlobDeleteOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteIfExists\", options);\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. 
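// --- Illustrative usage sketch (not part of the vendored SDK source) ---
// Uses deleteIfExists() from above to remove a blob together with its
// snapshots, without having to handle BlobNotFound errors. Names are
// hypothetical placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function removeBlobAndSnapshots(): Promise<void> {
  const blobClient = BlobServiceClient.fromConnectionString(
    "<storage-connection-string>" // hypothetical placeholder
  )
    .getContainerClient("my-container")
    .getBlobClient("my-blob.txt");

  // "include" deletes the base blob and all of its snapshots in one call.
  const result = await blobClient.deleteIfExists({ deleteSnapshots: "include" });
  console.log(result.succeeded ? "Deleted." : "Nothing to delete.");
}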
Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param options - Optional options to Blob Undelete operation.\n */\n public async undelete(options: BlobUndeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-undelete\", options);\n try {\n return await this.blobContext.undelete({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param blobHTTPHeaders - If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * A common header to set is `blobContentType`\n * enabling the browser to provide functionality\n * based on file type.\n * @param options - Optional options to Blob Set HTTP Headers operation.\n */\n public async setHTTPHeaders(\n blobHTTPHeaders?: BlobHTTPHeaders,\n options: BlobSetHTTPHeadersOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setHTTPHeaders\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setHttpHeaders({\n abortSignal: options.abortSignal,\n blobHttpHeaders: blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Optional options to Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: BlobSetMetadataOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setMetadata\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: 
SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param tags -\n * @param options -\n */\n public async setTags(tags: Tags, options: BlobSetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setTags\", options);\n try {\n return await this.blobContext.setTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n tags: toBlobTags(tags),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param options -\n */\n public async getTags(options: BlobGetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getTags\", options);\n try {\n const response = await this.blobContext.getTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n const wrappedResponse: BlobGetTagsResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n tags: toTags({ blobTagSet: response.blobTagSet }) || {},\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the blob.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a read-only snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param options - Optional options to the Blob Create Snapshot operation.\n */\n public async createSnapshot(\n options: BlobCreateSnapshotOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-createSnapshot\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.createSnapshot({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Asynchronously copies a blob to a 
destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n public async beginCopyFromURL(\n copySource: string,\n options: BlobBeginCopyFromURLOptions = {}\n ): Promise<\n PollerLike, BlobBeginCopyFromURLResponse>\n > {\n const client: CopyPollerBlobClient = {\n abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),\n getProperties: (...args) => this.getProperties(...args),\n startCopyFromURL: (...args) => this.startCopyFromURL(...args),\n };\n const poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options,\n });\n\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n await poller.poll();\n\n return poller;\n }\n\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. 
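// --- Illustrative usage sketch (not part of the vendored SDK source) ---
// Exercises setTags()/getTags() and createSnapshot()/withSnapshot() shown
// earlier in this class. Names and tag values are hypothetical placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function tagAndSnapshotBlob(): Promise<void> {
  const blobClient = BlobServiceClient.fromConnectionString(
    "<storage-connection-string>" // hypothetical placeholder
  )
    .getContainerClient("my-container")
    .getBlobClient("my-blob.txt");

  await blobClient.setTags({ project: "demo", environment: "test" });
  const { tags } = await blobClient.getTags();
  console.log("Current tags:", tags);

  // Take a read-only snapshot and get a client scoped to it.
  const { snapshot } = await blobClient.createSnapshot();
  const snapshotClient = blobClient.withSnapshot(snapshot!);
  console.log("Snapshot URL:", snapshotClient.url);
}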
Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param copyId - Id of the Copy From URL operation.\n * @param options - Optional options to the Blob Abort Copy From URL operation.\n */\n public async abortCopyFromURL(\n copyId: string,\n options: BlobAbortCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-abortCopyFromURL\", options);\n try {\n return await this.blobContext.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param options -\n */\n public async syncCopyFromURL(\n copySource: string,\n options: BlobSyncCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-syncCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.copyFromURL(copySource, {\n abortSignal: options.abortSignal,\n metadata: options.metadata,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n sourceContentMD5: options.sourceContentMD5,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n blobTagsString: toBlobTagsString(options.tags),\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n encryptionScope: options.encryptionScope,\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param tier - The tier to be set on the blob. 
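// --- Illustrative usage sketch (not part of the vendored SDK source) ---
// Uses syncCopyFromURL() from above to copy from a (hypothetical) SAS URL and
// then moves the new blob to the Cool tier with setAccessTier(). All URLs and
// names are placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function copyAndCoolDown(): Promise<void> {
  const destinationClient = BlobServiceClient.fromConnectionString(
    "<storage-connection-string>" // hypothetical placeholder
  )
    .getContainerClient("my-container")
    .getBlobClient("copied-blob.txt");

  // The synchronous copy does not return until the copy has completed.
  const copyResult = await destinationClient.syncCopyFromURL(
    "https://example.blob.core.windows.net/src/source.txt?<sas-token>"
  );
  console.log("Copy status:", copyResult.copyStatus); // "success" when done

  await destinationClient.setAccessTier("Cool");
}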
Valid values are Hot, Cool, or Archive.\n * @param options - Optional options to the Blob Set Tier operation.\n */\n public async setAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string,\n options: BlobSetTierOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setAccessTier\", options);\n try {\n return await this.blobContext.setTier(toAccessTier(tier)!, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n rehydratePriority: options.rehydratePriority,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level function\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param buffer - Buffer to be fill, must have length larger than count\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n buffer: Buffer,\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n public async downloadToBuffer(\n param1?: Buffer | number,\n param2?: number,\n param3?: BlobDownloadToBufferOptions | number,\n param4: BlobDownloadToBufferOptions = {}\n ): Promise {\n let buffer: Buffer | undefined;\n let offset = 0;\n let count = 0;\n let options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n } else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? 
param2 : 0;\n options = (param3 as BlobDownloadToBufferOptions) || {};\n }\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToBuffer\", options);\n\n try {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n\n if (count && count <= 0) {\n throw new RangeError(\"count option must be greater than 0\");\n }\n\n if (!options.conditions) {\n options.conditions = {};\n }\n\n // Customer doesn't specify length, get it\n if (!count) {\n const response = await this.getProperties({\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n count = response.contentLength! - offset;\n if (count < 0) {\n throw new RangeError(\n `offset ${offset} shouldn't be larger than blob size ${response.contentLength!}`\n );\n }\n }\n\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n } catch (error: any) {\n throw new Error(\n `Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\\t ${error.message}`\n );\n }\n }\n\n if (buffer.length < count) {\n throw new RangeError(\n `The buffer's size should be equal to or larger than the request count of bytes: ${count}`\n );\n }\n\n let transferProgress: number = 0;\n const batch = new Batch(options.concurrency);\n for (let off = offset; off < offset + count; off = off + options.blockSize) {\n batch.addOperation(async () => {\n // Exclusive chunk end position\n let chunkEnd = offset + count!;\n if (off + options.blockSize! < chunkEnd) {\n chunkEnd = off + options.blockSize!;\n }\n const response = await this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n const stream = response.readableStreamBody!;\n await streamToBuffer(stream, buffer!, off - offset, chunkEnd - offset);\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n });\n }\n await batch.do();\n return buffer;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param filePath -\n * @param offset - From which position of the block blob to download.\n * @param count - How much data to be downloaded. 
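// --- Illustrative usage sketch (not part of the vendored SDK source) ---
// Drives the parallel downloadToBuffer() implementation above (Node.js only).
// Container/blob names are hypothetical placeholders; the tuning values are
// just examples, not recommendations.
import { BlobServiceClient } from "@azure/storage-blob";

async function downloadWholeBlobToBuffer(): Promise<Buffer> {
  const blobClient = BlobServiceClient.fromConnectionString(
    "<storage-connection-string>" // hypothetical placeholder
  )
    .getContainerClient("my-container")
    .getBlobClient("large-blob.bin");

  // Offset 0 and an undefined count download the entire blob.
  return blobClient.downloadToBuffer(0, undefined, {
    blockSize: 4 * 1024 * 1024, // 4 MiB chunks
    concurrency: 5,             // number of chunks downloaded in parallel
    onProgress: (progress) => console.log(`Loaded ${progress.loadedBytes} bytes`),
  });
}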
Will download to the end when passing undefined.\n * @param options - Options to Blob download options.\n * @returns The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n */\n public async downloadToFile(\n filePath: string,\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToFile\", options);\n try {\n const response = await this.download(offset, count, {\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n if (response.readableStreamBody) {\n await readStreamToLocalFile(response.readableStreamBody, filePath);\n }\n\n // The stream is no longer accessible so setting it to undefined.\n (response as any).blobDownloadStream = undefined;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n private getBlobAndContainerNamesFromUrl(): { blobName: string; containerName: string } {\n let containerName;\n let blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents![2];\n blobName = pathComponents![4];\n } else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n }\n\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return { blobName, containerName };\n } catch (error: any) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n private async startCopyFromURL(\n copySource: string,\n options: BlobStartCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-startCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.startCopyFromURL(copySource, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions,\n },\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n rehydratePriority: options.rehydratePriority,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n sealBlob: options.sealBlob,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared 
Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: BlobGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n blobName: this._name,\n snapshotTime: this._snapshot,\n versionId: this._versionId,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Delete the immutablility policy on the blob.\n *\n * @param options - Optional options to delete immutability policy on the blob.\n */\n public async deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteImmutabilityPolicy\", options);\n try {\n return await this.blobContext.deleteImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set immutablility policy on the blob.\n *\n * @param options - Optional options to set immutability policy on the blob.\n */\n public async setImmutabilityPolicy(\n immutabilityPolicy: BlobImmutabilityPolicy,\n options?: BlobSetImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setImmutabilityPolicy\", options);\n try {\n return await this.blobContext.setImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn,\n immutabilityPolicyMode: immutabilityPolicy.policyMode,\n modifiedAccessConditions: options?.modifiedAccessCondition,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set legal hold on the blob.\n *\n * @param options - Optional options to set legal hold on the blob.\n */\n public async setLegalHold(\n legalHoldEnabled: boolean,\n options?: BlobSetLegalHoldOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setLegalHold\", options);\n try {\n return await this.blobContext.setLegalHold(legalHoldEnabled, {\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link AppendBlobClient.create} operation.\n */\nexport interface AppendBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * 
Conditions to meet when creating append blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when creating append blobs. A common header\n * to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * HTTP headers to set when creating append blobs. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.seal} operation.\n */\nexport interface AppendBlobSealOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet.\n */\n conditions?: AppendBlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlock} operation.\n */\nexport interface AppendBlobAppendBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Callback to receive events on the progress of append block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation.\n */\nexport interface AppendBlobAppendBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link appendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * AppendBlobClient defines a set of operations applicable to append blobs.\n */\nexport class AppendBlobClient extends BlobClient {\n /**\n * appendBlobsContext provided by protocol layer.\n */\n private appendBlobContext: AppendBlob;\n\n /**\n *\n * Creates an instance of AppendBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n // The second parameter is undefined. 
Use anonymous credential.\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.appendBlobContext = new AppendBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new AppendBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): AppendBlobClient {\n return new AppendBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a 0-length append blob. 
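// --- Illustrative usage sketch (not part of the vendored SDK source) ---
// End-to-end use of the AppendBlobClient methods defined below:
// createIfNotExists(), appendBlock(), and seal(). Names are hypothetical
// placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function appendLogLine(line: string): Promise<void> {
  const appendBlobClient = BlobServiceClient.fromConnectionString(
    "<storage-connection-string>" // hypothetical placeholder
  )
    .getContainerClient("my-container")
    .getAppendBlobClient("app.log");

  // Creates the blob only if it is not there yet; existing content is kept.
  await appendBlobClient.createIfNotExists();

  const payload = line + "\n";
  await appendBlobClient.appendBlock(payload, Buffer.byteLength(payload));

  // Optionally make the blob read-only once writing is finished.
  // await appendBlobClient.seal();
}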
Call AppendBlock to append data to an append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options - Options to the Append Block Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const appendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await appendBlobClient.create();\n * ```\n */\n public async create(options: AppendBlobCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-create\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.create(0, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.\n * If the blob with the same name already exists, the content of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options -\n */\n public async createIfNotExists(\n options: AppendBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-createIfNotExists\", options);\n const conditions = { ifNoneMatch: ETagAny };\n try {\n const res = await this.create({\n ...updatedOptions,\n conditions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Seals the append blob, making it read only.\n *\n * @param options -\n */\n public async seal(options: AppendBlobSealOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-seal\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.appendBlobContext.seal({\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Commits a new block of data to the end of the existing append blob.\n * @see 
https://docs.microsoft.com/rest/api/storageservices/append-block\n *\n * @param body - Data to be appended.\n * @param contentLength - Length of the body in bytes.\n * @param options - Options to the Append Block operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello World!\";\n *\n * // Create a new append blob and append data to the blob.\n * const newAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await newAppendBlobClient.create();\n * await newAppendBlobClient.appendBlock(content, content.length);\n *\n * // Append data to an existing append blob.\n * const existingAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await existingAppendBlobClient.appendBlock(content, content.length);\n * ```\n */\n public async appendBlock(\n body: HttpRequestBody,\n contentLength: number,\n options: AppendBlobAppendBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlock\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlock(contentLength, body, {\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob\n * where the contents are read from a source url.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url\n *\n * @param sourceURL -\n * The url to the blob that will be the source of the copy. A source blob in the same storage account can\n * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob\n * must either be public or must be authenticated via a shared access signature. 
If the source blob is\n * public, no authentication is required to perform the operation.\n * @param sourceOffset - Offset in source to be appended\n * @param count - Number of bytes to be appended as a block\n * @param options -\n */\n public async appendBlockFromURL(\n sourceURL: string,\n sourceOffset: number,\n count: number,\n options: AppendBlobAppendBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlockFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {\n abortSignal: options.abortSignal,\n sourceRange: rangeToString({ offset: sourceOffset, count }),\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n appendPositionAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link BlockBlobClient.upload} operation.\n */\nexport interface BlockBlobUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when uploading to a block blob. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when uploading to a block blob.\n */\n metadata?: Metadata;\n /**\n * Callback to receive events on the progress of upload operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Optional. 
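appendBlockFromURL lets the service pull a byte range directly from another blob instead of routing the data through the client. A minimal sketch, assuming the source blob is publicly readable or the URL carries a SAS token; all URLs and names are placeholders:

```js
// Sketch: append the first 1 KiB of a publicly readable (or SAS-authorized)
// source blob to an append blob. URLs and names are placeholders.
const { BlobServiceClient } = require("@azure/storage-blob");

async function appendFromUrl() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const appendBlobClient = serviceClient
    .getContainerClient("mycontainer")
    .getAppendBlobClient("destination-log.txt");

  await appendBlobClient.createIfNotExists();

  // sourceOffset = 0, count = 1024: the service reads that range from the
  // source URL and commits it as a new block at the end of the append blob.
  await appendBlobClient.appendBlockFromURL(
    "https://myaccount.blob.core.windows.net/mycontainer/source.txt?<SAS>",
    0,
    1024
  );
}

appendFromUrl().catch(console.error);
```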
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation.\n */\nexport interface BlockBlobSyncUploadFromURLOptions extends CommonOptions {\n /**\n * Server timeout in seconds.\n * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations\n */\n timeoutInSeconds?: number;\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value\n * pairs are specified, the operation will copy the metadata from the source blob or file to the\n * destination blob. If one or more name-value pairs are specified, the destination blob is\n * created with the specified metadata, and metadata is not copied from the source blob or file.\n * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules\n * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more\n * information.\n */\n metadata?: Metadata;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Optional, default is true. Indicates if properties from the source blob should be copied.\n */\n copySourceBlobProperties?: boolean;\n /**\n * HTTP headers to set when uploading to a block blob.\n *\n * A common header to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * Conditions to meet for the destination Azure Blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Conditions to meet for the source Azure Blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Blob query error type.\n */\nexport interface BlobQueryError {\n /**\n * Whether error is fatal. 
Fatal error will stop query.\n */\n isFatal: boolean;\n /**\n * Error name.\n */\n name: string;\n /**\n * Position in bytes of the query.\n */\n position: number;\n /**\n * Error description.\n */\n description: string;\n}\n\n/**\n * Options to query blob with JSON format.\n */\nexport interface BlobQueryJsonTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a JSON format blob.\n */\n kind: \"json\";\n}\n\n/**\n * Options to query blob with CSV format.\n */\nexport interface BlobQueryCsvTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a CSV format blob.\n */\n kind: \"csv\";\n /**\n * Column separator. Default is \",\".\n */\n columnSeparator?: string;\n /**\n * Field quote.\n */\n fieldQuote?: string;\n /**\n * Escape character.\n */\n escapeCharacter?: string;\n /**\n * Has headers. Default is false.\n */\n hasHeaders?: boolean;\n}\n\n/**\n * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}.\n */\nexport interface BlobQueryArrowConfiguration {\n /**\n * Kind.\n */\n kind: \"arrow\";\n\n /**\n * List of {@link BlobQueryArrowField} describing the schema of the data.\n */\n schema: BlobQueryArrowField[];\n}\n\n/**\n * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}.\n */\nexport interface BlobQueryParquetConfiguration {\n /**\n * Kind.\n */\n kind: \"parquet\";\n}\n\n/**\n * Options to configure {@link BlockBlobClient.query} operation.\n */\nexport interface BlockBlobQueryOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Configurations for the query input.\n */\n inputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryParquetConfiguration;\n /**\n * Configurations for the query output.\n */\n outputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration;\n /**\n * Callback to receive events on the progress of query operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlock} operation.\n */\nexport interface BlockBlobStageBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * Callback to receive events on the progress of stage block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. 
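The input/output text configurations above plug into BlockBlobClient.query. A minimal sketch (Node.js only) that runs a query over a CSV blob and streams the result back as newline-delimited JSON; the connection string, container and blob names are placeholders:

```js
// Sketch: quick query (Node.js only) over a CSV blob, returning JSON records.
// Names are placeholders; the blob is assumed to hold CSV data with a header row.
const { BlobServiceClient } = require("@azure/storage-blob");

async function queryCsvBlob() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const blockBlobClient = serviceClient
    .getContainerClient("mycontainer")
    .getBlockBlobClient("data.csv");

  const response = await blockBlobClient.query("SELECT * FROM BlobStorage", {
    inputTextConfiguration: {
      kind: "csv",
      recordSeparator: "\n",
      columnSeparator: ",",
      hasHeaders: true,
    },
    outputTextConfiguration: {
      kind: "json",
      recordSeparator: "\n",
    },
    onError: (err) => console.error("query error:", err.name, err.description),
  });

  // readableStreamBody is a Node.js stream of the filtered results.
  for await (const chunk of response.readableStreamBody) {
    process.stdout.write(chunk.toString());
  }
}

queryCsvBlob().catch(console.error);
```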
This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n\n /**\n * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation.\n */\nexport interface BlockBlobStageBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the bytes of the source Blob/File to upload.\n * If not specified, the entire content is uploaded as a single block.\n */\n range?: Range;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. 
Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.commitBlockList} operation.\n */\nexport interface BlockBlobCommitBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when committing the block list.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when committing block list.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when committing block list.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.getBlockList} operation.\n */\nexport interface BlockBlobGetBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n}\n\n/**\n * Option interface for the {@link BlockBlobClient.uploadStream} operation.\n */\nexport interface BlockBlobUploadStreamOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Blob HTTP Headers.\n *\n * A common header to set is `blobContentType`, enabling the\n * browser to provide functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n/**\n * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}.\n */\nexport interface BlockBlobParallelUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Destination block blob size in bytes.\n */\n blockSize?: number;\n\n /**\n * Blob size threshold in bytes to start concurrency uploading.\n * Default value is 256MB, blob size less than this option will\n * be uploaded via one I/O operation without concurrency.\n * You can customize a value less equal than the default value.\n */\n maxSingleShotSize?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Blob HTTP Headers. A common header to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel uploading. Must be greater than or equal to 0.\n */\n concurrency?: number;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n\n/**\n * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and\n * {@link BlockBlobClient.uploadBrowserDate}.\n */\nexport type BlobUploadCommonResponse = BlockBlobUploadHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse;\n};\n\n/**\n * BlockBlobClient defines a set of operations applicable to block blobs.\n */\nexport class BlockBlobClient extends BlobClient {\n /**\n * blobContext provided by protocol layer.\n *\n * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API\n * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient.\n */\n private _blobContext: StorageBlob;\n\n /**\n * blockBlobContext provided by protocol layer.\n */\n private blockBlobContext: BlockBlob;\n\n /**\n *\n * Creates an instance of BlockBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.blockBlobContext = new BlockBlob(this.storageClientContext);\n this._blobContext = new StorageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new BlockBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a URL to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): BlockBlobClient {\n return new BlockBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Quick query for a JSON or CSV formatted blob.\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Query and convert a blob to a string\n * const queryBlockBlobResponse = await blockBlobClient.query(\"select * from BlobStorage\");\n * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();\n * console.log(\"Query blob content:\", downloaded);\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
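The three constructor overloads above correspond to the three ways of addressing a block blob: connection string plus names, URL plus credential, and URL plus an explicit pipeline. A minimal sketch of each form, with placeholder account, key, SAS and blob names:

```js
// Sketch: the three BlockBlobClient construction forms described above.
// Account, key, SAS, container and blob names are placeholders.
const {
  BlockBlobClient,
  StorageSharedKeyCredential,
  AnonymousCredential,
  newPipeline,
} = require("@azure/storage-blob");

// 1. Connection string + container + blob (account connection strings are Node.js only).
const fromConnString = new BlockBlobClient(
  process.env.AZURE_STORAGE_CONNECTION_STRING,
  "mycontainer",
  "blockblob.txt"
);

// 2. Blob URL + credential (shared key shown; a TokenCredential from
// @azure/identity or AnonymousCredential also works).
const sharedKey = new StorageSharedKeyCredential("myaccount", "<account-key>");
const fromUrlAndCredential = new BlockBlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/blockblob.txt",
  sharedKey
);

// 3. Blob URL (with SAS) + an explicit pipeline.
const pipeline = newPipeline(new AnonymousCredential());
const fromUrlAndPipeline = new BlockBlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/blockblob.txt?<SAS>",
  pipeline
);
```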
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * @param query -\n * @param options -\n */\n public async query(\n query: string,\n options: BlockBlobQueryOptions = {}\n ): Promise {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-query\", options);\n\n try {\n if (!isNode) {\n throw new Error(\"This operation currently is only supported in Node.js.\");\n }\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const response = await this._blobContext.query({\n abortSignal: options.abortSignal,\n queryRequest: {\n queryType: \"SQL\",\n expression: query,\n inputSerialization: toQuerySerialization(options.inputTextConfiguration),\n outputSerialization: toQuerySerialization(options.outputTextConfiguration),\n },\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n return new BlobQueryResponse(response, {\n abortSignal: options.abortSignal,\n onProgress: options.onProgress,\n onError: options.onError,\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link stageBlock} and {@link commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link uploadFile},\n * {@link uploadStream} or {@link uploadBrowserData} for better performance\n * with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. 
Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to the Block Blob Upload operation.\n * @returns Response data for the Block Blob Upload operation.\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public async upload(\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-upload\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.upload(contentLength, body, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new Block Blob where the contents of the blob are read from a given URL.\n * This API is supported beginning with the 2020-04-08 version. Partial updates\n * are not supported with Put Blob from URL; the content of an existing blob is overwritten with\n * the content of the new blob. To perform partial updates to a block blob’s contents using a\n * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.\n *\n * @param sourceURL - Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. 
Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Optional parameters.\n */\n\n public async syncUploadFromURL(\n sourceURL: string,\n options: BlockBlobSyncUploadFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-syncUploadFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, {\n ...options,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions?.ifMatch,\n sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Uploads the specified block to the block blob's \"staging area\" to be later\n * committed by a call to commitBlockList.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param body - Data to upload to the staging area.\n * @param contentLength - Number of bytes to upload.\n * @param options - Options to the Block Blob Stage Block operation.\n * @returns Response data for the Block Blob Stage Block operation.\n */\n public async stageBlock(\n blockId: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobStageBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlock\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlock(blockId, contentLength, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Stage Block From URL operation creates a new block to be committed as part\n * of a blob where the contents are read from a URL.\n * This API is available starting in version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param sourceURL - Specifies the URL of the blob. 
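A minimal sketch of syncUploadFromURL, which creates the destination blob from a source URL in a single service-side call; the source URL, SAS and all names are placeholders:

```js
// Sketch: create a block blob whose content is read by the service from a
// source URL (Put Blob from URL). The source must be public or carry a SAS.
const { BlobServiceClient } = require("@azure/storage-blob");

async function uploadFromUrl() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const blockBlobClient = serviceClient
    .getContainerClient("mycontainer")
    .getBlockBlobClient("copy-of-source.txt");

  await blockBlobClient.syncUploadFromURL(
    "https://myaccount.blob.core.windows.net/sourcecontainer/source.txt?<SAS>",
    {
      // Fail instead of overwriting if the destination already exists.
      conditions: { ifNoneMatch: "*" },
      // Keep the source blob's tags on the destination.
      copySourceTags: "COPY",
    }
  );
}

uploadFromUrl().catch(console.error);
```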
The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Options to the Block Blob Stage Block From URL operation.\n * @returns Response data for the Block Blob Stage Block From URL operation.\n */\n public async stageBlockFromURL(\n blockId: string,\n sourceURL: string,\n offset: number = 0,\n count?: number,\n options: BlockBlobStageBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlockFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes a blob by specifying the list of block IDs that make up the blob.\n * In order to be written as part of a blob, a block must have been successfully written\n * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to\n * update a blob by uploading only those blocks that have changed, then committing the new and existing\n * blocks together. 
Any blocks not specified in the block list and permanently deleted.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list\n *\n * @param blocks - Array of 64-byte value that is base64-encoded\n * @param options - Options to the Block Blob Commit Block List operation.\n * @returns Response data for the Block Blob Commit Block List operation.\n */\n public async commitBlockList(\n blocks: string[],\n options: BlockBlobCommitBlockListOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-commitBlockList\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.commitBlockList(\n { latest: blocks },\n {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of blocks that have been uploaded as part of a block blob\n * using the specified block list filter.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list\n *\n * @param listType - Specifies whether to return the list of committed blocks,\n * the list of uncommitted blocks, or both lists together.\n * @param options - Options to the Block Blob Get Block List operation.\n * @returns Response data for the Block Blob Get Block List operation.\n */\n public async getBlockList(\n listType: BlockListType,\n options: BlockBlobGetBlockListOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-getBlockList\", options);\n try {\n const res = await this.blockBlobContext.getBlockList(listType, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n if (!res.committedBlocks) {\n res.committedBlocks = [];\n }\n\n if (!res.uncommittedBlocks) {\n res.uncommittedBlocks = [];\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level functions\n\n /**\n * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the 
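A minimal sketch of the staging flow described above: stage blocks, commit the ordered block-ID list, then read back the committed list. The block IDs here are generated ad hoc as equal-length base64-encoded strings; the connection string and names are placeholders:

```js
// Sketch: stage two blocks, commit them as one blob, then list committed blocks.
const { BlobServiceClient } = require("@azure/storage-blob");

// Block IDs must be base64 strings of equal length within a blob.
const blockId = (i) => Buffer.from(String(i).padStart(6, "0")).toString("base64");

async function stageAndCommit() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const blockBlobClient = serviceClient
    .getContainerClient("mycontainer")
    .getBlockBlobClient("staged.txt");

  const parts = ["Hello ", "world!"];
  const ids = [];
  for (let i = 0; i < parts.length; i++) {
    const id = blockId(i);
    ids.push(id);
    // Each block goes to the blob's staging area; nothing is readable yet.
    await blockBlobClient.stageBlock(id, parts[i], Buffer.byteLength(parts[i]));
  }

  // Committing the ordered ID list writes the blob; staged blocks that are
  // left out of the list are discarded by the service.
  await blockBlobClient.commitBlockList(ids);

  const blockList = await blockBlobClient.getBlockList("committed");
  console.log(blockList.committedBlocks.map((b) => ({ name: b.name, size: b.size })));
}

stageAndCommit().catch(console.error);
```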
browser to provide\n * functionality based on file type.\n *\n * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView\n * @param options -\n */\n public async uploadData(\n data: Buffer | Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadData\", options);\n try {\n if (isNode) {\n let buffer: Buffer;\n if (data instanceof Buffer) {\n buffer = data;\n } else if (data instanceof ArrayBuffer) {\n buffer = Buffer.from(data);\n } else {\n data = data as ArrayBufferView;\n buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n\n return this.uploadSeekableInternal(\n (offset: number, size: number): Buffer => buffer.slice(offset, offset + size),\n buffer.byteLength,\n updatedOptions\n );\n } else {\n const browserBlob = new Blob([data]);\n return this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n }\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN BROWSERS.\n *\n * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.\n *\n * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call\n * {@link commitBlockList} to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n * @deprecated Use {@link uploadData} instead.\n *\n * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView\n * @param options - Options to upload browser data.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadBrowserData(\n browserData: Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadBrowserData\", options);\n try {\n const browserBlob = new Blob([browserData]);\n return await this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n *\n * Uploads data to block blob. 
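A minimal sketch of uploadData with the parallel-upload knobs from BlockBlobParallelUploadOptions (maxSingleShotSize, blockSize, concurrency); the sizes and names below are illustrative placeholders, not recommended defaults:

```js
// Sketch: upload an in-memory Buffer with explicit parallel-upload tuning.
const { BlobServiceClient } = require("@azure/storage-blob");

async function uploadBuffer() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const blockBlobClient = serviceClient
    .getContainerClient("mycontainer")
    .getBlockBlobClient("large-payload.bin");

  const data = Buffer.alloc(32 * 1024 * 1024, 1); // 32 MiB of dummy data

  await blockBlobClient.uploadData(data, {
    // Anything at or below maxSingleShotSize goes up as a single upload call;
    // larger payloads are split into blockSize-sized stageBlock calls.
    maxSingleShotSize: 8 * 1024 * 1024,
    blockSize: 4 * 1024 * 1024,
    concurrency: 4,
    onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`),
    blobHTTPHeaders: { blobContentType: "application/octet-stream" },
  });
}

uploadBuffer().catch(console.error);
```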
Requires a bodyFactory as the data source,\n * which need to return a {@link HttpRequestBody} object with the offset and size provided.\n *\n * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param bodyFactory -\n * @param size - size of the data to upload.\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n private async uploadSeekableInternal(\n bodyFactory: (offset: number, size: number) => HttpRequestBody,\n size: number,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {\n throw new RangeError(\n `blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`\n );\n }\n\n if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {\n options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;\n }\n if (\n options.maxSingleShotSize < 0 ||\n options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES\n ) {\n throw new RangeError(\n `maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`\n );\n }\n\n if (options.blockSize === 0) {\n if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(`${size} is too larger to upload to a block blob.`);\n }\n if (size > options.maxSingleShotSize) {\n options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);\n if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n }\n }\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadSeekableInternal\", options);\n\n try {\n if (size <= options.maxSingleShotSize) {\n return await this.upload(bodyFactory(0, size), size, updatedOptions);\n }\n\n const numBlocks: number = Math.floor((size - 1) / options.blockSize) + 1;\n if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(\n `The buffer's size is too big or the BlockSize is too small;` +\n `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`\n );\n }\n\n const blockList: string[] = [];\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n\n const batch = new Batch(options.concurrency);\n for (let i = 0; i < numBlocks; i++) {\n batch.addOperation(async (): Promise => {\n const blockID = generateBlockID(blockIDPrefix, i);\n const start = options.blockSize! * i;\n const end = i === numBlocks - 1 ? 
size : start + options.blockSize!;\n const contentLength = end - start;\n blockList.push(blockID);\n await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n // Update progress after block is successfully uploaded to server, in case of block trying\n // TODO: Hook with convenience layer progress event in finer level\n transferProgress += contentLength;\n if (options.onProgress) {\n options.onProgress!({\n loadedBytes: transferProgress,\n });\n }\n });\n }\n await batch.do();\n\n return this.commitBlockList(blockList, updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a local file in blocks to a block blob.\n *\n * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList\n * to commit the block list.\n *\n * @param filePath - Full path of local file\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadFile(\n filePath: string,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadFile\", options);\n try {\n const size = (await fsStat(filePath)).size;\n return await this.uploadSeekableInternal(\n (offset, count) => {\n return () =>\n fsCreateReadStream(filePath, {\n autoClose: true,\n end: count ? offset + count - 1 : Infinity,\n start: offset,\n });\n },\n size,\n {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a Node.js Readable stream into block blob.\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * * Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n *\n * @param stream - Node.js Readable stream\n * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB\n * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,\n * positive correlation with max uploading concurrency. 
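A minimal sketch of uploadFile (Node.js only); the local path, container and blob names are placeholders:

```js
// Sketch (Node.js only): upload a local file in blocks.
const { BlobServiceClient } = require("@azure/storage-blob");

async function uploadLocalFile() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const blockBlobClient = serviceClient
    .getContainerClient("mycontainer")
    .getBlockBlobClient("backup.tar.gz");

  // Small files go up in a single call; larger ones are staged as blocks and
  // committed, using up to `concurrency` parallel requests.
  await blockBlobClient.uploadFile("./backup.tar.gz", {
    blockSize: 8 * 1024 * 1024,
    concurrency: 5,
    onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`),
  });
}

uploadLocalFile().catch(console.error);
```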
Default value is 5\n * @param options - Options to Upload Stream to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadStream(\n stream: Readable,\n bufferSize: number = DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n maxConcurrency: number = 5,\n options: BlockBlobUploadStreamOptions = {}\n ): Promise {\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadStream\", options);\n\n try {\n let blockNum = 0;\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n const blockList: string[] = [];\n\n const scheduler = new BufferScheduler(\n stream,\n bufferSize,\n maxConcurrency,\n async (body, length) => {\n const blockID = generateBlockID(blockIDPrefix, blockNum);\n blockList.push(blockID);\n blockNum++;\n\n await this.stageBlock(blockID, body, length, {\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n\n // Update progress after block is successfully uploaded to server, in case of block trying\n transferProgress += length;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n },\n // concurrency should set a smaller value than maxConcurrency, which is helpful to\n // reduce the possibility when a outgoing handler waits for stream data, in\n // this situation, outgoing handlers are blocked.\n // Outgoing queue shouldn't be empty.\n Math.ceil((maxConcurrency / 4) * 3)\n );\n await scheduler.do();\n\n return await this.commitBlockList(blockList, {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure the {@link PageBlobClient.create} operation.\n */\nexport interface PageBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when creating a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
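A minimal sketch of uploadStream (Node.js only) that follows the highWaterMark tip above by sizing the read stream's buffers to match bufferSize; the file path and names are placeholders:

```js
// Sketch (Node.js only): stream a file into a block blob. The read stream's
// highWaterMark matches bufferSize so each buffered chunk maps to one block.
const fs = require("fs");
const { BlobServiceClient } = require("@azure/storage-blob");

async function uploadStreamed() {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING
  );
  const blockBlobClient = serviceClient
    .getContainerClient("mycontainer")
    .getBlockBlobClient("streamed.log");

  const bufferSize = 8 * 1024 * 1024; // 8 MiB per block
  const maxConcurrency = 5;           // at most 5 buffers allocated / in flight

  const readStream = fs.createReadStream("./big-file.log", {
    highWaterMark: bufferSize,
  });

  await blockBlobClient.uploadStream(readStream, bufferSize, maxConcurrency, {
    blobHTTPHeaders: { blobContentType: "text/plain" },
    onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`),
  });
}

uploadStreamed().catch(console.error);
```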
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.uploadPages} operation.\n */\nexport interface PageBlobUploadPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Callback to receive events on the progress of upload pages operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content. 
This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.clearPages} operation.\n */\nexport interface PageBlobClearPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when clearing pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure page blob - get page ranges segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesSegment}\n * - {@link PageBlobClient.listPageRangeItemSegments}\n * - {@link PageBlobClient.listPageRangeItems}\n */\ninterface PageBlobListPageRangesSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRanges} operation.\n */\nexport interface PageBlobListPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n /**\n * (unused)\n */\n range?: string;\n}\n\n/**\n * Options to configure page blob - get page ranges diff segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesDiffSegment}\n * - {@link PageBlobClient.listPageRangeDiffItemSegments}\n * - {@link PageBlobClient.listPageRangeDiffItems}\n */\ninterface PageBlobListPageRangesDiffSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation.\n */\nexport interface PageBlobListPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.resize} operation.\n */\nexport interface PageBlobResizeOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when resizing a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link PageBlobClient.updateSequenceNumber} operation.\n */\nexport interface PageBlobUpdateSequenceNumberOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.startCopyIncremental} operation.\n */\nexport interface PageBlobStartCopyIncrementalOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when starting a copy incremental operation.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation.\n */\nexport interface PageBlobUploadPagesFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. 
Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n */\nexport class PageBlobClient extends BlobClient {\n /**\n * pageBlobsContext provided by protocol layer.\n */\n private pageBlobContext: PageBlob;\n\n /**\n *\n * Creates an instance of PageBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n *\n * @param url - A URL string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.pageBlobContext = new PageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): PageBlobClient {\n return new PageBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a page blob of the specified length. 
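A rough sketch of the PageBlobClient constructor overloads described above; the account, key, container, and blob names are placeholders, and the connection-string form is Node.js only:

```js
const { PageBlobClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

// (connectionString, containerName, blobName) overload — Node.js runtime only.
const fromConnString = new PageBlobClient(
  "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<key>;EndpointSuffix=core.windows.net",
  "mycontainer",
  "mypageblob"
);

// (url, credential) overload; a SAS URL with AnonymousCredential works the same way.
const fromUrl = new PageBlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/mypageblob",
  new StorageSharedKeyCredential("myaccount", "<key>")
);
```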
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options - Options to the Page Blob Create operation.\n * @returns Response data for the Page Blob Create operation.\n */\n public async create(\n size: number,\n options: PageBlobCreateOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-create\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.create(0, size, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n blobSequenceNumber: options.blobSequenceNumber,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options -\n */\n public async createIfNotExists(\n size: number,\n options: PageBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-createIfNotExists\", options);\n try {\n const conditions = { ifNoneMatch: ETagAny };\n const res = await this.create(size, {\n ...options,\n conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes 1 or more pages to the page blob. 
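A usage sketch for `create` and `createIfNotExists` shown above; sizes must be a multiple of 512 bytes, and `pageBlobClient` is assumed to be a PageBlobClient like the one constructed earlier:

```js
// Allocate a 1 KiB page blob (two 512-byte pages).
await pageBlobClient.create(1024);

// createIfNotExists leaves an existing blob unchanged and reports the outcome.
const result = await pageBlobClient.createIfNotExists(1024);
console.log(result.succeeded ? "created" : "already existed");
```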
The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param body - Data to upload\n * @param offset - Offset of destination page blob\n * @param count - Content length of the body, also number of bytes to be uploaded\n * @param options - Options to the Page Blob Upload Pages operation.\n * @returns Response data for the Page Blob Upload Pages operation.\n */\n public async uploadPages(\n body: HttpRequestBody,\n offset: number,\n count: number,\n options: PageBlobUploadPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPages\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPages(count, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param sourceOffset - The source offset to copy from. 
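A short sketch of `uploadPages`, which writes 512-byte-aligned ranges; the `pageBlobClient` and the blob created above are assumed:

```js
// Offsets and counts must be multiples of 512.
const page = Buffer.alloc(512, "a");
await pageBlobClient.uploadPages(page, 0, page.length, {
  onProgress: (ev) => console.log(`${ev.loadedBytes} bytes written`),
});
```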
Pass 0 to copy from the beginning of source page blob\n * @param destOffset - Offset of destination page blob\n * @param count - Number of bytes to be uploaded from source page blob\n * @param options -\n */\n public async uploadPagesFromURL(\n sourceURL: string,\n sourceOffset: number,\n destOffset: number,\n count: number,\n options: PageBlobUploadPagesFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPagesFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPagesFromURL(\n sourceURL,\n rangeToString({ offset: sourceOffset, count }),\n 0,\n rangeToString({ offset: destOffset, count }),\n {\n abortSignal: options.abortSignal,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n sequenceNumberAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param offset - Starting byte position of the pages to clear.\n * @param count - Number of bytes to clear.\n * @param options - Options to the Page Blob Clear Pages operation.\n * @returns Response data for the Page Blob Clear Pages operation.\n */\n public async clearPages(\n offset: number = 0,\n count?: number,\n options: PageBlobClearPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-clearPages\", options);\n try {\n return await this.pageBlobContext.clearPages(0, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns Response data for the Page Blob Get Ranges operation.\n */\n 
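And a companion sketch for `clearPages` and `getPageRanges`, again assuming the `pageBlobClient` from the previous snippets:

```js
// Zero out the second 512-byte page, then inspect which ranges are still valid.
await pageBlobClient.clearPages(512, 512);
const ranges = await pageBlobClient.getPageRanges(0, 1024);
console.log("pages:", ranges.pageRange, "cleared:", ranges.clearRange);
```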
public async getPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobGetPageRangesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRanges\", options);\n try {\n return await this.pageBlobContext\n .getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesSegment returns a single segment of page ranges starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to PageBlob Get Page Ranges Segment operation.\n */\n private async listPageRangesSegment(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesSegment\", options);\n try {\n return await this.pageBlobContext.getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n marker: marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. 
The marker value is opaque to the client.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItemSegments(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(\n offset,\n count,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItems(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeItemSegments(\n offset,\n count,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges for a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges for a page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRanges()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRanges();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * 
@param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeItems(offset, count, options);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeItemSegments(offset, count, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...options,\n });\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n public async getPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesDiff\", options);\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshot,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesDiffSegment returns a single segment of page ranges starting from the\n * specified Marker for difference between previous snapshot and the target page blob.\n * Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesDiffSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async listPageRangesDiffSegment(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): Promise {\n const { span, updatedOptions } = 
createSpan(\"PageBlobClient-getPageRangesDiffSegment\", options);\n try {\n return await this.pageBlobContext.getPageRangesDiff({\n abortSignal: options?.abortSignal,\n leaseAccessConditions: options?.conditions,\n modifiedAccessConditions: {\n ...options?.conditions,\n ifTags: options?.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshotOrUrl,\n range: rangeToString({\n offset: offset,\n count: count,\n }),\n marker: marker,\n maxPageSize: options?.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}\n *\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. The marker value is opaque to the client.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItemSegments(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItems(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from 
`blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRangesDiff();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobListPageRangesDiffOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, {\n ...options,\n });\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshot,\n settings.continuationToken,\n {\n maxPageSize: settings.maxPageSize,\n ...options,\n }\n );\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page 
Range Diff operation.\n */\n public async getPageRangesDiffForManagedDisks(\n offset: number,\n count: number,\n prevSnapshotUrl: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\n \"PageBlobClient-GetPageRangesDiffForManagedDisks\",\n options\n );\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevSnapshotUrl,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param size - Target size\n * @param options - Options to the Page Blob Resize operation.\n * @returns Response data for the Page Blob Resize operation.\n */\n public async resize(\n size: number,\n options: PageBlobResizeOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-resize\", options);\n try {\n return await this.pageBlobContext.resize(size, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.\n * @param sequenceNumber - Required if sequenceNumberAction is max or update\n * @param options - Options to the Page Blob Update Sequence Number operation.\n * @returns Response data for the Page Blob Update Sequence Number operation.\n */\n public async updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n sequenceNumber?: number,\n options: PageBlobUpdateSequenceNumberOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-updateSequenceNumber\", options);\n try {\n return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {\n abortSignal: options.abortSignal,\n blobSequenceNumber: sequenceNumber,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to 
the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param copySource - Specifies the name of the source page blob snapshot. For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Options to the Page Blob Copy Incremental operation.\n * @returns Response data for the Page Blob Copy Incremental operation.\n */\n public async startCopyIncremental(\n copySource: string,\n options: PageBlobStartCopyIncrementalOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-startCopyIncremental\", options);\n try {\n return await this.pageBlobContext.copyIncremental(copySource, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js new file mode 100644 index 0000000000..efc10f8027 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js @@ -0,0 +1,1273 @@ +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; +import { getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; +import { SpanStatusCode } from "@azure/core-tracing"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { Container } from "./generated/src/operations"; +import { newPipeline, isPipelineLike } from "./Pipeline"; +import { StorageClient } from "./StorageClient"; +import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { appendToURLPath, appendToURLQuery, BlobNameToString, ConvertInternalResponseOfListBlobFlat, ConvertInternalResponseOfListBlobHierarchy, extractConnectionStringParts, isIpEndpointStyle, parseObjectReplicationRecord, ProcessBlobItems, ProcessBlobPrefixes, toTags, truncatedISO8061Date, } from "./utils/utils.common"; +import { generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; +import { BlobLeaseClient } from "./BlobLeaseClient"; +import { AppendBlobClient, BlobClient, BlockBlobClient, PageBlobClient, } from "./Clients"; +import { BlobBatchClient } from "./BlobBatchClient"; +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export class ContainerClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); + } + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. 
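The constructor branches above mirror the blob clients; a small sketch of creating a ContainerClient directly, where the SAS URL and connection string values are placeholders and the connection-string form is Node.js only:

```js
const { ContainerClient } = require("@azure/storage-blob");

// From a container URL carrying a SAS token (works in browsers and Node.js).
const byUrl = new ContainerClient(
  "https://myaccount.blob.core.windows.net/mycontainer?<sas-token>"
);

// From an account connection string plus a container name (Node.js only).
const byConnString = new ContainerClient(
  "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<key>;EndpointSuffix=core.windows.net",
  "mycontainer"
);
```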
+ * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); + try { + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when checking container existence", + }); + return false; + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
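A hedged sketch of `createIfNotExists` and `exists` as documented above, assuming `containerClient` was built as in the previous snippet and runs inside an async function; note that `exists()` is only a point-in-time check:

```js
const created = await containerClient.createIfNotExists();
console.log(created.succeeded ? "container created" : "container already present");

// Another client could still delete the container right after this check succeeds.
if (await containerClient.exists()) {
  console.log("container currently exists");
}
```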
+ */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + try { + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. 
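+ *
+ * Example usage (a minimal sketch based on the `succeeded` flag this method returns):
+ *
+ * ```js
+ * const deleteResponse = await containerClient.deleteIfExists();
+ * if (!deleteResponse.succeeded) {
+ *   console.log("Container did not exist, so nothing was deleted");
+ * }
+ * ```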
+ */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + try { + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. 
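+ *
+ * Example usage (a minimal sketch; the fields read below are the ones this method populates):
+ *
+ * ```js
+ * const accessPolicy = await containerClient.getAccessPolicy();
+ * console.log("Public access level:", accessPolicy.blobPublicAccess);
+ * for (const identifier of accessPolicy.signedIdentifiers) {
+ *   console.log("Stored access policy:", identifier.id, identifier.accessPolicy);
+ * }
+ * ```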
+ */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + try { + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + try { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. 
+ * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + try { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. 
+ * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + try { + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + try { + const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker, options = {}) { + return __asyncGenerator(this, arguments, function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield __await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield __await(yield __await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + listItems(options = {}) { + return __asyncGenerator(this, arguments, function* listItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const listBlobsFlatSegmentResponse = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter, marker, options = {}) { + return __asyncGenerator(this, arguments, function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield __await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield __await(yield __await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter, options = {}) { + return __asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { + var e_2, _a; + let marker; + try { + for (var _b = __asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield __await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield __await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield __await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield __await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { + const segment = _c.value; + yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". 
+ // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.getPath().split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. 
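+ *
+ * Example usage (an illustrative sketch; `createBatch`, `deleteBlob` and `submitBatch` are
+ * assumed from the BlobBatchClient/BlobBatch API, and `blobClient` is an existing {@link BlobClient}):
+ *
+ * ```js
+ * const batchClient = containerClient.getBlobBatchClient();
+ * const batch = batchClient.createBatch();
+ * await batch.deleteBlob(blobClient);
+ * await batchClient.submitBatch(batch);
+ * ```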
+ */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} +//# sourceMappingURL=ContainerClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map new file mode 100644 index 0000000000..c38c344801 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ContainerClient.js","sourceRoot":"","sources":["../../../src/ContainerClient.ts"],"names":[],"mappings":";AAGA,OAAO,EACL,uBAAuB,EAGvB,MAAM,EACN,iBAAiB,EAEjB,UAAU,GACX,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,cAAc,EAAE,MAAM,qBAAqB,CAAC;AACrD,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAE,SAAS,EAAE,MAAM,4BAA4B,CAAC;AAgCvD,OAAO,EAAE,WAAW,EAAgB,cAAc,EAA0B,MAAM,YAAY,CAAC;AAC/F,OAAO,EAAiB,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAC/D,OAAO,EAAE,kCAAkC,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AACjF,OAAO,EACL,eAAe,EACf,gBAAgB,EAChB,gBAAgB,EAChB,qCAAqC,EACrC,0CAA0C,EAC1C,4BAA4B,EAC5B,iBAAiB,EACjB,4BAA4B,EAC5B,gBAAgB,EAChB,mBAAmB,EACnB,MAAM,EACN,oBAAoB,GACrB,MAAM,sBAAsB,CAAC;AAE9B,OAAO,EAAE,8BAA8B,EAAE,MAAM,8BAA8B,CAAC;AAC9E,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EACL,gBAAgB,EAChB,UAAU,EAEV,eAAe,EAGf,cAAc,GACf,MAAM,WAAW,CAAC;AACnB,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAyiBpD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,aAAa;IAgEhD,YACE,qBAA6B,EAC7B,mCAKgB;IAChB,mFAAmF;IACnF,gEAAgE;IAChE,OAAgC;QAEhC,IAAI,QAAsB,CAAC;QAC3B,IAAI,GAAW,CAAC;QAChB,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;QACxB,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;YACvD,oCAAoC;YACpC,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;SAChD;aAAM,IACL,CAAC,MAAM,IAAI,mCAAmC,YAAY,0BAA0B,CAAC;YACrF,mCAAmC,YAAY,mBAAmB;YAClE,iBAAiB,CAAC,mCAAmC,CAAC,EACtD;YACA,mIAAmI;YACnI,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;SACtE;aAAM,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,mIAAmI;YACnI,+DAA+D;YAC/D,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;SAC5D;aAAM,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;YACA,wGAAwG;YACxG,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAE1D,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;YAC3E,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;gBAC/C,IAAI,MAAM,EAAE;oBACV,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,CAAC;oBAE7E,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG,uBAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;qBACzE;oBAED,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;iBACtD;qBAAM;oBACL,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;iBACvF;aACF;iBAAM,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC;wBACtE,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;aAC5D;iBAAM;gBACL,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;aACH;SACF;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;SAC5E;QACD,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,uBAAuB,EAAE,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;IACnE,CAAC;IApID;;OAEG;IACH,IAAW,aAAa;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;IAC7B,CAAC;IAiID;;;;;;;;;;;;;;;OAeG;IACI,KAAK,CAAC,MAAM,CAAC,UAAkC,EAAE;QACtD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,gDAAgD;YAChD,uFAAuF;YACvF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,iCACpC,OAAO,GACP,kCAAkC,
CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,iBAAiB,CAC5B,UAAkC,EAAE;;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,wBAAwB,EAAE;gBACrD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EACL,iFAAiF;iBACpF,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;OAQG;IACI,KAAK,CAAC,MAAM,CAAC,UAAkC,EAAE;QACtD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,cAAc,EAAE,cAAc,CAAC,cAAc;aAC9C,CAAC,CAAC;YACH,OAAO,IAAI,CAAC;SACb;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,sDAAsD;iBAChE,CAAC,CAAC;gBACH,OAAO,KAAK,CAAC;aACd;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,aAAa,CAAC,QAAgB;QACnC,OAAO,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IAChG,CAAC;IAED;;;;OAIG;IACI,mBAAmB,CAAC,QAAgB;QACzC,OAAO,IAAI,gBAAgB,CACzB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACI,kBAAkB,CAAC,QAAgB;QACxC,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,QAAgB;QACvC,OAAO,IAAI,cAAc,CACvB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,aAAa,CACxB,UAAyC,EAAE;QAE3C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;QACtF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,+BAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,OAAO,CAAC,UAAU,GAClB,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,MAAM,CACjB,UAAwC,EAAE;QAE1C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,iBACvC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;OAMG;IACI,KAAK,CAAC,cAAc,CACzB,UAAwC,EAAE;;QAE1C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,gCAAgC,EAAE
,OAAO,CAAC,CAAC;QAEvF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,qCACE,SAAS,EAAE,IAAI,IACZ,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,IACxB;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,0CAAE,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAE,cAAc,CAAC,KAAK;oBAC1B,OAAO,EAAE,iEAAiE;iBAC3E,CAAC,CAAC;gBACH,qCACE,SAAS,EAAE,KAAK,IACb,MAAA,CAAC,CAAC,QAAQ,0CAAE,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,IACrB;aACH;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACI,KAAK,CAAC,WAAW,CACtB,QAAmB,EACnB,UAAuC,EAAE;QAEzC,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,IAAI,OAAO,CAAC,UAAU,CAAC,iBAAiB,EAAE;YACxC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;SACH;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,eAAe,CAC1B,UAA2C,EAAE;QAE7C,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;YACvB,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;SACzB;QAED,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QAExF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,iBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,IACtC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,GAAG,GAAqC;gBAC5C,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,gBAAgB,EAAE,QAAQ,CAAC,gBAAgB;gBAC3C,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,YAAY,EAAE,QAAQ,CAAC,YAAY;gBACnC,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe;gBACzC,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,QAAQ,CAAC,OAAO;aAC1B,CAAC;YAEF,KAAK,MAAM,UAAU,IAAI,QAAQ,EAAE;gBACjC,IAAI,YAAY,GAAQ,SAAS,CAAC;gBAClC,IAAI,UAAU,CAAC,YAAY,EAAE;oBAC3B,YAAY,GAAG;wBACb,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;qBACjD,CAAC;oBAEF,IAAI,UAAU,CAAC,YAAY,CAAC,SAAS,EAAE;wBACrC,YAAY,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;qBACtE;oBAED,IAAI,UAAU,CAAC,YAAY,CAAC,QAAQ,EAAE;wBACpC,YAAY,CAAC,QAAQ,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;qBACpE;iBACF;gBAED,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC;oBACzB,YAAY;oBACZ,EAAE,EAAE,UAAU,CAAC,EAAE;iBAClB,CAAC,CAAC;aACJ;YAED,OAAO,GAAG,CAAC;SACZ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;;OAgBG;IACI,KAAK,CAAC,eAAe,CAC1B,MAAyB,EACzB,YAAiC,EACjC,UAA2C,EAAE;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,GAAG,GAA4B,EAAE,CAAC;YACxC,KAAK,MAAM,UAAU,IAAI,YAAY,IAAI,EAAE,EAAE;gBAC3C,GAAG,CAAC,IAAI,CAAC;oBACP,YAAY,EAAE;wBACZ,SAAS,EAAE,UAAU,CAAC,YAAY,CAAC,SAAS;4BAC1C,CAAC,CAAC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC;4BACzD,CAAC,CAAC,EAAE;wBACN,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;wBAChD,QAAQ,EAAE,UAAU,CAAC,YAAY,CAAC,QAAQ;4BACxC,CAAC,CAAC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;4BACxD,CAAC,CAAC,EAAE;qBACP;oBACD,EAAE,EAAE,UAAU,CAAC,EAAE;iBAClB,CAAC,CAAC;aACJ;YAED,OAAO,MAAM,IAAI,CAAC,g
BAAgB,CAAC,eAAe,iBAChD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACN,YAAY,EAAE,GAAG,EACjB,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;SACJ;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;OAKG;IACI,kBAAkB,CAAC,cAAuB;QAC/C,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IACnD,CAAC;IAED;;;;;;;;;;;;;;;;;;;;;OAqBG;IACI,KAAK,CAAC,eAAe,CAC1B,QAAgB,EAChB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE;QAEpC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,CAAC;YAC1D,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,IAAI,EAAE,aAAa,EAAE,cAAc,CAAC,CAAC;YACnF,OAAO;gBACL,eAAe;gBACf,QAAQ;aACT,CAAC;SACH;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CAAC,UAAU,CACrB,QAAgB,EAChB,UAAsC,EAAE;QAExC,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAC9C,IAAI,OAAO,CAAC,SAAS,EAAE;gBACrB,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;aACxD;YACD,OAAO,MAAM,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;SAChD;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;OASG;IACK,KAAK,CAAC,mBAAmB,CAC/B,MAAe,EACf,UAA4C,EAAE;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,+BAC9D,MAAM,IACH,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;gBACnD,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;aAClF;YAED,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE,qCAAqC,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,KAElF,OAAO,kCACF,QAAQ,CAAC,OAAO,KACnB,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;wBAC5D,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;wBACF,OAAO,QAAQ,CAAC;oBAClB,CAAC,CAAC,MAEL,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACK,KAAK,CAAC,wBAAwB,CACpC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE;;QAE9C,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QACF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,wBAAwB,CAAC,SAAS,gCAC7E,MAAM,IACH,OAAO,GACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;gBACnD,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;aAClF;YAED,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;YACnC,IAAK,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,KAAK,SAAS,EAAE;gBACzD,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,mBAAmB,CAChD,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,CACxC,CAAC;aACH;YAED,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE,0CAA0C,CA
AC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,KAEvF,OAAO,kCACF,QAAQ,CAAC,OAAO,KACnB,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;wBAC5D,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;wBACF,OAAO,QAAQ,CAAC;oBAClB,CAAC,CAAC,EACF,YAAY,EAAE,MAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,EAAE,EAAE;wBACtE,MAAM,UAAU,GAAe;4BAC7B,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;yBAChD,CAAC;wBACF,OAAO,UAAU,CAAC;oBACpB,CAAC,CAAC,MAEL,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;OAWG;IACY,YAAY,CACzB,MAAe,EACf,UAA4C,EAAE;;YAE9C,IAAI,4BAA4B,CAAC;YACjC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,4BAA4B,GAAG,cAAM,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBAC/E,MAAM,GAAG,4BAA4B,CAAC,iBAAiB,CAAC;oBACxD,oBAAM,cAAM,4BAA4B,CAAA,CAAA,CAAC;iBAC1C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;OAIG;IACY,SAAS,CACtB,UAA4C,EAAE;;;YAE9C,IAAI,MAA0B,CAAC;;gBAC/B,KAAiD,IAAA,KAAA,cAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,IAAA;oBAAxE,MAAM,4BAA4B,WAAA,CAAA;oBAC3C,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,4BAA4B,CAAC,OAAO,CAAC,SAAS,CAAA,CAAA,CAAA,CAAC;iBACvD;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAqEG;IACI,aAAa,CAClB,UAAqC,EAAE;QAEvC,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;YAClC,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;SAClC;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;SACrC;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SACpC;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,cAAc,mCACf,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CACpD,CAAC;QAEF,8CAA8C;QAC9C,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;QAC5C,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,kBACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,cAAc,EACjB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;OAYG;IACY,qBAAqB,CAClC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE;;YAE9C,IAAI,iCAAiC,CAAC;YACtC,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,iCAAiC,GAAG,cAAM,IAAI,CAAC,wBAAwB,CACrE,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,CAAC;oBACF,MAAM,GAAG,iCAAiC,CAAC,iBAAiB,CAAC;oBAC7D,oBAAM,cAAM,iCAAiC,CAAA,CAAA,CAAC;iBAC/C,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;OAKG;IACY,oBAAoB,CACjC,SAAiB,EACjB,UAA4C,EAAE;;;YAE9C,IAAI,MAA0B,CAAC;;gBAC/B,KAAsD,IAAA,KAAA,cAAA,IAAI,CAAC,qBAAqB,CAC9E,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,iCAAiC,WAAA,CAAA;oB
AKhD,MAAM,OAAO,GAAG,iCAAiC,CAAC,OAAO,CAAC;oBAC1D,IAAI,OAAO,CAAC,YAAY,EAAE;wBACxB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,YAAY,EAAE;4BACzC,oCACE,IAAI,EAAE,QAAQ,IACX,MAAM,EACV,CAAC;yBACH;qBACF;oBACD,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,SAAS,EAAE;wBACpC,oCAAQ,IAAI,EAAE,MAAM,IAAK,IAAI,EAAE,CAAC;qBACjC;iBACF;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA6EG;IACI,oBAAoB,CACzB,SAAiB,EACjB,UAAqC,EAAE;QAKvC,IAAI,SAAS,KAAK,EAAE,EAAE;YACpB,MAAM,IAAI,UAAU,CAAC,iDAAiD,CAAC,CAAC;SACzE;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;YAC1B,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;SACzB;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;YAC3B,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;SAC1B;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;YAClC,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;SAClC;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;YACvB,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SACtB;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;SACrC;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;YACrC,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;SACpC;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;YAC5B,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;SAC3B;QACD,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;YACzB,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;SAC5B;QAED,MAAM,cAAc,mCACf,OAAO,GACP,CAAC,OAAO,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,CAAC,CACpD,CAAC;QACF,gEAAgE;QAChE,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;QAClE,OAAO;YACL;;eAEG;YACH,KAAK,CAAC,IAAI;gBACR,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE,QAAQ,CAAC,iBAAiB,kBACrE,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,cAAc,EACjB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAED;;;;;;;;;;;;;;;;OAgBG;IACK,KAAK,CAAC,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE;QAEpD,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,IAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,KACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,MAAA,IAAI,CAAC,IAAI,0CAAE,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;qBAC1C;oBACD,uCAAY,IAAI,KAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,IAAG;gBACxD,CAAC,CAAC,GACH,CAAC;YACF,OAAO,eAAe,CAAC;SACxB;QAAC,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAE,cAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;aACnB,CAAC,CAAC;YACH,MAAM,CAAC,CAAC;SACT;gBAAS;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;SACZ;IACH,CAAC;IAED;;;;;;;;;;;;;;;OAeG;IACY,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE;;YAEpD,IAAI,QAAQ,CAAC;YACb,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,QAAQ,GAAG,cAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;oBACtC,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,oBAAM,QAAQ,CAAA,CAAC;iBAChB,QAAQ,MAAM,EAAE;aAClB;QACH,CAAC;KAAA;IAED;;;;;;;;OAQG;IACY,oBAAoB,CACjC,sBAA8B,EAC9B,UAAkD,EAAE;;;YAEpD,IAAI,MAA0B,CAAC;;gBAC/B,KAA4B,IAAA,KAAA,cAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACt
B,MAAM,EACN,OAAO,CACR,CAAA,IAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,cAAA,KAAK,CAAC,CAAC,iBAAA,cAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;iBACtB;;;;;;;;;QACH,CAAC;KAAA;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA4EG;IACI,eAAe,CACpB,sBAA8B,EAC9B,UAA0C,EAAE;QAE5C,8CAA8C;QAC9C,MAAM,kBAAkB,qBACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;YACL;;eAEG;YACH,IAAI;gBACF,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,CAAC;YACD;;eAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC;gBACpB,OAAO,IAAI,CAAC;YACd,CAAC;YACD;;eAEG;YACH,MAAM,EAAE,CAAC,WAAyB,EAAE,EAAE,EAAE;gBACtC,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,kBACpF,WAAW,EAAE,QAAQ,CAAC,WAAW,IAC9B,kBAAkB,EACrB,CAAC;YACL,CAAC;SACF,CAAC;IACJ,CAAC;IAEO,uBAAuB;QAC7B,IAAI,aAAa,CAAC;QAClB,IAAI;YACF,mCAAmC;YACnC,mEAAmE;YACnE,yDAAyD;YACzD,+FAA+F;YAC/F,wDAAwD;YAExD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YAE7C,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;gBACjD,2DAA2D;gBAC3D,4CAA4C;gBAC5C,+BAA+B;gBAC/B,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;iBAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;gBACvC,8FAA8F;gBAC9F,wHAAwH;gBACxH,gDAAgD;gBAChD,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;iBAAM;gBACL,4CAA4C;gBAC5C,+BAA+B;gBAC/B,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;aACpD;YAED,mGAAmG;YACnG,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAElD,IAAI,CAAC,aAAa,EAAE;gBAClB,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;aACvD;YAED,OAAO,aAAa,CAAC;SACtB;QAAC,OAAO,KAAU,EAAE;YACnB,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;IACH,CAAC;IAED;;;;;;;;;;OAUG;IACI,cAAc,CAAC,OAAuC;QAC3D,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE;YAC7B,IAAI,CAAC,CAAC,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;gBAC5D,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;aACH;YAED,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,IAC/B,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;;;OAMG;IACI,kBAAkB;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { Container } from \"./generated/src/operations\";\nimport {\n BlobDeleteResponse,\n BlobPrefix,\n BlobProperties,\n BlockBlobUploadResponse,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ContainerEncryptionScope,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesResponse,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataResponse,\n FilterBlobItem,\n FilterBlobSegment,\n FilterBlobSegmentModel,\n LeaseAccessConditions,\n ListBlobsFlatSegmentResponseModel,\n ListBlobsHierarchySegmentResponseModel,\n PublicAccessType,\n SignedIdentifierModel,\n} from 
\"./generatedModels\";\nimport {\n Metadata,\n ObjectReplicationPolicy,\n Tags,\n ContainerRequestConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n BlobNameToString,\n ConvertInternalResponseOfListBlobFlat,\n ConvertInternalResponseOfListBlobHierarchy,\n extractConnectionStringParts,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n ProcessBlobItems,\n ProcessBlobPrefixes,\n toTags,\n truncatedISO8061Date,\n} from \"./utils/utils.common\";\nimport { ContainerSASPermissions } from \"./sas/ContainerSASPermissions\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n AppendBlobClient,\n BlobClient,\n BlobDeleteOptions,\n BlockBlobClient,\n BlockBlobUploadOptions,\n CommonGenerateSasUrlOptions,\n PageBlobClient,\n} from \"./Clients\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { ListBlobsIncludeItem } from \"./generated/src\";\n\n/**\n * Options to configure {@link ContainerClient.create} operation.\n */\nexport interface ContainerCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the container.\n */\n metadata?: Metadata;\n /**\n * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include:\n * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account.\n * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. 
Clients cannot enumerate blobs within the container via anonymous request.\n */\n access?: PublicAccessType;\n /**\n * Container encryption scope info.\n */\n containerEncryptionScope?: ContainerEncryptionScope;\n}\n\n/**\n * Options to configure {@link ContainerClient.getProperties} operation.\n */\nexport interface ContainerGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.delete} operation.\n */\nexport interface ContainerDeleteMethodOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting the container.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.exists} operation.\n */\nexport interface ContainerExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure {@link ContainerClient.setMetadata} operation.\n */\nexport interface ContainerSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.getAccessPolicy} operation.\n */\nexport interface ContainerGetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Signed identifier.\n */\nexport interface SignedIdentifier {\n /**\n * a unique id\n */\n id: string;\n /**\n * Access Policy\n */\n accessPolicy: {\n /**\n * Optional. The date-time the policy is active\n */\n startsOn?: Date;\n /**\n * Optional. 
The date-time the policy expires\n */\n expiresOn?: Date;\n /**\n * The permissions for the acl policy\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n */\n permissions?: string;\n };\n}\n\n/**\n * Contains response data for the {@link ContainerClient.getAccessPolicy} operation.\n */\nexport declare type ContainerGetAccessPolicyResponse = {\n signedIdentifiers: SignedIdentifier[];\n} & ContainerGetAccessPolicyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: SignedIdentifierModel[];\n };\n };\n\n/**\n * Options to configure {@link ContainerClient.setAccessPolicy} operation.\n */\nexport interface ContainerSetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting the access policy.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure Container - Acquire Lease operation.\n */\nexport interface ContainerAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Release Lease operation.\n */\nexport interface ContainerReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Renew Lease operation.\n */\nexport interface ContainerRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Break Lease operation.\n */\nexport interface ContainerBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Change Lease operation.\n */\nexport interface ContainerChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n 
abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.deleteBlob} operation.\n */\nexport interface ContainerDeleteBlobOptions extends BlobDeleteOptions {\n /**\n * An opaque DateTime value that, when present, specifies the version\n * of the blob to delete. It's for service version 2019-10-10 and newer.\n */\n versionId?: string;\n}\n\n/**\n * Options to configure Container - List Segment operations.\n *\n * See:\n * - {@link ContainerClient.listSegments}\n * - {@link ContainerClient.listBlobFlatSegment}\n * - {@link ContainerClient.listBlobHierarchySegment}\n * - {@link ContainerClient.listHierarchySegments}\n * - {@link ContainerClient.listItemsByHierarchy}\n */\ninterface ContainerListBlobsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify one or more datasets to include in the response.\n */\n include?: ListBlobsIncludeItem[];\n}\n\n/**\n * An interface representing BlobHierarchyListSegment.\n */\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobHierarchySegment operation.\n */\nexport type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse &\n ContainerListBlobHierarchySegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsHierarchySegmentResponseModel;\n };\n };\n\n/**\n * An Azure Storage blob\n */\nexport interface BlobItem {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n properties: BlobProperties;\n metadata?: { [propertyName: string]: string };\n tags?: Tags;\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n hasVersionsOnly?: boolean;\n}\n\n/**\n * An interface representing BlobFlatListSegment.\n */\nexport interface BlobFlatListSegment {\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface 
ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobFlatSegment operation.\n */\nexport type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse &\n ContainerListBlobFlatSegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsFlatSegmentResponseModel;\n };\n };\n\n/**\n * Options to configure Container - List Blobs operations.\n *\n * See:\n * - {@link ContainerClient.listBlobsFlat}\n * - {@link ContainerClient.listBlobsByHierarchy}\n */\nexport interface ContainerListBlobsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n\n /**\n * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response.\n */\n includeCopy?: boolean;\n /**\n * Specifies whether soft deleted blobs should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether blob metadata be returned in the response.\n */\n includeMetadata?: boolean;\n /**\n * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response.\n */\n includeSnapshots?: boolean;\n /**\n * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response.\n */\n includeVersions?: boolean;\n /**\n * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response.\n */\n includeUncommitedBlobs?: boolean;\n /**\n * Specifies whether blob tags be returned in the response.\n */\n includeTags?: boolean;\n /**\n * Specifies whether deleted blob with versions be returned in the response.\n */\n includeDeletedWithVersions?: boolean;\n /**\n * Specifies whether blob immutability policy be returned in the response.\n */\n includeImmutabilityPolicy?: boolean;\n /**\n * Specifies whether blob legal hold be returned in the response.\n */\n includeLegalHold?: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.createIfNotExists} operation.\n */\nexport interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse {\n /**\n * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.deleteIfExists} operation.\n */\nexport interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse {\n /**\n * Indicate whether the container is successfully deleted. 
Is false if the container does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Options to configure {@link ContainerClient.generateSasUrl} operation.\n */\nexport interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: ContainerSASPermissions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.findBlobsByTagsSegment} operation.\n */\ninterface ContainerFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ContainerFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ContainerFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.\n */\nexport class ContainerClient extends StorageClient {\n /**\n * containerContext provided by protocol layer.\n */\n private containerContext: Container;\n\n private _containerName: string;\n\n /**\n * The name of the container.\n */\n public get containerName(): string {\n return this._containerName;\n }\n /**\n *\n * Creates an instance of ContainerClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions);\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName parameter\");\n }\n super(url, pipeline);\n this._containerName = this.getContainerNameFromUrl();\n this.containerContext = new Container(this.storageClientContext);\n }\n\n /**\n * Creates a new container under the specified account. 
If the container with\n * the same name already exists, the operation fails.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options - Options to Container Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * const createContainerResponse = await containerClient.create();\n * console.log(\"Container was created successfully\", createContainerResponse.requestId);\n * ```\n */\n public async create(options: ContainerCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-create\", options);\n try {\n // Spread operator in destructuring assignments,\n // this will filter out unwanted properties from the response object into result object\n return await this.containerContext.create({\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new container under the specified account. If the container with\n * the same name already exists, it is not changed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options -\n */\n public async createIfNotExists(\n options: ContainerCreateOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-createIfNotExists\", options);\n try {\n const res = await this.create(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message:\n \"Expected exception when creating a container only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure container resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing container might be deleted by other clients or\n * applications. 
Vice versa new containers with the same name might be added by other clients or\n * applications after this function completes.\n *\n * @param options -\n */\n public async exists(options: ContainerExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-exists\", options);\n try {\n await this.getProperties({\n abortSignal: options.abortSignal,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when checking container existence\",\n });\n return false;\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a {@link BlobClient}\n *\n * @param blobName - A blob name\n * @returns A new BlobClient object for the given blob name.\n */\n public getBlobClient(blobName: string): BlobClient {\n return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n }\n\n /**\n * Creates an {@link AppendBlobClient}\n *\n * @param blobName - An append blob name\n */\n public getAppendBlobClient(blobName: string): AppendBlobClient {\n return new AppendBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Creates a {@link BlockBlobClient}\n *\n * @param blobName - A block blob name\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n *\n * const blockBlobClient = containerClient.getBlockBlobClient(\"\");\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public getBlockBlobClient(blobName: string): BlockBlobClient {\n return new BlockBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Creates a {@link PageBlobClient}\n *\n * @param blobName - A page blob name\n */\n public getPageBlobClient(blobName: string): PageBlobClient {\n return new PageBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Returns all user-defined metadata and system properties for the specified\n * container. The data returned does not include the container's list of blobs.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Options to Container Get Properties operation.\n */\n public async getProperties(\n options: ContainerGetPropertiesOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getProperties\", options);\n try {\n return await this.containerContext.getProperties({\n abortSignal: options.abortSignal,\n ...options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion. 
The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async delete(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-delete\", options);\n try {\n return await this.containerContext.delete({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion if it exists. The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async deleteIfExists(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteIfExists\", options);\n\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a container only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets one or more user-defined name-value pairs for the specified container.\n *\n * If no option provided, or no metadata defined in the parameter, the container\n * metadata will be removed.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Options to Container Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: ContainerSetMetadataOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n if (options.conditions.ifUnmodifiedSince) {\n throw new RangeError(\n \"the IfUnmodifiedSince must have their default values because they are ignored by the blob service\"\n );\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-setMetadata\", options);\n\n try {\n return await this.containerContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the permissions for the specified container. 
The permissions indicate\n * whether container data may be accessed publicly.\n *\n * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.\n * For example, new Date(\"2018-12-31T03:44:23.8827891Z\").toISOString() will get \"2018-12-31T03:44:23.882Z\".\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl\n *\n * @param options - Options to Container Get Access Policy operation.\n */\n public async getAccessPolicy(\n options: ContainerGetAccessPolicyOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getAccessPolicy\", options);\n\n try {\n const response = await this.containerContext.getAccessPolicy({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const res: ContainerGetAccessPolicyResponse = {\n _response: response._response,\n blobPublicAccess: response.blobPublicAccess,\n date: response.date,\n etag: response.etag,\n errorCode: response.errorCode,\n lastModified: response.lastModified,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n signedIdentifiers: [],\n version: response.version,\n };\n\n for (const identifier of response) {\n let accessPolicy: any = undefined;\n if (identifier.accessPolicy) {\n accessPolicy = {\n permissions: identifier.accessPolicy.permissions,\n };\n\n if (identifier.accessPolicy.expiresOn) {\n accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);\n }\n\n if (identifier.accessPolicy.startsOn) {\n accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);\n }\n }\n\n res.signedIdentifiers.push({\n accessPolicy,\n id: identifier.id,\n });\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the permissions for the specified container. The permissions indicate\n * whether blobs in a container may be accessed publicly.\n *\n * When you set permissions for a container, the existing permissions are replaced.\n * If no access or containerAcl provided, the existing container ACL will be\n * removed.\n *\n * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.\n * During this interval, a shared access signature that is associated with the stored access policy will\n * fail with status code 403 (Forbidden), until the access policy becomes active.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n *\n * @param access - The level of public access to data in the container.\n * @param containerAcl - Array of elements each having a unique Id and details of the access policy.\n * @param options - Options to Container Set Access Policy operation.\n */\n public async setAccessPolicy(\n access?: PublicAccessType,\n containerAcl?: SignedIdentifier[],\n options: ContainerSetAccessPolicyOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"ContainerClient-setAccessPolicy\", options);\n try {\n const acl: SignedIdentifierModel[] = [];\n for (const identifier of containerAcl || []) {\n acl.push({\n accessPolicy: {\n expiresOn: identifier.accessPolicy.expiresOn\n ? 
truncatedISO8061Date(identifier.accessPolicy.expiresOn)\n : \"\",\n permissions: identifier.accessPolicy.permissions,\n startsOn: identifier.accessPolicy.startsOn\n ? truncatedISO8061Date(identifier.accessPolicy.startsOn)\n : \"\",\n },\n id: identifier.id,\n });\n }\n\n return await this.containerContext.setAccessPolicy({\n abortSignal: options.abortSignal,\n access,\n containerAcl: acl,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the container.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the container.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n *\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},\n * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better\n * performance with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param blobName - Name of the block blob to create or update.\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to configure the Block Blob Upload operation.\n * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.\n */\n public async uploadBlockBlob(\n blobName: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }> {\n const { span, updatedOptions } = createSpan(\"ContainerClient-uploadBlockBlob\", options);\n try {\n const blockBlobClient = this.getBlockBlobClient(blobName);\n const response = await blockBlobClient.upload(body, contentLength, updatedOptions);\n return {\n blockBlobClient,\n response,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param blobName -\n * @param options - Options to Blob Delete operation.\n * @returns Block blob deletion response data.\n */\n public async deleteBlob(\n blobName: string,\n options: ContainerDeleteBlobOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteBlob\", options);\n try {\n let blobClient = this.getBlobClient(blobName);\n if (options.versionId) {\n blobClient = blobClient.withVersion(options.versionId);\n }\n return await blobClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobFlatSegment returns a single segment of blobs starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call listBlobsFlatSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Flat Segment operation.\n */\n private async listBlobFlatSegment(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-listBlobFlatSegment\", options);\n try {\n const response = await this.containerContext.listBlobFlatSegment({\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n response.segment.blobItems = [];\n if ((response.segment as any)[\"Blob\"] !== undefined) {\n response.segment.blobItems = ProcessBlobItems((response.segment as any)[\"Blob\"]);\n }\n\n const wrappedResponse: ContainerListBlobFlatSegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobHierarchySegment returns a single segment of blobs starting from\n * the specified Marker. Use an empty Marker to start enumeration from the\n * beginning. 
After getting a segment, process it, and then call listBlobsHierarchicalSegment\n * again (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Hierarchy Segment operation.\n */\n private async listBlobHierarchySegment(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"ContainerClient-listBlobHierarchySegment\",\n options\n );\n try {\n const response = await this.containerContext.listBlobHierarchySegment(delimiter, {\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n response.segment.blobItems = [];\n if ((response.segment as any)[\"Blob\"] !== undefined) {\n response.segment.blobItems = ProcessBlobItems((response.segment as any)[\"Blob\"]);\n }\n\n response.segment.blobPrefixes = [];\n if ((response.segment as any)[\"BlobPrefix\"] !== undefined) {\n response.segment.blobPrefixes = ProcessBlobPrefixes(\n (response.segment as any)[\"BlobPrefix\"]\n );\n }\n\n const wrappedResponse: ContainerListBlobHierarchySegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefix = {\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse\n *\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listSegments(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsFlatSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options);\n marker = listBlobsFlatSegmentResponse.continuationToken;\n yield await listBlobsFlatSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link BlobItem} objects\n *\n * @param options - Options to list blobs operation.\n */\n private async *listItems(\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) {\n yield* listBlobsFlatSegmentResponse.segment.blobItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the containerClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\");`\n * let i = 1;\n * for await (const blob of containerClient.listBlobsFlat()) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = containerClient.listBlobsFlat();\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * @param options - Options to list blobs.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listBlobsFlat(\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator {\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n 
include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include: include } : {}),\n };\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listItems(updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listHierarchySegments(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsHierarchySegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(\n delimiter,\n marker,\n options\n );\n marker = listBlobsHierarchySegmentResponse.continuationToken;\n yield await listBlobsHierarchySegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n private async *listItemsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator<({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem)> {\n let marker: string | undefined;\n for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(\n delimiter,\n marker,\n options\n )) {\n const segment = listBlobsHierarchySegmentResponse.segment;\n if (segment.blobPrefixes) {\n for (const prefix of segment.blobPrefixes) {\n yield {\n kind: \"prefix\",\n ...prefix,\n };\n }\n }\n for (const blob of segment.blobItems) {\n yield { kind: \"blob\", ...blob };\n }\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs by hierarchy.\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * for await (const item of 
containerClient.listBlobsByHierarchy(\"/\")) {\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let iter = containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix1/\" });\n * let entity = await iter.next();\n * while (!entity.done) {\n * let item = entity.value;\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * entity = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page\");\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\").byPage()) {\n * const segment = response.segment;\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a max page size:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page, specifying a prefix and a max page size\");\n *\n * let i = 1;\n * for await (const response of containerClient\n * .listBlobsByHierarchy(\"/\", { prefix: \"prefix2/sub1/\" })\n * .byPage({ maxPageSize: 2 })) {\n * console.log(`Page ${i++}`);\n * const segment = response.segment;\n *\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n *\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n public listBlobsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator<\n ({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem),\n ContainerListBlobHierarchySegmentResponse\n > {\n if (delimiter === \"\") {\n throw new RangeError(\"delimiter should contain one or more characters\");\n }\n\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? 
{ include: include } : {}),\n };\n // AsyncIterableIterator to iterate over blob prefixes and blobs\n const iter = this.listItemsByHierarchy(delimiter, updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n async next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listHierarchySegments(delimiter, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in the container whose tags\n * match a given search expression.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-findBlobsByTagsSegment\", options);\n\n try {\n const response = await this.containerContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. 
The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified container.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of containerClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = containerClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = containerClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n 
* // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ContainerFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n private getContainerNameFromUrl(): string {\n let containerName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`\n // http://localhost:10001/devstoreaccount1/containername\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername\".\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername\n // .getPath() -> /devstoreaccount1/containername\n containerName = parsedUrl.getPath()!.split(\"/\")[2];\n } else {\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n }\n\n // decode the encoded containerName - to get all the special characters that might be present in it\n containerName = decodeURIComponent(containerName);\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return containerName;\n } catch (error: any) {\n throw new Error(\"Unable to extract containerName with provided information.\");\n }\n }\n\n /**\n * Only available for ContainerClient constructed with a shared key credential.\n *\n * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. 
The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this container.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js new file mode 100644 index 0000000000..4854ba6f72 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. 
+ * @param response - Model PageBlob Range response + */ +export function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} +//# sourceMappingURL=PageBlobRangeResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map new file mode 100644 index 0000000000..d442d121f2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/PageBlobRangeResponse.js.map @@ -0,0 +1 @@ +{"version":3,"file":"PageBlobRangeResponse.js","sourceRoot":"","sources":["../../../src/PageBlobRangeResponse.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AA8ElC;;;;;GAKG;AACH,MAAM,UAAU,sBAAsB,CACpC,QAAqF;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;QACf,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;KACvB,CAAC,CAAC,CAAC;IAEJ,uCACK,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,kCACJ,QAAQ,CAAC,SAAS,KACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,OAEH;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n 
*/\n parsedBody: PageList;\n };\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n ...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js new file mode 100644 index 0000000000..bbb977ae98 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, deserializationPolicy, disableResponseDecompressionPolicy, HttpHeaders, RequestPolicyOptions, WebResource, proxyPolicy, isNode, isTokenCredential, tracingPolicy, logPolicy, keepAlivePolicy, generateClientRequestIdPolicy, } from "@azure/core-http"; +import { logger } from "./log"; +import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory"; +import { StorageRetryPolicyFactory } from "./StorageRetryPolicyFactory"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageOAuthScopes, StorageBlobLoggingAllowedHeaderNames, StorageBlobLoggingAllowedQueryParameters, } from "./utils/constants"; +import { TelemetryPolicyFactory } from "./TelemetryPolicyFactory"; +import { getCachedDefaultHttpClient } from "./utils/cache"; +import { attachCredential } from "./utils/utils.common"; +import { storageBearerTokenChallengeAuthenticationPolicy } from "./policies/StorageBearerTokenChallengeAuthenticationPolicy"; +// Export following interfaces and types for customers who want to implement their +// own RequestPolicy or HTTPClient +export { BaseRequestPolicy, StorageOAuthScopes, deserializationPolicy, HttpHeaders, WebResource, RequestPolicyOptions, }; +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export class Pipeline { + /** + * Creates an instance of Pipeline. 
Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + // when options.httpClient is not specified, passing in a DefaultHttpClient instance to + // avoid each client creating its own http client. + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === undefined) { + credential = new AnonymousCredential(); + } + // Order is important. Closer to the API at the top & closer to the network at the bottom. + // The credential's policy factory must appear close to the wire so it can sign any + // changes made by other factories (like UniqueRequestIDPolicyFactory) + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + deserializationPolicy(undefined, { xmlCharKey: "#" }), + logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), + ]; + if (isNode) { + // policies only available in Node.js runtime, not in browsers + factories.push(proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(disableResponseDecompressionPolicy()); + } + factories.push(isTokenCredential(credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes), credential) + : credential); + return new Pipeline(factories, pipelineOptions); +} +//# sourceMappingURL=Pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map new file mode 100644 index 0000000000..58db66556c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Pipeline.js","sourceRoot":"","sources":["../../../src/Pipeline.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,qBAAqB,EACrB,kCAAkC,EAElC,WAAW,EAKX,oBAAoB,EAEpB,WAAW,EACX,WAAW,EACX,MAAM,EAEN,iBAAiB,EACjB,aAAa,EACb,SAAS,EAET,eAAe,EAEf,6BAA6B,GAE9B,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,2BAA2B,EAAE,MAAM,+BAA+B,CAAC;AAC5E,OAAO,EAAuB,yBAAyB,EAAE,MAAM,6BAA6B,CAAC;AAE7F,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EACL,kBAAkB,EAClB,oCAAoC,EACpC,wCAAwC,GACzC,MAAM,mBAAmB,CAAC;AAC3B,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC;AAClE,OAAO,EAAE,0BAA0B,EAAE,MAAM,eAAe,CAAC;AAC3D,OAAO,EAAE,gBAAgB,EAAE,MAAM,sBAAsB,CAAC;AACxD,OAAO,EAAE,+CAA+C,EAAE,MAAM,4DAA4D,CAAC;AAE7H,kFAAkF;AAClF,kCAAkC;AAClC,OAAO,EACL,iBAAiB,EACjB,kBAAkB,EAClB,qBAAqB,EAErB,WAAW,EAGX,WAAW,EAGX,oBAAoB,GACrB,CAAC;AAsCF;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,QAAiB;IAC9C,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;QAC7C,OAAO,KAAK,CAAC;KACd;IAED,MAAM,YAAY,GAAG,QAAwB,CAAC;IAE9C,OAAO,CACL,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,SAAS,CAAC;QACrC,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ;QACxC,OAAO,YAAY,CAAC,sBAAsB,KAAK,UAAU,CAC1D,CAAC;AACJ,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,OAAO,QAAQ;IAUnB;;;;;OAKG;IACH,YAAY,SAAiC,EAAE,UAA2B,EAAE;QAC1E,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAC3B,uFAAuF;QACvF,kDAAkD;QAClD,IAAI,CAAC,OAAO,mCACP,OAAO,KACV,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,GAC/D,CAAC;IACJ,CAAC;IAED;;;;;OAKG;IACI,sBAAsB;QAC3B,OAAO;YACL,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU;YACnC,sBAAsB,EAAE,IAAI,CAAC,SAAS;SACvC,CAAC;IACJ,CAAC;CACF;AAgCD;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CACzB,UAA+E,EAC/E,kBAA0C,EAAE;;IAE5C,IAAI,UAAU,KAAK,SAAS,EAAE;QAC5B,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;KACxC;IAED,0FAA0F;IAC1F,mFAAmF;IACnF,sEAAsE;IAEtE,MAAM,eAAe,GAAG,IAAI,sBAAsB,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;IACrF,MAAM,SAAS,GAA2B;QACxC,aAAa,CAAC,EAAE,SAAS,EAAE,eAAe,CAAC,eAAe,EAAE,CAAC;QAC7D,eAAe,CAAC,eAAe,CAAC,gBAAgB,CAAC;QACjD,eAAe;QACf,6BAA6B,EAAE;QAC/B,IAAI,2BAA2B,EAAE;QACjC,IAAI,yBAAyB,CAAC,eAAe,CAAC,YAAY,CAAC;QAC3D,8DAA8D;QAC9D,sEAAsE;QACtE,eAAe;QACf,qBAAqB,CAAC,SAAS,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;QACrD,SAAS,CAAC;YACR,MAAM,EAAE,MAAM,CAAC,IAAI;YACnB,kBAAkB,EAAE,oCAAoC;YACxD,sBAAsB,EAAE,wCAAwC;SACjE,CAAC;KACH,CAAC;IAEF,IAAI,MAAM,EAAE;QACV,8DAA8D;QAC9D,SAAS,CAAC,IAAI,CAAC,WAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;QAC1D,SAAS,CAAC,IAAI,CAAC,kCAAkC,EAAE,CAAC,CAAC;KACtD;IACD,SAAS,CAAC,IAAI,CACZ,iBAAiB,CAAC,UAAU,CAAC;QAC3B,CAAC,CAAC,gBAAgB,CACd,+CAA+C,CAC7C,UAAU,EACV,MAAA,eAAe,CAAC,QAAQ,mCAAI,kBAAkB,CAC/C,EACD,UAAU,CACX;QACH,CAAC,CAAC,UAAU,CACf,CAAC;IAEF,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AAClD,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n disableResponseDecompressionPolicy,\n HttpClient as IHttpClient,\n HttpHeaders,\n HttpOperationResponse,\n HttpRequestBody,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n ServiceClientOptions,\n WebResource,\n proxyPolicy,\n isNode,\n TokenCredential,\n 
isTokenCredential,\n tracingPolicy,\n logPolicy,\n ProxyOptions,\n keepAlivePolicy,\n KeepAliveOptions,\n generateClientRequestIdPolicy,\n UserAgentOptions,\n} from \"@azure/core-http\";\n\nimport { logger } from \"./log\";\nimport { StorageBrowserPolicyFactory } from \"./StorageBrowserPolicyFactory\";\nimport { StorageRetryOptions, StorageRetryPolicyFactory } from \"./StorageRetryPolicyFactory\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport {\n StorageOAuthScopes,\n StorageBlobLoggingAllowedHeaderNames,\n StorageBlobLoggingAllowedQueryParameters,\n} from \"./utils/constants\";\nimport { TelemetryPolicyFactory } from \"./TelemetryPolicyFactory\";\nimport { getCachedDefaultHttpClient } from \"./utils/cache\";\nimport { attachCredential } from \"./utils/utils.common\";\nimport { storageBearerTokenChallengeAuthenticationPolicy } from \"./policies/StorageBearerTokenChallengeAuthenticationPolicy\";\n\n// Export following interfaces and types for customers who want to implement their\n// own RequestPolicy or HTTPClient\nexport {\n BaseRequestPolicy,\n StorageOAuthScopes,\n deserializationPolicy,\n IHttpClient,\n HttpHeaders,\n HttpRequestBody,\n HttpOperationResponse,\n WebResource,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptions,\n};\n\n/**\n * Option interface for Pipeline constructor.\n */\nexport interface PipelineOptions {\n /**\n * Optional. Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n}\n\n/**\n * An interface for the {@link Pipeline} class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport interface PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger and HTTP client.\n */\n readonly options: PipelineOptions;\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n toServiceClientOptions(): ServiceClientOptions;\n}\n\n/**\n * A helper to decide if a given argument satisfies the Pipeline contract\n * @param pipeline - An argument that may be a Pipeline\n * @returns true when the argument satisfies the Pipeline contract\n */\nexport function isPipelineLike(pipeline: unknown): pipeline is PipelineLike {\n if (!pipeline || typeof pipeline !== \"object\") {\n return false;\n }\n\n const castPipeline = pipeline as PipelineLike;\n\n return (\n Array.isArray(castPipeline.factories) &&\n typeof castPipeline.options === \"object\" &&\n typeof castPipeline.toServiceClientOptions === \"function\"\n );\n}\n\n/**\n * A Pipeline class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport class Pipeline implements PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n public readonly factories: RequestPolicyFactory[];\n /**\n * 
Configures pipeline logger and HTTP client.\n */\n public readonly options: PipelineOptions;\n\n /**\n * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param factories -\n * @param options -\n */\n constructor(factories: RequestPolicyFactory[], options: PipelineOptions = {}) {\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = {\n ...options,\n httpClient: options.httpClient || getCachedDefaultHttpClient(),\n };\n }\n\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n public toServiceClientOptions(): ServiceClientOptions {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories,\n };\n }\n}\n\n/**\n * Options interface for the {@link newPipeline} function.\n */\nexport interface StoragePipelineOptions {\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n /**\n * Configures the built-in retry policy behavior.\n */\n retryOptions?: StorageRetryOptions;\n /**\n * Keep alive configurations. Default keep-alive is enabled.\n */\n keepAliveOptions?: KeepAliveOptions;\n /**\n * Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n /**\n * The audience used to retrieve an AAD token.\n */\n audience?: string | string[];\n}\n\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param pipelineOptions - Optional. Options.\n * @returns A new Pipeline object.\n */\nexport function newPipeline(\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n pipelineOptions: StoragePipelineOptions = {}\n): Pipeline {\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n\n const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n const factories: RequestPolicyFactory[] = [\n tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions), // Retry policy should be above any policy that throws retryable errors\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,\n }),\n ];\n\n if (isNode) {\n // policies only available in Node.js runtime, not in browsers\n factories.push(proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(disableResponseDecompressionPolicy());\n }\n factories.push(\n isTokenCredential(credential)\n ? attachCredential(\n storageBearerTokenChallengeAuthenticationPolicy(\n credential,\n pipelineOptions.audience ?? StorageOAuthScopes\n ),\n credential\n )\n : credential\n );\n\n return new Pipeline(factories, pipelineOptions);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js new file mode 100644 index 0000000000..ea83a0c32f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +export function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? 
`bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} +//# sourceMappingURL=Range.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map new file mode 100644 index 0000000000..a501b75853 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Range.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Range.js","sourceRoot":"","sources":["../../../src/Range.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,aAAa,CAAC,MAAa;IACzC,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;QACrB,MAAM,IAAI,UAAU,CAAC,wCAAwC,CAAC,CAAC;KAChE;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE;QACrC,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;KACH;IACD,OAAO,MAAM,CAAC,KAAK;QACjB,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,IAAI,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE;QAC9D,CAAC,CAAC,SAAS,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js new file mode 100644 index 0000000000..e35175c93a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageBrowserPolicy } from "./policies/StorageBrowserPolicy"; +export { StorageBrowserPolicy }; +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=StorageBrowserPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map new file mode 100644 index 0000000000..655540db28 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageBrowserPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBrowserPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageBrowserPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,oBAAoB,EAAE,MAAM,iCAAiC,CAAC;AACvE,OAAO,EAAE,oBAAoB,EAAE,CAAC;AAEhC;;GAEG;AACH,MAAM,OAAO,2BAA2B;IACtC;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IACvD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageBrowserPolicy } from \"./policies/StorageBrowserPolicy\";\nexport { StorageBrowserPolicy };\n\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nexport class StorageBrowserPolicyFactory implements RequestPolicyFactory {\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {\n return new StorageBrowserPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js new file mode 100644 index 0000000000..c36e4aceb3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageClientContext } from "./generated/src/storageClientContext"; +import { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from "./utils/utils.common"; +import { AnonymousCredential } from "./credentials/AnonymousCredential"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; +import { isTokenCredential, isNode } from "@azure/core-http"; +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +export class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. 
+ */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if ((isNode && factory instanceof StorageSharedKeyCredential) || + factory instanceof AnonymousCredential) { + this.credential = factory; + } + else if (isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + this.credential = factory.credential; + } + } + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} +//# sourceMappingURL=StorageClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map new file mode 100644 index 0000000000..d7bf4fc385 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageClient.js","sourceRoot":"","sources":["../../../src/StorageClient.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,oBAAoB,EAAE,MAAM,sCAAsC,CAAC;AAE5E,OAAO,EAAE,aAAa,EAAE,YAAY,EAAE,MAAM,EAAE,qBAAqB,EAAE,MAAM,sBAAsB,CAAC;AAClG,OAAO,EAAE,mBAAmB,EAAE,MAAM,mCAAmC,CAAC;AACxE,OAAO,EAAE,0BAA0B,EAAE,MAAM,0CAA0C,CAAC;AACtF,OAAO,EAAmB,iBAAiB,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAa9E;;;GAGG;AACH,MAAM,OAAgB,aAAa;IAyBjC;;;;OAIG;IACH,YAAsB,GAAW,EAAE,QAAsB;QACvD,iFAAiF;QACjF,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;QAC9C,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAClD,IAAI,CAAC,GAAG,EACR,QAAQ,CAAC,sBAAsB,EAAE,CAClC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;QAE7D,IAAI,CAAC,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAC5C,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE;YAC7C,IACE,CAAC,MAAM,IAAI,OAAO,YAAY,0BAA0B,CAAC;gBACzD,OAAO,YAAY,mBAAmB,EACtC;gBACA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC;aAC3B;iBAAM,IAAI,iBAAiB,CAAE,OAAe,CAAC,UAAU,CAAC,EAAE;gBACzD,uEAAuE;gBACvE,0DAA0D;gBAC1D,IAAI,CAAC,UAAU,GAAI,OAAe,CAAC,UAAU,CAAC;aAC/C;SACF;QAED,iDAAiD;QACjD,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;QAC9D,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;IACtD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential, isTokenCredential, isNode } from \"@azure/core-http\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans 
created when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(\n this.url,\n pipeline.toServiceClientOptions()\n );\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = new AnonymousCredential();\n for (const factory of this.pipeline.factories) {\n if (\n (isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential\n ) {\n this.credential = factory;\n } else if (isTokenCredential((factory as any).credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = (factory as any).credential;\n }\n }\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js new file mode 100644 index 0000000000..4f057291e6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { StorageRetryPolicy, StorageRetryPolicyType } from "./policies/StorageRetryPolicy"; +export { StorageRetryPolicyType, StorageRetryPolicy }; +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +export class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} +//# sourceMappingURL=StorageRetryPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map new file mode 100644 index 0000000000..9cfff5830c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageRetryPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageRetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/StorageRetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,kBAAkB,EAAE,sBAAsB,EAAE,MAAM,+BAA+B,CAAC;AAE3F,OAAO,EAAE,sBAAsB,EAAE,kBAAkB,EAAE,CAAC;AAmDtD;;GAEG;AACH,MAAM,OAAO,yBAAyB;IAGpC;;;OAGG;IACH,YAAY,YAAkC;QAC5C,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageRetryPolicy, StorageRetryPolicyType } from \"./policies/StorageRetryPolicy\";\n\nexport { StorageRetryPolicyType, StorageRetryPolicy };\n\n/**\n * Storage Blob retry options interface.\n */\nexport interface StorageRetryOptions {\n /**\n * Optional. StorageRetryPolicyType, default is exponential retry policy.\n */\n readonly retryPolicyType?: StorageRetryPolicyType;\n\n /**\n * Optional. Max try number of attempts, default is 4.\n * A value of 1 means 1 try and no retries.\n * A value smaller than 1 means default retry number of attempts.\n */\n readonly maxTries?: number;\n\n /**\n * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.\n * A value of zero or undefined means no default timeout on SDK client, Azure\n * Storage server's default timeout policy will be used.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations\n */\n readonly tryTimeoutInMs?: number;\n\n /**\n * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).\n * The delay increases (exponentially or linearly) with each retry up to a maximum specified by\n * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.\n */\n readonly retryDelayInMs?: number;\n\n /**\n * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).\n * If you specify 0, then you must also specify 0 for retryDelayInMs.\n */\n readonly maxRetryDelayInMs?: number;\n\n /**\n * If a secondaryHost is specified, retries will be tried against this host. 
If secondaryHost is undefined\n * (the default) then operations are not retried against another host.\n *\n * NOTE: Before setting this field, make sure you understand the issues around\n * reading stale and potentially-inconsistent data at\n * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}\n */\n readonly secondaryHost?: string;\n}\n\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nexport class StorageRetryPolicyFactory implements RequestPolicyFactory {\n private retryOptions?: StorageRetryOptions;\n\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n constructor(retryOptions?: StorageRetryOptions) {\n this.retryOptions = retryOptions;\n }\n\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js new file mode 100644 index 0000000000..a2e71a3b70 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js @@ -0,0 +1,50 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode, } from "@azure/core-http"; +import * as os from "os"; +import { TelemetryPolicy } from "./policies/TelemetryPolicy"; +import { SDK_VERSION } from "./utils/constants"; +/** + * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + */ +export class TelemetryPolicyFactory { + /** + * Creates an instance of TelemetryPolicyFactory. + * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + // e.g. azsdk-js-storageblob/10.0.0 + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) + let runtimeInfo = `(NODE-VERSION ${process.version})`; + if (os) { + runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + } + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } + } + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } +} +//# sourceMappingURL=TelemetryPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map new file mode 100644 index 0000000000..ee8cbcacbf --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TelemetryPolicyFactory.js","sourceRoot":"","sources":["../../../src/TelemetryPolicyFactory.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,MAAM,GAKP,MAAM,kBAAkB,CAAC;AAC1B,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AAEzB,OAAO,EAAE,eAAe,EAAE,MAAM,4BAA4B,CAAC;AAC7D,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD;;GAEG;AACH,MAAM,OAAO,sBAAsB;IAMjC;;;OAGG;IACH,YAAY,SAA4B;QACtC,MAAM,aAAa,GAAa,EAAE,CAAC;QAEnC,IAAI,MAAM,EAAE;YACV,IAAI,SAAS,EAAE;gBACb,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,IAAI,EAAE,CAAC;gBACxD,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE;oBAC/E,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;iBACrC;aACF;YAED,mCAAmC;YACnC,MAAM,OAAO,GAAG,wBAAwB,WAAW,EAAE,CAAC;YACtD,IAAI,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;gBACzC,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;aAC7B;YAED,mDAAmD;YACnD,IAAI,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,GAAG,CAAC;YACtD,IAAI,EAAE,EAAE;gBACN,WAAW,GAAG,iBAAiB,OAAO,CAAC,OAAO,KAAK,EAAE,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,OAAO,EAAE,GAAG,CAAC;aACjF;YACD,IAAI,aAAa,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;gBAC7C,aAAa,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;aACjC;SACF;QAED,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IACjD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B;QACpE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;IACxE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n isNode,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n UserAgentOptions,\n} from \"@azure/core-http\";\nimport * as os from \"os\";\n\nimport { TelemetryPolicy } from \"./policies/TelemetryPolicy\";\nimport { SDK_VERSION } from \"./utils/constants\";\n\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n */\nexport class TelemetryPolicyFactory implements RequestPolicyFactory {\n /**\n * @internal\n */\n public readonly telemetryString: string;\n\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param telemetry -\n */\n constructor(telemetry?: UserAgentOptions) {\n const userAgentInfo: string[] = [];\n\n if (isNode) {\n if (telemetry) {\n const telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n\n // e.g. azsdk-js-storageblob/10.0.0\n const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n\n // e.g. 
(NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n let runtimeInfo = `(NODE-VERSION ${process.version})`;\n if (os) {\n runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;\n }\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n\n this.telemetryString = userAgentInfo.join(\" \");\n }\n\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): TelemetryPolicy {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js new file mode 100644 index 0000000000..2511e14492 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AnonymousCredentialPolicy } from "../policies/AnonymousCredentialPolicy"; +import { Credential } from "./Credential"; +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} +//# sourceMappingURL=AnonymousCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map new file mode 100644 index 0000000000..6819d4436a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/AnonymousCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AnonymousCredential.js","sourceRoot":"","sources":["../../../../src/credentials/AnonymousCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,yBAAyB,EAAE,MAAM,uCAAuC,CAAC;AAClF,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;;GAKG;AACH,MAAM,OAAO,mBAAoB,SAAQ,UAAU;IACjD;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC5D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { AnonymousCredentialPolicy } from \"../policies/AnonymousCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. 
AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nexport class AnonymousCredential extends Credential {\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): AnonymousCredentialPolicy {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js new file mode 100644 index 0000000000..6db7be6a4d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +export class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} +//# sourceMappingURL=Credential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map new file mode 100644 index 0000000000..a7948dc95d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/Credential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Credential.js","sourceRoot":"","sources":["../../../../src/credentials/Credential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAKlC;;;GAGG;AACH,MAAM,OAAgB,UAAU;IAC9B;;;;;OAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B;QACtE,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;IACvE,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { CredentialPolicy } from \"../policies/CredentialPolicy\";\n\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. 
This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nexport abstract class Credential implements RequestPolicyFactory {\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {\n throw new Error(\"Method should be implemented in children classes.\");\n }\n}\n\n/**\n * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.\n */\nexport type CredentialPolicyCreator = (\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n) => CredentialPolicy;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js new file mode 100644 index 0000000000..1235e86379 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class StorageSharedKeyCredential { +} +//# sourceMappingURL=StorageSharedKeyCredential.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map new file mode 100644 index 0000000000..12177de4be --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageSharedKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,0BAA0B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class StorageSharedKeyCredential {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js new file mode 100644 index 0000000000..c47b1ceb95 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHmac } from "crypto"; +import { StorageSharedKeyCredentialPolicy } from "../policies/StorageSharedKeyCredentialPolicy"; +import { Credential } from "./Credential"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} +//# sourceMappingURL=StorageSharedKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map new file mode 100644 index 0000000000..c73c0e4025 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageSharedKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/StorageSharedKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC,OAAO,EAAE,gCAAgC,EAAE,MAAM,8CAA8C,CAAC;AAChG,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAE1C;;;;GAIG;AACH,MAAM,OAAO,0BAA2B,SAAQ,UAAU;IAWxD;;;;OAIG;IACH,YAAY,WAAmB,EAAE,UAAkB;QACjD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;IACtD,CAAC;IAED;;;;;OAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;IACzE,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IAC7F,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js new file mode 100644 index 0000000000..cffcb3ce62 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class UserDelegationKeyCredential { +} +//# sourceMappingURL=UserDelegationKeyCredential.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map new file mode 100644 index 0000000000..0a1e590c56 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"UserDelegationKeyCredential.browser.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,OAAO,2BAA2B;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport class UserDelegationKeyCredential {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js new file mode 100644 index 0000000000..fa438f699c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHmac } from "crypto"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +export class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. 
+ * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} +//# sourceMappingURL=UserDelegationKeyCredential.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map new file mode 100644 index 0000000000..df787bb71b --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js.map @@ -0,0 +1 @@ +{"version":3,"file":"UserDelegationKeyCredential.js","sourceRoot":"","sources":["../../../../src/credentials/UserDelegationKeyCredential.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,QAAQ,CAAC;AAGpC;;;;;GAKG;AACH,MAAM,OAAO,2BAA2B;IAgBtC;;;;OAIG;IACH,YAAY,WAAmB,EAAE,iBAAoC;QACnE,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;QAC3C,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;IAC5D,CAAC;IAED;;;;OAIG;IACI,iBAAiB,CAAC,YAAoB;QAC3C,gEAAgE;QAEhE,OAAO,UAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;IACtF,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js new file mode 100644 index 0000000000..290e550ab2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js @@ -0,0 +1,11 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +export * from "./models"; +export { StorageClient } from "./storageClient"; +export { StorageClientContext } from "./storageClientContext"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map new file mode 100644 index 0000000000..5601aab1d8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../src/generated/src/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,UAAU,CAAC;AACzB,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./models\";\nexport { StorageClient } from \"./storageClient\";\nexport { StorageClientContext } from \"./storageClientContext\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js new file mode 100644 index 0000000000..9593344831 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export {}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map new file mode 100644 index 0000000000..4fc3835acf --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\n/** Storage Service Properties. */\nexport interface BlobServiceProperties {\n /** Azure Analytics Logging settings. */\n blobAnalyticsLogging?: Logging;\n /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\n hourMetrics?: Metrics;\n /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\n minuteMetrics?: Metrics;\n /** The set of CORS rules. */\n cors?: CorsRule[];\n /** The default version to use for requests to the Blob service if an incoming request's version is not specified. 
Possible values include version 2008-10-27 and all more recent versions */\n defaultServiceVersion?: string;\n /** the retention policy which determines how long the associated data should persist */\n deleteRetentionPolicy?: RetentionPolicy;\n /** The properties that enable an account to host a static website */\n staticWebsite?: StaticWebsite;\n}\n\n/** Azure Analytics Logging settings. */\nexport interface Logging {\n /** The version of Storage Analytics to configure. */\n version: string;\n /** Indicates whether all delete requests should be logged. */\n deleteProperty: boolean;\n /** Indicates whether all read requests should be logged. */\n read: boolean;\n /** Indicates whether all write requests should be logged. */\n write: boolean;\n /** the retention policy which determines how long the associated data should persist */\n retentionPolicy: RetentionPolicy;\n}\n\n/** the retention policy which determines how long the associated data should persist */\nexport interface RetentionPolicy {\n /** Indicates whether a retention policy is enabled for the storage service */\n enabled: boolean;\n /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */\n days?: number;\n}\n\n/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */\nexport interface Metrics {\n /** The version of Storage Analytics to configure. */\n version?: string;\n /** Indicates whether metrics are enabled for the Blob service. */\n enabled: boolean;\n /** Indicates whether metrics should generate summary statistics for called API operations. */\n includeAPIs?: boolean;\n /** the retention policy which determines how long the associated data should persist */\n retentionPolicy?: RetentionPolicy;\n}\n\n/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */\nexport interface CorsRule {\n /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. Note that the origin must be an exact case-sensitive match with the origin that the user age sends to the service. You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */\n allowedOrigins: string;\n /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */\n allowedMethods: string;\n /** the request headers that the origin domain may specify on the CORS request. */\n allowedHeaders: string;\n /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */\n exposedHeaders: string;\n /** The maximum amount time that a browser should cache the preflight OPTIONS request. 
*/\n maxAgeInSeconds: number;\n}\n\n/** The properties that enable an account to host a static website */\nexport interface StaticWebsite {\n /** Indicates whether this account is hosting a static website */\n enabled: boolean;\n /** The default name of the index page under each directory */\n indexDocument?: string;\n /** The absolute path of the custom 404 page */\n errorDocument404Path?: string;\n /** Absolute path of the default index page */\n defaultIndexDocumentPath?: string;\n}\n\nexport interface StorageError {\n message?: string;\n code?: string;\n}\n\n/** Stats for the storage service. */\nexport interface BlobServiceStatistics {\n /** Geo-Replication information for the Secondary Storage Service */\n geoReplication?: GeoReplication;\n}\n\n/** Geo-Replication information for the Secondary Storage Service */\nexport interface GeoReplication {\n /** The status of the secondary location */\n status: GeoReplicationStatusType;\n /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */\n lastSyncOn: Date;\n}\n\n/** An enumeration of containers */\nexport interface ListContainersSegmentResponse {\n serviceEndpoint: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n containerItems: ContainerItem[];\n continuationToken?: string;\n}\n\n/** An Azure Storage container */\nexport interface ContainerItem {\n name: string;\n deleted?: boolean;\n version?: string;\n /** Properties of a container */\n properties: ContainerProperties;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n}\n\n/** Properties of a container */\nexport interface ContainerProperties {\n lastModified: Date;\n etag: string;\n leaseStatus?: LeaseStatusType;\n leaseState?: LeaseStateType;\n leaseDuration?: LeaseDurationType;\n publicAccess?: PublicAccessType;\n hasImmutabilityPolicy?: boolean;\n hasLegalHold?: boolean;\n defaultEncryptionScope?: string;\n preventEncryptionScopeOverride?: boolean;\n deletedOn?: Date;\n remainingRetentionDays?: number;\n /** Indicates if version level worm is enabled on this container. */\n isImmutableStorageWithVersioningEnabled?: boolean;\n}\n\n/** Key information */\nexport interface KeyInfo {\n /** The date-time the key is active in ISO 8601 UTC time */\n startsOn: string;\n /** The date-time the key expires in ISO 8601 UTC time */\n expiresOn: string;\n}\n\n/** A user delegation key */\nexport interface UserDelegationKey {\n /** The Azure Active Directory object ID in GUID format. 
*/\n signedObjectId: string;\n /** The Azure Active Directory tenant ID in GUID format */\n signedTenantId: string;\n /** The date-time the key is active */\n signedStartsOn: string;\n /** The date-time the key expires */\n signedExpiresOn: string;\n /** Abbreviation of the Azure Storage service that accepts the key */\n signedService: string;\n /** The service version that created the key */\n signedVersion: string;\n /** The key as a base64 string */\n value: string;\n}\n\n/** The result of a Filter Blobs API call */\nexport interface FilterBlobSegment {\n serviceEndpoint: string;\n where: string;\n blobs: FilterBlobItem[];\n continuationToken?: string;\n}\n\n/** Blob info from a Filter Blobs API call */\nexport interface FilterBlobItem {\n name: string;\n containerName: string;\n /** Blob tags */\n tags?: BlobTags;\n}\n\n/** Blob tags */\nexport interface BlobTags {\n blobTagSet: BlobTag[];\n}\n\nexport interface BlobTag {\n key: string;\n value: string;\n}\n\n/** signed identifier */\nexport interface SignedIdentifier {\n /** a unique id */\n id: string;\n /** An Access policy */\n accessPolicy: AccessPolicy;\n}\n\n/** An Access policy */\nexport interface AccessPolicy {\n /** the date-time the policy is active */\n startsOn?: string;\n /** the date-time the policy expires */\n expiresOn?: string;\n /** the permissions for the acl policy */\n permissions?: string;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\nexport interface BlobFlatListSegment {\n blobItems: BlobItemInternal[];\n}\n\n/** An Azure Storage blob */\nexport interface BlobItemInternal {\n name: BlobName;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n /** Properties of a blob */\n properties: BlobPropertiesInternal;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n /** Blob tags */\n blobTags?: BlobTags;\n /** Dictionary of */\n objectReplicationMetadata?: { [propertyName: string]: string };\n /** Inactive root blobs which have any versions would have such tag with value true. */\n hasVersionsOnly?: boolean;\n}\n\nexport interface BlobName {\n /** Indicates if the blob name is encoded. */\n encoded?: boolean;\n /** The name of the blob. */\n content?: string;\n}\n\n/** Properties of a blob */\nexport interface BlobPropertiesInternal {\n createdOn?: Date;\n lastModified: Date;\n etag: string;\n /** Size in bytes */\n contentLength?: number;\n contentType?: string;\n contentEncoding?: string;\n contentLanguage?: string;\n contentMD5?: Uint8Array;\n contentDisposition?: string;\n cacheControl?: string;\n blobSequenceNumber?: number;\n blobType?: BlobType;\n leaseStatus?: LeaseStatusType;\n leaseState?: LeaseStateType;\n leaseDuration?: LeaseDurationType;\n copyId?: string;\n copyStatus?: CopyStatusType;\n copySource?: string;\n copyProgress?: string;\n copyCompletedOn?: Date;\n copyStatusDescription?: string;\n serverEncrypted?: boolean;\n incrementalCopy?: boolean;\n destinationSnapshot?: string;\n deletedOn?: Date;\n remainingRetentionDays?: number;\n accessTier?: AccessTier;\n accessTierInferred?: boolean;\n archiveStatus?: ArchiveStatus;\n customerProvidedKeySha256?: string;\n /** The name of the encryption scope under which the blob is encrypted. 
*/\n encryptionScope?: string;\n accessTierChangedOn?: Date;\n tagCount?: number;\n expiresOn?: Date;\n isSealed?: boolean;\n /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */\n rehydratePriority?: RehydratePriority;\n lastAccessedOn?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItemInternal[];\n}\n\nexport interface BlobPrefix {\n name: BlobName;\n}\n\nexport interface BlockLookupList {\n committed?: string[];\n uncommitted?: string[];\n latest?: string[];\n}\n\nexport interface BlockList {\n committedBlocks?: Block[];\n uncommittedBlocks?: Block[];\n}\n\n/** Represents a single block in a block blob. It describes the block's ID and size. */\nexport interface Block {\n /** The base64 encoded block ID. */\n name: string;\n /** The block size in bytes. */\n size: number;\n}\n\n/** the list of pages */\nexport interface PageList {\n pageRange?: PageRange[];\n clearRange?: ClearRange[];\n continuationToken?: string;\n}\n\nexport interface PageRange {\n start: number;\n end: number;\n}\n\nexport interface ClearRange {\n start: number;\n end: number;\n}\n\n/** Groups the set of query request settings. */\nexport interface QueryRequest {\n /** Required. The type of the provided query expression. */\n queryType: string;\n /** The query expression in SQL. The maximum size of the query expression is 256KiB. */\n expression: string;\n inputSerialization?: QuerySerialization;\n outputSerialization?: QuerySerialization;\n}\n\nexport interface QuerySerialization {\n format: QueryFormat;\n}\n\nexport interface QueryFormat {\n /** The quick query format type. */\n type: QueryFormatType;\n /** Groups the settings used for interpreting the blob data if the blob is delimited text formatted. */\n delimitedTextConfiguration?: DelimitedTextConfiguration;\n /** json text configuration */\n jsonTextConfiguration?: JsonTextConfiguration;\n /** Groups the settings used for formatting the response if the response should be Arrow formatted. */\n arrowConfiguration?: ArrowConfiguration;\n /** Any object */\n parquetTextConfiguration?: any;\n}\n\n/** Groups the settings used for interpreting the blob data if the blob is delimited text formatted. */\nexport interface DelimitedTextConfiguration {\n /** The string used to separate columns. */\n columnSeparator?: string;\n /** The string used to quote a specific field. */\n fieldQuote?: string;\n /** The string used to separate records. */\n recordSeparator?: string;\n /** The string used as an escape character. */\n escapeChar?: string;\n /** Represents whether the data has headers. */\n headersPresent?: boolean;\n}\n\n/** json text configuration */\nexport interface JsonTextConfiguration {\n /** The string used to separate records. 
*/\n recordSeparator?: string;\n}\n\n/** Groups the settings used for formatting the response if the response should be Arrow formatted. */\nexport interface ArrowConfiguration {\n schema: ArrowField[];\n}\n\n/** Groups settings regarding specific field of an arrow schema */\nexport interface ArrowField {\n type: string;\n name?: string;\n precision?: number;\n scale?: number;\n}\n\n/** Defines headers for Service_setProperties operation. */\nexport interface ServiceSetPropertiesHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_setProperties operation. */\nexport interface ServiceSetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getProperties operation. */\nexport interface ServiceGetPropertiesHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getProperties operation. */\nexport interface ServiceGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getStatistics operation. */\nexport interface ServiceGetStatisticsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getStatistics operation. */\nexport interface ServiceGetStatisticsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_listContainersSegment operation. */\nexport interface ServiceListContainersSegmentHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_listContainersSegment operation. 
*/\nexport interface ServiceListContainersSegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getUserDelegationKey operation. */\nexport interface ServiceGetUserDelegationKeyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getUserDelegationKey operation. */\nexport interface ServiceGetUserDelegationKeyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_getAccountInfo operation. */\nexport interface ServiceGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */\n isHierarchicalNamespaceEnabled?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_getAccountInfo operation. */\nexport interface ServiceGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_submitBatch operation. */\nexport interface ServiceSubmitBatchHeaders {\n /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */\n contentType?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_submitBatch operation. */\nexport interface ServiceSubmitBatchExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Service_filterBlobs operation. */\nexport interface ServiceFilterBlobsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Service_filterBlobs operation. */\nexport interface ServiceFilterBlobsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_create operation. */\nexport interface ContainerCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_create operation. */\nexport interface ContainerCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getProperties operation. */\nexport interface ContainerGetPropertiesHeaders {\n metadata?: { [propertyName: string]: string };\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicated whether data in the container may be accessed publicly and the level of access */\n blobPublicAccess?: PublicAccessType;\n /** Indicates whether the container has an immutability policy set on it. 
*/\n hasImmutabilityPolicy?: boolean;\n /** Indicates whether the container has a legal hold. */\n hasLegalHold?: boolean;\n /** The default encryption scope for the container. */\n defaultEncryptionScope?: string;\n /** Indicates whether the container's default encryption scope can be overriden. */\n denyEncryptionScopeOverride?: boolean;\n /** Indicates whether version level worm is enabled on a container. */\n isImmutableStorageWithVersioningEnabled?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_getProperties operation. */\nexport interface ContainerGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_delete operation. */\nexport interface ContainerDeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_delete operation. */\nexport interface ContainerDeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_setMetadata operation. */\nexport interface ContainerSetMetadataHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_setMetadata operation. */\nexport interface ContainerSetMetadataExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccessPolicy operation. */\nexport interface ContainerGetAccessPolicyHeaders {\n /** Indicated whether data in the container may be accessed publicly and the level of access */\n blobPublicAccess?: PublicAccessType;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccessPolicy operation. */\nexport interface ContainerGetAccessPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_setAccessPolicy operation. */\nexport interface ContainerSetAccessPolicyHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_setAccessPolicy operation. */\nexport interface ContainerSetAccessPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_restore operation. */\nexport interface ContainerRestoreHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_restore operation. */\nexport interface ContainerRestoreExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_rename operation. */\nexport interface ContainerRenameHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_rename operation. */\nexport interface ContainerRenameExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_submitBatch operation. */\nexport interface ContainerSubmitBatchHeaders {\n /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */\n contentType?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n}\n\n/** Defines headers for Container_submitBatch operation. */\nexport interface ContainerSubmitBatchExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_filterBlobs operation. */\nexport interface ContainerFilterBlobsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_filterBlobs operation. */\nexport interface ContainerFilterBlobsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_acquireLease operation. */\nexport interface ContainerAcquireLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_acquireLease operation. */\nexport interface ContainerAcquireLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_releaseLease operation. */\nexport interface ContainerReleaseLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. 
If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_releaseLease operation. */\nexport interface ContainerReleaseLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_renewLease operation. */\nexport interface ContainerRenewLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_renewLease operation. */\nexport interface ContainerRenewLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_breakLease operation. */\nexport interface ContainerBreakLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Approximate time remaining in the lease period, in seconds. */\n leaseTime?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_breakLease operation. */\nexport interface ContainerBreakLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_changeLease operation. */\nexport interface ContainerChangeLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a container's lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Container_changeLease operation. */\nexport interface ContainerChangeLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobFlatSegment operation. */\nexport interface ContainerListBlobFlatSegmentHeaders {\n /** The media type of the body of the response. For List Blobs this is 'application/xml' */\n contentType?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobFlatSegment operation. */\nexport interface ContainerListBlobFlatSegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobHierarchySegment operation. */\nexport interface ContainerListBlobHierarchySegmentHeaders {\n /** The media type of the body of the response. For List Blobs this is 'application/xml' */\n contentType?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
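// Illustrative sketch (hedged, not part of the vendored file): the container lease
// header interfaces above (ContainerAcquireLeaseHeaders, ContainerReleaseLeaseHeaders,
// ContainerRenewLeaseHeaders, ContainerBreakLeaseHeaders, ContainerChangeLeaseHeaders)
// surface as flattened properties on BlobLeaseClient responses in @azure/storage-blob.
// The connection-string env var and container name are placeholder assumptions.
import { BlobServiceClient } from "@azure/storage-blob";

async function containerLeaseDemo(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? ""
  );
  const container = service.getContainerClient("my-container");
  const leaseClient = container.getBlobLeaseClient();

  // Acquire a 30-second lease; the response carries leaseId, etag and lastModified,
  // matching ContainerAcquireLeaseHeaders.
  const acquired = await leaseClient.acquireLease(30);
  console.log(acquired.leaseId, acquired.etag, acquired.lastModified);

  // Release the lease; the response shape matches ContainerReleaseLeaseHeaders.
  const released = await leaseClient.releaseLease();
  console.log(released.requestId, released.date);
}

containerLeaseDemo().catch(console.error);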
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Container_listBlobHierarchySegment operation. */\nexport interface ContainerListBlobHierarchySegmentExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Container_getAccountInfo operation. */\nexport interface ContainerGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n}\n\n/** Defines headers for Container_getAccountInfo operation. */\nexport interface ContainerGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_download operation. */\nexport interface BlobDownloadHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n metadata?: { [propertyName: string]: string };\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */\n objectReplicationPolicyId?: string;\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */\n objectReplicationRules?: { [propertyName: string]: string };\n /** The number of bytes present in the response body. */\n contentLength?: number;\n /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */\n contentType?: string;\n /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */\n contentRange?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. 
The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletedOn?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */\n isCurrentVersion?: boolean;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */\n blobContentMD5?: Uint8Array;\n /** The number of tags associated with the blob */\n tagCount?: number;\n /** If this blob has been sealed */\n isSealed?: boolean;\n /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */\n lastAccessed?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n /** Error Code */\n errorCode?: string;\n /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */\n contentCrc64?: Uint8Array;\n}\n\n/** Defines headers for Blob_download operation. */\nexport interface BlobDownloadExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getProperties operation. */\nexport interface BlobGetPropertiesHeaders {\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Returns the date and time the blob was created. */\n createdOn?: Date;\n metadata?: { [propertyName: string]: string };\n /** Optional. 
Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */\n objectReplicationPolicyId?: string;\n /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */\n objectReplicationRules?: { [propertyName: string]: string };\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletedOn?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Included if the blob is incremental copy blob. */\n isIncrementalCopy?: boolean;\n /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */\n destinationSnapshot?: string;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. */\n leaseStatus?: LeaseStatusType;\n /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */\n contentLength?: number;\n /** The content type specified for the blob. 
The default content type is 'application/octet-stream' */\n contentType?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */\n accessTier?: string;\n /** For page blobs on a premium storage account only. 
If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */\n accessTierInferred?: boolean;\n /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */\n archiveStatus?: string;\n /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */\n accessTierChangedOn?: Date;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */\n isCurrentVersion?: boolean;\n /** The number of tags associated with the blob */\n tagCount?: number;\n /** The time this blob will expire. */\n expiresOn?: Date;\n /** If this blob has been sealed */\n isSealed?: boolean;\n /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */\n rehydratePriority?: RehydratePriority;\n /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */\n lastAccessed?: Date;\n /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */\n immutabilityPolicyExpiresOn?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Indicates if a legal hold is present on the blob. */\n legalHold?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getProperties operation. */\nexport interface BlobGetPropertiesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_delete operation. */\nexport interface BlobDeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_delete operation. */\nexport interface BlobDeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_undelete operation. */\nexport interface BlobUndeleteHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
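// Illustrative sketch (hedged): BlobDownloadHeaders and BlobGetPropertiesHeaders defined
// above are flattened into the responses of BlobClient.download() and
// BlobClient.getProperties() in @azure/storage-blob. The container/blob names and the
// connection-string env var are placeholder assumptions.
import { BlobServiceClient } from "@azure/storage-blob";

async function inspectBlob(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? ""
  );
  const blob = service
    .getContainerClient("my-container")
    .getBlobClient("report.csv");

  // download() exposes BlobDownloadHeaders fields such as contentType, contentLength,
  // etag and lastModified directly on the response object.
  const download = await blob.download();
  console.log(download.contentType, download.contentLength, download.etag);

  // getProperties() exposes BlobGetPropertiesHeaders fields such as blobType,
  // accessTier, leaseState and tagCount.
  const props = await blob.getProperties();
  console.log(props.blobType, props.accessTier, props.leaseState, props.tagCount);
}

inspectBlob().catch(console.error);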
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_undelete operation. */\nexport interface BlobUndeleteExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setExpiry operation. */\nexport interface BlobSetExpiryHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */\n date?: Date;\n}\n\n/** Defines headers for Blob_setExpiry operation. */\nexport interface BlobSetExpiryExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setHttpHeaders operation. */\nexport interface BlobSetHttpHeadersHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setHttpHeaders operation. */\nexport interface BlobSetHttpHeadersExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setImmutabilityPolicy operation. */\nexport interface BlobSetImmutabilityPolicyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates the time the immutability policy will expire. */\n immutabilityPolicyExpiry?: Date;\n /** Indicates immutability policy mode. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n}\n\n/** Defines headers for Blob_setImmutabilityPolicy operation. */\nexport interface BlobSetImmutabilityPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_deleteImmutabilityPolicy operation. */\nexport interface BlobDeleteImmutabilityPolicyHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_deleteImmutabilityPolicy operation. */\nexport interface BlobDeleteImmutabilityPolicyExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setLegalHold operation. */\nexport interface BlobSetLegalHoldHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Indicates if the blob has a legal hold. */\n legalHold?: boolean;\n}\n\n/** Defines headers for Blob_setLegalHold operation. */\nexport interface BlobSetLegalHoldExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setMetadata operation. */\nexport interface BlobSetMetadataHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setMetadata operation. */\nexport interface BlobSetMetadataExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_acquireLease operation. */\nexport interface BlobAcquireLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_acquireLease operation. */\nexport interface BlobAcquireLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_releaseLease operation. */\nexport interface BlobReleaseLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
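// Illustrative sketch (hedged): BlobSetMetadataHeaders above corresponds to the response
// of BlobClient.setMetadata() in @azure/storage-blob; etag, lastModified, versionId and
// isServerEncrypted come back as flattened properties. The metadata keys, blob name and
// connection-string env var are placeholder assumptions.
import { BlobServiceClient } from "@azure/storage-blob";

async function setMetadataDemo(): Promise<void> {
  const blob = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? ""
  )
    .getContainerClient("my-container")
    .getBlobClient("report.csv");

  const result = await blob.setMetadata({ reviewed: "true", owner: "datateam" });
  console.log(result.etag, result.lastModified, result.versionId, result.isServerEncrypted);
}

setMetadataDemo().catch(console.error);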
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_releaseLease operation. */\nexport interface BlobReleaseLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_renewLease operation. */\nexport interface BlobRenewLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_renewLease operation. */\nexport interface BlobRenewLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_changeLease operation. */\nexport interface BlobChangeLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Uniquely identifies a blobs' lease */\n leaseId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_changeLease operation. */\nexport interface BlobChangeLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_breakLease operation. */\nexport interface BlobBreakLeaseHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** Approximate time remaining in the lease period, in seconds. 
*/\n leaseTime?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n}\n\n/** Defines headers for Blob_breakLease operation. */\nexport interface BlobBreakLeaseExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_createSnapshot operation. */\nexport interface BlobCreateSnapshotHeaders {\n /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */\n snapshot?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */\n isServerEncrypted?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_createSnapshot operation. */\nexport interface BlobCreateSnapshotExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_startCopyFromURL operation. */\nexport interface BlobStartCopyFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_startCopyFromURL operation. */\nexport interface BlobStartCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_copyFromURL operation. */\nexport interface BlobCopyFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: SyncCopyStatusType;\n /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */\n contentMD5?: Uint8Array;\n /** This response header is returned so that the client can check for the integrity of the copied content. */\n xMsContentCrc64?: Uint8Array;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_copyFromURL operation. */\nexport interface BlobCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_abortCopyFromURL operation. 
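// Illustrative sketch (hedged): BlobStartCopyFromURLHeaders and BlobCopyFromURLHeaders
// above map to BlobClient.beginCopyFromURL() (asynchronous, poller-based) and
// BlobClient.syncCopyFromURL() in @azure/storage-blob. The source URL and destination
// names below are placeholder assumptions.
import { BlobServiceClient } from "@azure/storage-blob";

async function copyDemo(): Promise<void> {
  const dest = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? ""
  )
    .getContainerClient("backups")
    .getBlobClient("report-copy.csv");

  // Asynchronous server-side copy: the final poller response carries copyId and
  // copyStatus, matching the x-ms-copy-id / x-ms-copy-status headers above.
  const poller = await dest.beginCopyFromURL(
    "https://example.blob.core.windows.net/src/report.csv"
  );
  const result = await poller.pollUntilDone();
  console.log(result.copyId, result.copyStatus);
}

copyDemo().catch(console.error);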
*/\nexport interface BlobAbortCopyFromURLHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_abortCopyFromURL operation. */\nexport interface BlobAbortCopyFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTier operation. */\nexport interface BlobSetTierHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */\n version?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTier operation. */\nexport interface BlobSetTierExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getAccountInfo operation. */\nexport interface BlobGetAccountInfoHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Identifies the sku name of the account */\n skuName?: SkuName;\n /** Identifies the account kind */\n accountKind?: AccountKind;\n}\n\n/** Defines headers for Blob_getAccountInfo operation. */\nexport interface BlobGetAccountInfoExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_query operation. */\nexport interface BlobQueryHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n metadata?: { [propertyName: string]: string };\n /** The number of bytes present in the response body. */\n contentLength?: number;\n /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */\n contentType?: string;\n /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */\n contentRange?: string;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/\n etag?: string;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header returns the value that was specified for the Content-Encoding request header */\n contentEncoding?: string;\n /** This header is returned if it was previously specified for the blob. */\n cacheControl?: string;\n /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */\n contentDisposition?: string;\n /** This header returns the value that was specified for the Content-Language request header. */\n contentLanguage?: string;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** The blob's type. */\n blobType?: BlobType;\n /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copyCompletionTime?: Date;\n /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyStatusDescription?: string;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */\n copyProgress?: string;\n /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */\n copySource?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */\n leaseDuration?: LeaseDurationType;\n /** Lease state of the blob. */\n leaseState?: LeaseStateType;\n /** The current lease status of the blob. 
*/\n leaseStatus?: LeaseStatusType;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** Indicates that the service supports requests for partial blob content. */\n acceptRanges?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */\n blobContentMD5?: Uint8Array;\n /** Error Code */\n errorCode?: string;\n /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */\n contentCrc64?: Uint8Array;\n}\n\n/** Defines headers for Blob_query operation. */\nexport interface BlobQueryExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getTags operation. */\nexport interface BlobGetTagsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_getTags operation. */\nexport interface BlobGetTagsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTags operation. 
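// Illustrative sketch (hedged): BlobGetTagsHeaders and BlobSetTagsHeaders above back the
// BlobClient.getTags() and BlobClient.setTags() calls in @azure/storage-blob. The tag
// keys/values, blob name and connection-string env var are placeholder assumptions.
import { BlobServiceClient } from "@azure/storage-blob";

async function tagDemo(): Promise<void> {
  const blob = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING ?? ""
  )
    .getContainerClient("my-container")
    .getBlobClient("report.csv");

  await blob.setTags({ project: "cache", tier: "gold" });

  // getTags() returns the tag set along with the usual requestId/version/date headers.
  const { tags, requestId } = await blob.getTags();
  console.log(tags, requestId);
}

tagDemo().catch(console.error);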
*/\nexport interface BlobSetTagsHeaders {\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for Blob_setTags operation. */\nexport interface BlobSetTagsExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_create operation. */\nexport interface PageBlobCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_create operation. */\nexport interface PageBlobCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPages operation. */\nexport interface PageBlobUploadPagesHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPages operation. */\nexport interface PageBlobUploadPagesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_clearPages operation. */\nexport interface PageBlobClearPagesHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_clearPages operation. */\nexport interface PageBlobClearPagesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPagesFromURL operation. */\nexport interface PageBlobUploadPagesFromURLHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The current sequence number for the page blob. */\n blobSequenceNumber?: number;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_uploadPagesFromURL operation. */\nexport interface PageBlobUploadPagesFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRanges operation. */\nexport interface PageBlobGetPageRangesHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The size of the blob in bytes. */\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRanges operation. */\nexport interface PageBlobGetPageRangesExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRangesDiff operation. */\nexport interface PageBlobGetPageRangesDiffHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The size of the blob in bytes. */\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_getPageRangesDiff operation. */\nexport interface PageBlobGetPageRangesDiffExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_resize operation. */\nexport interface PageBlobResizeHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_resize operation. 
*/\nexport interface PageBlobResizeExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_updateSequenceNumber operation. */\nexport interface PageBlobUpdateSequenceNumberHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */\n blobSequenceNumber?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_updateSequenceNumber operation. */\nexport interface PageBlobUpdateSequenceNumberExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_copyIncremental operation. */\nexport interface PageBlobCopyIncrementalHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */\n copyId?: string;\n /** State of the copy operation identified by x-ms-copy-id. */\n copyStatus?: CopyStatusType;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for PageBlob_copyIncremental operation. */\nexport interface PageBlobCopyIncrementalExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_create operation. */\nexport interface AppendBlobCreateHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_create operation. */\nexport interface AppendBlobCreateExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlock operation. */\nexport interface AppendBlobAppendBlockHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */\n blobAppendOffset?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlock operation. */\nexport interface AppendBlobAppendBlockExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlockFromUrl operation. */\nexport interface AppendBlobAppendBlockFromUrlHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */\n blobAppendOffset?: string;\n /** The number of committed blocks present in the blob. This header is returned only for append blobs. */\n blobCommittedBlockCount?: number;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/\n encryptionScope?: string;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_appendBlockFromUrl operation. */\nexport interface AppendBlobAppendBlockFromUrlExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for AppendBlob_seal operation. */\nexport interface AppendBlobSealHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** If this blob has been sealed */\n isSealed?: boolean;\n}\n\n/** Defines headers for AppendBlob_seal operation. */\nexport interface AppendBlobSealExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_upload operation. */\nexport interface BlockBlobUploadHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. 
*/\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_upload operation. */\nexport interface BlockBlobUploadExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_putBlobFromUrl operation. */\nexport interface BlockBlobPutBlobFromUrlHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_putBlobFromUrl operation. */\nexport interface BlockBlobPutBlobFromUrlExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlock operation. */\nexport interface BlockBlobStageBlockHeaders {\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n contentMD5?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlock operation. */\nexport interface BlockBlobStageBlockExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlockFromURL operation. */\nexport interface BlockBlobStageBlockFromURLHeaders {\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_stageBlockFromURL operation. 
*/\nexport interface BlockBlobStageBlockFromURLExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_commitBlockList operation. */\nexport interface BlockBlobCommitBlockListHeaders {\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */\n contentMD5?: Uint8Array;\n /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */\n xMsContentCrc64?: Uint8Array;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */\n versionId?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */\n isServerEncrypted?: boolean;\n /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */\n encryptionKeySha256?: string;\n /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */\n encryptionScope?: string;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_commitBlockList operation. */\nexport interface BlockBlobCommitBlockListExceptionHeaders {\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_getBlockList operation. */\nexport interface BlockBlobGetBlockListHeaders {\n /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */\n lastModified?: Date;\n /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */\n etag?: string;\n /** The media type of the body of the response. For Get Block List this is 'application/xml' */\n contentType?: string;\n /** The size of the blob in bytes. 
*/\n blobContentLength?: number;\n /** If a client request id header is sent in the request, this header will be present in the response with the same value. */\n clientRequestId?: string;\n /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */\n requestId?: string;\n /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */\n version?: string;\n /** UTC date/time value generated by the service that indicates the time at which the response was initiated */\n date?: Date;\n /** Error Code */\n errorCode?: string;\n}\n\n/** Defines headers for BlockBlob_getBlockList operation. */\nexport interface BlockBlobGetBlockListExceptionHeaders {\n errorCode?: string;\n}\n\n/** Parameter group */\nexport interface ContainerEncryptionScope {\n /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */\n defaultEncryptionScope?: string;\n /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */\n preventEncryptionScopeOverride?: boolean;\n}\n\n/** Parameter group */\nexport interface LeaseAccessConditions {\n /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */\n leaseId?: string;\n}\n\n/** Parameter group */\nexport interface ModifiedAccessConditions {\n /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */\n ifModifiedSince?: Date;\n /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */\n ifUnmodifiedSince?: Date;\n /** Specify an ETag value to operate only on blobs with a matching value. */\n ifMatch?: string;\n /** Specify an ETag value to operate only on blobs without a matching value. */\n ifNoneMatch?: string;\n /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */\n ifTags?: string;\n}\n\n/** Parameter group */\nexport interface CpkInfo {\n /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionKey?: string;\n /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */\n encryptionKeySha256?: string;\n /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is \"AES256\". Must be provided if the x-ms-encryption-key header is provided. */\n encryptionAlgorithm?: EncryptionAlgorithmType;\n}\n\n/** Parameter group */\nexport interface BlobHttpHeaders {\n /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */\n blobCacheControl?: string;\n /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */\n blobContentType?: string;\n /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */\n blobContentMD5?: Uint8Array;\n /** Optional. Sets the blob's content encoding. 
If specified, this property is stored with the blob and returned with a read request. */\n blobContentEncoding?: string;\n /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */\n blobContentLanguage?: string;\n /** Optional. Sets the blob's Content-Disposition header. */\n blobContentDisposition?: string;\n}\n\n/** Parameter group */\nexport interface SourceModifiedAccessConditions {\n /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */\n sourceIfModifiedSince?: Date;\n /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */\n sourceIfUnmodifiedSince?: Date;\n /** Specify an ETag value to operate only on blobs with a matching value. */\n sourceIfMatch?: string;\n /** Specify an ETag value to operate only on blobs without a matching value. */\n sourceIfNoneMatch?: string;\n /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */\n sourceIfTags?: string;\n}\n\n/** Parameter group */\nexport interface SequenceNumberAccessConditions {\n /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */\n ifSequenceNumberLessThanOrEqualTo?: number;\n /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */\n ifSequenceNumberLessThan?: number;\n /** Specify this header value to operate only on a blob if it has the specified sequence number. */\n ifSequenceNumberEqualTo?: number;\n}\n\n/** Parameter group */\nexport interface AppendPositionAccessConditions {\n /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). */\n maxSize?: number;\n /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */\n appendPosition?: number;\n}\n\n/** Known values of {@link BlobExpiryOptions} that the service accepts. */\nexport const enum KnownBlobExpiryOptions {\n NeverExpire = \"NeverExpire\",\n RelativeToCreation = \"RelativeToCreation\",\n RelativeToNow = \"RelativeToNow\",\n Absolute = \"Absolute\"\n}\n\n/**\n * Defines values for BlobExpiryOptions. \\\n * {@link KnownBlobExpiryOptions} can be used interchangeably with BlobExpiryOptions,\n * this enum contains the known values that the service supports.\n * ### Know values supported by the service\n * **NeverExpire** \\\n * **RelativeToCreation** \\\n * **RelativeToNow** \\\n * **Absolute**\n */\nexport type BlobExpiryOptions = string;\n\n/** Known values of {@link StorageErrorCode} that the service accepts. 
*/\nexport const enum KnownStorageErrorCode {\n AccountAlreadyExists = \"AccountAlreadyExists\",\n AccountBeingCreated = \"AccountBeingCreated\",\n AccountIsDisabled = \"AccountIsDisabled\",\n AuthenticationFailed = \"AuthenticationFailed\",\n AuthorizationFailure = \"AuthorizationFailure\",\n ConditionHeadersNotSupported = \"ConditionHeadersNotSupported\",\n ConditionNotMet = \"ConditionNotMet\",\n EmptyMetadataKey = \"EmptyMetadataKey\",\n InsufficientAccountPermissions = \"InsufficientAccountPermissions\",\n InternalError = \"InternalError\",\n InvalidAuthenticationInfo = \"InvalidAuthenticationInfo\",\n InvalidHeaderValue = \"InvalidHeaderValue\",\n InvalidHttpVerb = \"InvalidHttpVerb\",\n InvalidInput = \"InvalidInput\",\n InvalidMd5 = \"InvalidMd5\",\n InvalidMetadata = \"InvalidMetadata\",\n InvalidQueryParameterValue = \"InvalidQueryParameterValue\",\n InvalidRange = \"InvalidRange\",\n InvalidResourceName = \"InvalidResourceName\",\n InvalidUri = \"InvalidUri\",\n InvalidXmlDocument = \"InvalidXmlDocument\",\n InvalidXmlNodeValue = \"InvalidXmlNodeValue\",\n Md5Mismatch = \"Md5Mismatch\",\n MetadataTooLarge = \"MetadataTooLarge\",\n MissingContentLengthHeader = \"MissingContentLengthHeader\",\n MissingRequiredQueryParameter = \"MissingRequiredQueryParameter\",\n MissingRequiredHeader = \"MissingRequiredHeader\",\n MissingRequiredXmlNode = \"MissingRequiredXmlNode\",\n MultipleConditionHeadersNotSupported = \"MultipleConditionHeadersNotSupported\",\n OperationTimedOut = \"OperationTimedOut\",\n OutOfRangeInput = \"OutOfRangeInput\",\n OutOfRangeQueryParameterValue = \"OutOfRangeQueryParameterValue\",\n RequestBodyTooLarge = \"RequestBodyTooLarge\",\n ResourceTypeMismatch = \"ResourceTypeMismatch\",\n RequestUrlFailedToParse = \"RequestUrlFailedToParse\",\n ResourceAlreadyExists = \"ResourceAlreadyExists\",\n ResourceNotFound = \"ResourceNotFound\",\n ServerBusy = \"ServerBusy\",\n UnsupportedHeader = \"UnsupportedHeader\",\n UnsupportedXmlNode = \"UnsupportedXmlNode\",\n UnsupportedQueryParameter = \"UnsupportedQueryParameter\",\n UnsupportedHttpVerb = \"UnsupportedHttpVerb\",\n AppendPositionConditionNotMet = \"AppendPositionConditionNotMet\",\n BlobAlreadyExists = \"BlobAlreadyExists\",\n BlobImmutableDueToPolicy = \"BlobImmutableDueToPolicy\",\n BlobNotFound = \"BlobNotFound\",\n BlobOverwritten = \"BlobOverwritten\",\n BlobTierInadequateForContentLength = \"BlobTierInadequateForContentLength\",\n BlobUsesCustomerSpecifiedEncryption = \"BlobUsesCustomerSpecifiedEncryption\",\n BlockCountExceedsLimit = \"BlockCountExceedsLimit\",\n BlockListTooLong = \"BlockListTooLong\",\n CannotChangeToLowerTier = \"CannotChangeToLowerTier\",\n CannotVerifyCopySource = \"CannotVerifyCopySource\",\n ContainerAlreadyExists = \"ContainerAlreadyExists\",\n ContainerBeingDeleted = \"ContainerBeingDeleted\",\n ContainerDisabled = \"ContainerDisabled\",\n ContainerNotFound = \"ContainerNotFound\",\n ContentLengthLargerThanTierLimit = \"ContentLengthLargerThanTierLimit\",\n CopyAcrossAccountsNotSupported = \"CopyAcrossAccountsNotSupported\",\n CopyIdMismatch = \"CopyIdMismatch\",\n FeatureVersionMismatch = \"FeatureVersionMismatch\",\n IncrementalCopyBlobMismatch = \"IncrementalCopyBlobMismatch\",\n IncrementalCopyOfEralierVersionSnapshotNotAllowed = \"IncrementalCopyOfEralierVersionSnapshotNotAllowed\",\n IncrementalCopySourceMustBeSnapshot = \"IncrementalCopySourceMustBeSnapshot\",\n InfiniteLeaseDurationRequired = \"InfiniteLeaseDurationRequired\",\n InvalidBlobOrBlock = \"InvalidBlobOrBlock\",\n 
InvalidBlobTier = \"InvalidBlobTier\",\n InvalidBlobType = \"InvalidBlobType\",\n InvalidBlockId = \"InvalidBlockId\",\n InvalidBlockList = \"InvalidBlockList\",\n InvalidOperation = \"InvalidOperation\",\n InvalidPageRange = \"InvalidPageRange\",\n InvalidSourceBlobType = \"InvalidSourceBlobType\",\n InvalidSourceBlobUrl = \"InvalidSourceBlobUrl\",\n InvalidVersionForPageBlobOperation = \"InvalidVersionForPageBlobOperation\",\n LeaseAlreadyPresent = \"LeaseAlreadyPresent\",\n LeaseAlreadyBroken = \"LeaseAlreadyBroken\",\n LeaseIdMismatchWithBlobOperation = \"LeaseIdMismatchWithBlobOperation\",\n LeaseIdMismatchWithContainerOperation = \"LeaseIdMismatchWithContainerOperation\",\n LeaseIdMismatchWithLeaseOperation = \"LeaseIdMismatchWithLeaseOperation\",\n LeaseIdMissing = \"LeaseIdMissing\",\n LeaseIsBreakingAndCannotBeAcquired = \"LeaseIsBreakingAndCannotBeAcquired\",\n LeaseIsBreakingAndCannotBeChanged = \"LeaseIsBreakingAndCannotBeChanged\",\n LeaseIsBrokenAndCannotBeRenewed = \"LeaseIsBrokenAndCannotBeRenewed\",\n LeaseLost = \"LeaseLost\",\n LeaseNotPresentWithBlobOperation = \"LeaseNotPresentWithBlobOperation\",\n LeaseNotPresentWithContainerOperation = \"LeaseNotPresentWithContainerOperation\",\n LeaseNotPresentWithLeaseOperation = \"LeaseNotPresentWithLeaseOperation\",\n MaxBlobSizeConditionNotMet = \"MaxBlobSizeConditionNotMet\",\n NoAuthenticationInformation = \"NoAuthenticationInformation\",\n NoPendingCopyOperation = \"NoPendingCopyOperation\",\n OperationNotAllowedOnIncrementalCopyBlob = \"OperationNotAllowedOnIncrementalCopyBlob\",\n PendingCopyOperation = \"PendingCopyOperation\",\n PreviousSnapshotCannotBeNewer = \"PreviousSnapshotCannotBeNewer\",\n PreviousSnapshotNotFound = \"PreviousSnapshotNotFound\",\n PreviousSnapshotOperationNotSupported = \"PreviousSnapshotOperationNotSupported\",\n SequenceNumberConditionNotMet = \"SequenceNumberConditionNotMet\",\n SequenceNumberIncrementTooLarge = \"SequenceNumberIncrementTooLarge\",\n SnapshotCountExceeded = \"SnapshotCountExceeded\",\n SnapshotOperationRateExceeded = \"SnapshotOperationRateExceeded\",\n SnapshotsPresent = \"SnapshotsPresent\",\n SourceConditionNotMet = \"SourceConditionNotMet\",\n SystemInUse = \"SystemInUse\",\n TargetConditionNotMet = \"TargetConditionNotMet\",\n UnauthorizedBlobOverwrite = \"UnauthorizedBlobOverwrite\",\n BlobBeingRehydrated = \"BlobBeingRehydrated\",\n BlobArchived = \"BlobArchived\",\n BlobNotArchived = \"BlobNotArchived\",\n AuthorizationSourceIPMismatch = \"AuthorizationSourceIPMismatch\",\n AuthorizationProtocolMismatch = \"AuthorizationProtocolMismatch\",\n AuthorizationPermissionMismatch = \"AuthorizationPermissionMismatch\",\n AuthorizationServiceMismatch = \"AuthorizationServiceMismatch\",\n AuthorizationResourceTypeMismatch = \"AuthorizationResourceTypeMismatch\"\n}\n\n/**\n * Defines values for StorageErrorCode. 
\\\n * {@link KnownStorageErrorCode} can be used interchangeably with StorageErrorCode,\n * this enum contains the known values that the service supports.\n * ### Know values supported by the service\n * **AccountAlreadyExists** \\\n * **AccountBeingCreated** \\\n * **AccountIsDisabled** \\\n * **AuthenticationFailed** \\\n * **AuthorizationFailure** \\\n * **ConditionHeadersNotSupported** \\\n * **ConditionNotMet** \\\n * **EmptyMetadataKey** \\\n * **InsufficientAccountPermissions** \\\n * **InternalError** \\\n * **InvalidAuthenticationInfo** \\\n * **InvalidHeaderValue** \\\n * **InvalidHttpVerb** \\\n * **InvalidInput** \\\n * **InvalidMd5** \\\n * **InvalidMetadata** \\\n * **InvalidQueryParameterValue** \\\n * **InvalidRange** \\\n * **InvalidResourceName** \\\n * **InvalidUri** \\\n * **InvalidXmlDocument** \\\n * **InvalidXmlNodeValue** \\\n * **Md5Mismatch** \\\n * **MetadataTooLarge** \\\n * **MissingContentLengthHeader** \\\n * **MissingRequiredQueryParameter** \\\n * **MissingRequiredHeader** \\\n * **MissingRequiredXmlNode** \\\n * **MultipleConditionHeadersNotSupported** \\\n * **OperationTimedOut** \\\n * **OutOfRangeInput** \\\n * **OutOfRangeQueryParameterValue** \\\n * **RequestBodyTooLarge** \\\n * **ResourceTypeMismatch** \\\n * **RequestUrlFailedToParse** \\\n * **ResourceAlreadyExists** \\\n * **ResourceNotFound** \\\n * **ServerBusy** \\\n * **UnsupportedHeader** \\\n * **UnsupportedXmlNode** \\\n * **UnsupportedQueryParameter** \\\n * **UnsupportedHttpVerb** \\\n * **AppendPositionConditionNotMet** \\\n * **BlobAlreadyExists** \\\n * **BlobImmutableDueToPolicy** \\\n * **BlobNotFound** \\\n * **BlobOverwritten** \\\n * **BlobTierInadequateForContentLength** \\\n * **BlobUsesCustomerSpecifiedEncryption** \\\n * **BlockCountExceedsLimit** \\\n * **BlockListTooLong** \\\n * **CannotChangeToLowerTier** \\\n * **CannotVerifyCopySource** \\\n * **ContainerAlreadyExists** \\\n * **ContainerBeingDeleted** \\\n * **ContainerDisabled** \\\n * **ContainerNotFound** \\\n * **ContentLengthLargerThanTierLimit** \\\n * **CopyAcrossAccountsNotSupported** \\\n * **CopyIdMismatch** \\\n * **FeatureVersionMismatch** \\\n * **IncrementalCopyBlobMismatch** \\\n * **IncrementalCopyOfEralierVersionSnapshotNotAllowed** \\\n * **IncrementalCopySourceMustBeSnapshot** \\\n * **InfiniteLeaseDurationRequired** \\\n * **InvalidBlobOrBlock** \\\n * **InvalidBlobTier** \\\n * **InvalidBlobType** \\\n * **InvalidBlockId** \\\n * **InvalidBlockList** \\\n * **InvalidOperation** \\\n * **InvalidPageRange** \\\n * **InvalidSourceBlobType** \\\n * **InvalidSourceBlobUrl** \\\n * **InvalidVersionForPageBlobOperation** \\\n * **LeaseAlreadyPresent** \\\n * **LeaseAlreadyBroken** \\\n * **LeaseIdMismatchWithBlobOperation** \\\n * **LeaseIdMismatchWithContainerOperation** \\\n * **LeaseIdMismatchWithLeaseOperation** \\\n * **LeaseIdMissing** \\\n * **LeaseIsBreakingAndCannotBeAcquired** \\\n * **LeaseIsBreakingAndCannotBeChanged** \\\n * **LeaseIsBrokenAndCannotBeRenewed** \\\n * **LeaseLost** \\\n * **LeaseNotPresentWithBlobOperation** \\\n * **LeaseNotPresentWithContainerOperation** \\\n * **LeaseNotPresentWithLeaseOperation** \\\n * **MaxBlobSizeConditionNotMet** \\\n * **NoAuthenticationInformation** \\\n * **NoPendingCopyOperation** \\\n * **OperationNotAllowedOnIncrementalCopyBlob** \\\n * **PendingCopyOperation** \\\n * **PreviousSnapshotCannotBeNewer** \\\n * **PreviousSnapshotNotFound** \\\n * **PreviousSnapshotOperationNotSupported** \\\n * **SequenceNumberConditionNotMet** \\\n * 
**SequenceNumberIncrementTooLarge** \\\n * **SnapshotCountExceeded** \\\n * **SnapshotOperationRateExceeded** \\\n * **SnapshotsPresent** \\\n * **SourceConditionNotMet** \\\n * **SystemInUse** \\\n * **TargetConditionNotMet** \\\n * **UnauthorizedBlobOverwrite** \\\n * **BlobBeingRehydrated** \\\n * **BlobArchived** \\\n * **BlobNotArchived** \\\n * **AuthorizationSourceIPMismatch** \\\n * **AuthorizationProtocolMismatch** \\\n * **AuthorizationPermissionMismatch** \\\n * **AuthorizationServiceMismatch** \\\n * **AuthorizationResourceTypeMismatch**\n */\nexport type StorageErrorCode = string;\n/** Defines values for GeoReplicationStatusType. */\nexport type GeoReplicationStatusType = \"live\" | \"bootstrap\" | \"unavailable\";\n/** Defines values for ListContainersIncludeType. */\nexport type ListContainersIncludeType = \"metadata\" | \"deleted\" | \"system\";\n/** Defines values for LeaseStatusType. */\nexport type LeaseStatusType = \"locked\" | \"unlocked\";\n/** Defines values for LeaseStateType. */\nexport type LeaseStateType =\n | \"available\"\n | \"leased\"\n | \"expired\"\n | \"breaking\"\n | \"broken\";\n/** Defines values for LeaseDurationType. */\nexport type LeaseDurationType = \"infinite\" | \"fixed\";\n/** Defines values for PublicAccessType. */\nexport type PublicAccessType = \"container\" | \"blob\";\n/** Defines values for SkuName. */\nexport type SkuName =\n | \"Standard_LRS\"\n | \"Standard_GRS\"\n | \"Standard_RAGRS\"\n | \"Standard_ZRS\"\n | \"Premium_LRS\";\n/** Defines values for AccountKind. */\nexport type AccountKind =\n | \"Storage\"\n | \"BlobStorage\"\n | \"StorageV2\"\n | \"FileStorage\"\n | \"BlockBlobStorage\";\n/** Defines values for ListBlobsIncludeItem. */\nexport type ListBlobsIncludeItem =\n | \"copy\"\n | \"deleted\"\n | \"metadata\"\n | \"snapshots\"\n | \"uncommittedblobs\"\n | \"versions\"\n | \"tags\"\n | \"immutabilitypolicy\"\n | \"legalhold\"\n | \"deletedwithversions\";\n/** Defines values for BlobType. */\nexport type BlobType = \"BlockBlob\" | \"PageBlob\" | \"AppendBlob\";\n/** Defines values for CopyStatusType. */\nexport type CopyStatusType = \"pending\" | \"success\" | \"aborted\" | \"failed\";\n/** Defines values for AccessTier. */\nexport type AccessTier =\n | \"P4\"\n | \"P6\"\n | \"P10\"\n | \"P15\"\n | \"P20\"\n | \"P30\"\n | \"P40\"\n | \"P50\"\n | \"P60\"\n | \"P70\"\n | \"P80\"\n | \"Hot\"\n | \"Cool\"\n | \"Archive\";\n/** Defines values for ArchiveStatus. */\nexport type ArchiveStatus =\n | \"rehydrate-pending-to-hot\"\n | \"rehydrate-pending-to-cool\";\n/** Defines values for RehydratePriority. */\nexport type RehydratePriority = \"High\" | \"Standard\";\n/** Defines values for BlobImmutabilityPolicyMode. */\nexport type BlobImmutabilityPolicyMode = \"Mutable\" | \"Unlocked\" | \"Locked\";\n/** Defines values for DeleteSnapshotsOptionType. */\nexport type DeleteSnapshotsOptionType = \"include\" | \"only\";\n/** Defines values for BlobCopySourceTags. */\nexport type BlobCopySourceTags = \"REPLACE\" | \"COPY\";\n/** Defines values for BlockListType. */\nexport type BlockListType = \"committed\" | \"uncommitted\" | \"all\";\n/** Defines values for SequenceNumberActionType. */\nexport type SequenceNumberActionType = \"max\" | \"update\" | \"increment\";\n/** Defines values for QueryFormatType. */\nexport type QueryFormatType = \"delimited\" | \"json\" | \"arrow\" | \"parquet\";\n/** Defines values for SyncCopyStatusType. */\nexport type SyncCopyStatusType = \"success\";\n/** Defines values for EncryptionAlgorithmType. 
*/\nexport type EncryptionAlgorithmType = \"AES256\";\n\n/** Optional parameters. */\nexport interface ServiceSetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setProperties operation. */\nexport type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceSetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders &\n BlobServiceProperties & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobServiceProperties;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetPropertiesHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceGetStatisticsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getStatistics operation. */\nexport type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders &\n BlobServiceStatistics & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobServiceStatistics;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetStatisticsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceListContainersSegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. 
The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify that the container's metadata be returned as part of the response body. */\n include?: ListContainersIncludeType[];\n}\n\n/** Contains response data for the listContainersSegment operation. */\nexport type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders &\n ListContainersSegmentResponse & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListContainersSegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceListContainersSegmentHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ServiceGetUserDelegationKeyOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getUserDelegationKey operation. */\nexport type ServiceGetUserDelegationKeyResponse = ServiceGetUserDelegationKeyHeaders &\n UserDelegationKey & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: UserDelegationKey;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n };\n };\n\n/** Contains response data for the getAccountInfo operation. */\nexport type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceSubmitBatchOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the submitBatch operation. 
*/\nexport type ServiceSubmitBatchResponse = ServiceSubmitBatchHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise<Blob>;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ServiceFilterBlobsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Filters the results to return only to return only blobs whose tags match the specified expression. */\n where?: string;\n}\n\n/** Contains response data for the filterBlobs operation. */\nexport type ServiceFilterBlobsResponse = ServiceFilterBlobsHeaders &\n FilterBlobSegment & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: FilterBlobSegment;\n /** The parsed HTTP response headers. */\n parsedHeaders: ServiceFilterBlobsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n containerEncryptionScope?: ContainerEncryptionScope;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. 
Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies whether data in the container may be accessed publicly and the level of access */\n access?: PublicAccessType;\n}\n\n/** Contains response data for the create operation. */\nexport type ContainerCreateResponse = ContainerCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerDeleteOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the delete operation. */\nexport type ContainerDeleteResponse = ContainerDeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerDeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerSetMetadataOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. 
See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n}\n\n/** Contains response data for the setMetadata operation. */\nexport type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSetMetadataHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerGetAccessPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the getAccessPolicy operation. */\nexport type ContainerGetAccessPolicyResponse = ContainerGetAccessPolicyHeaders &\n SignedIdentifier[] & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: SignedIdentifier[];\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerSetAccessPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies whether data in the container may be accessed publicly and the level of access */\n access?: PublicAccessType;\n /** the acls for the container */\n containerAcl?: SignedIdentifier[];\n}\n\n/** Contains response data for the setAccessPolicy operation. */\nexport type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSetAccessPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRestoreOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-12-12 and later. Specifies the name of the deleted container to restore. */\n deletedContainerName?: string;\n /** Optional. Version 2019-12-12 and later. Specifies the version of the deleted container to restore. */\n deletedContainerVersion?: string;\n}\n\n/** Contains response data for the restore operation. 
*/\nexport type ContainerRestoreResponse = ContainerRestoreHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRestoreHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRenameOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A lease ID for the source path. If specified, the source path must have an active lease and the lease ID must match. */\n sourceLeaseId?: string;\n}\n\n/** Contains response data for the rename operation. */\nexport type ContainerRenameResponse = ContainerRenameHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRenameHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerSubmitBatchOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the submitBatch operation. */\nexport type ContainerSubmitBatchResponse = ContainerSubmitBatchHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise<Blob>;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerSubmitBatchHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerFilterBlobsOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. 
For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Filters the results to return only to return only blobs whose tags match the specified expression. */\n where?: string;\n}\n\n/** Contains response data for the filterBlobs operation. */\nexport type ContainerFilterBlobsResponse = ContainerFilterBlobsHeaders &\n FilterBlobSegment & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: FilterBlobSegment;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerFilterBlobsHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerAcquireLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease duration cannot be changed using renew or change. */\n duration?: number;\n /** Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor (String) for a list of valid GUID string formats. */\n proposedLeaseId?: string;\n}\n\n/** Contains response data for the acquireLease operation. */\nexport type ContainerAcquireLeaseResponse = ContainerAcquireLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerAcquireLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerReleaseLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the releaseLease operation. */\nexport type ContainerReleaseLeaseResponse = ContainerReleaseLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerReleaseLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerRenewLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
*/\n requestId?: string;\n}\n\n/** Contains response data for the renewLease operation. */\nexport type ContainerRenewLeaseResponse = ContainerRenewLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerRenewLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerBreakLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */\n breakPeriod?: number;\n}\n\n/** Contains response data for the breakLease operation. */\nexport type ContainerBreakLeaseResponse = ContainerBreakLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerBreakLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerChangeLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the changeLease operation. */\nexport type ContainerChangeLeaseResponse = ContainerChangeLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerChangeLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface ContainerListBlobFlatSegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. 
The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify one or more datasets to include in the response. */\n include?: ListBlobsIncludeItem[];\n}\n\n/** Contains response data for the listBlobFlatSegment operation. */\nexport type ContainerListBlobFlatSegmentResponse = ContainerListBlobFlatSegmentHeaders &\n ListBlobsFlatSegmentResponse & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListBlobsFlatSegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface ContainerListBlobHierarchySegmentOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Filters the results to return only containers whose name begins with the specified prefix. */\n prefix?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** Include this parameter to specify one or more datasets to include in the response. */\n include?: ListBlobsIncludeItem[];\n}\n\n/** Contains response data for the listBlobHierarchySegment operation. */\nexport type ContainerListBlobHierarchySegmentResponse = ContainerListBlobHierarchySegmentHeaders &\n ListBlobsHierarchySegmentResponse & {\n /** The underlying HTTP response. 
*/\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: ListBlobsHierarchySegmentResponse;\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n };\n };\n\n/** Contains response data for the getAccountInfo operation. */\nexport type ContainerGetAccountInfoResponse = ContainerGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: ContainerGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDownloadOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */\n rangeGetContentMD5?: boolean;\n /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. */\n rangeGetContentCRC64?: boolean;\n}\n\n/** Contains response data for the download operation. */\nexport type BlobDownloadResponse = BlobDownloadHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise<Blob>;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobDownloadHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobGetPropertiesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. 
*/\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n}\n\n/** Contains response data for the getProperties operation. */\nexport type BlobGetPropertiesResponse = BlobGetPropertiesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetPropertiesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDeleteOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Required if the blob has associated snapshots. Specify one of the following two options: include: Delete the base blob and all of its snapshots. only: Delete only the blob's snapshots and not the blob itself */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /** Optional. Only possible value is 'permanent', which specifies to permanently delete a blob if blob soft delete is enabled. */\n blobDeleteType?: string;\n}\n\n/** Contains response data for the delete operation. */\nexport type BlobDeleteResponse = BlobDeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobDeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobUndeleteOptionalParams extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the undelete operation. */\nexport type BlobUndeleteResponse = BlobUndeleteHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobUndeleteHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetExpiryOptionalParams extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The time to set the blob to expiry */\n expiresOn?: string;\n}\n\n/** Contains response data for the setExpiry operation. */\nexport type BlobSetExpiryResponse = BlobSetExpiryHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetExpiryHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetHttpHeadersOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setHttpHeaders operation. */\nexport type BlobSetHttpHeadersResponse = BlobSetHttpHeadersHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetHttpHeadersHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetImmutabilityPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n}\n\n/** Contains response data for the setImmutabilityPolicy operation. */\nexport type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetImmutabilityPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobDeleteImmutabilityPolicyOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the deleteImmutabilityPolicy operation. */\nexport type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlobDeleteImmutabilityPolicyHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetLegalHoldOptionalParams\n extends coreHttp.OperationOptions {\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the setLegalHold operation. */\nexport type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetLegalHoldHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetMetadataOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the setMetadata operation. */\nexport type BlobSetMetadataResponse = BlobSetMetadataHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetMetadataHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobAcquireLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Specifies the duration of the lease, in seconds, or negative one (-1) for a lease that never expires. A non-infinite lease can be between 15 and 60 seconds. A lease duration cannot be changed using renew or change. 
*/\n duration?: number;\n /** Proposed lease ID, in a GUID string format. The Blob service returns 400 (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor (String) for a list of valid GUID string formats. */\n proposedLeaseId?: string;\n}\n\n/** Contains response data for the acquireLease operation. */\nexport type BlobAcquireLeaseResponse = BlobAcquireLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobAcquireLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobReleaseLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the releaseLease operation. */\nexport type BlobReleaseLeaseResponse = BlobReleaseLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobReleaseLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobRenewLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the renewLease operation. */\nexport type BlobRenewLeaseResponse = BlobRenewLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobRenewLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobChangeLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the changeLease operation. */\nexport type BlobChangeLeaseResponse = BlobChangeLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobChangeLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobBreakLeaseOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */\n breakPeriod?: number;\n}\n\n/** Contains response data for the breakLease operation. */\nexport type BlobBreakLeaseResponse = BlobBreakLeaseHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobBreakLeaseHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobCreateSnapshotOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the createSnapshot operation. */\nexport type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobCreateSnapshotHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobStartCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional: Indicates the priority with which to rehydrate an archived blob. */\n rehydratePriority?: RehydratePriority;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Overrides the sealed state of the destination blob. Service version 2019-12-12 and newer. */\n sealBlob?: boolean;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n}\n\n/** Contains response data for the startCopyFromURL operation. */\nexport type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobStartCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. 
Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by x-ms-tags. */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/** Contains response data for the copyFromURL operation. */\nexport type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobAbortCopyFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the abortCopyFromURL operation. */\nexport type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobAbortCopyFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobSetTierOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Optional: Indicates the priority with which to rehydrate an archived blob. */\n rehydratePriority?: RehydratePriority;\n}\n\n/** Contains response data for the setTier operation. */\nexport type BlobSetTierResponse = BlobSetTierHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlobSetTierHeaders;\n };\n};\n\n/** Contains response data for the getAccountInfo operation. */\nexport type BlobGetAccountInfoResponse = BlobGetAccountInfoHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetAccountInfoHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobQueryOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** the query request */\n queryRequest?: QueryRequest;\n}\n\n/** Contains response data for the query operation. */\nexport type BlobQueryResponse = BlobQueryHeaders & {\n /**\n * BROWSER ONLY\n *\n * The response body as a browser Blob.\n * Always `undefined` in node.js.\n */\n blobBody?: Promise;\n /**\n * NODEJS ONLY\n *\n * The response body as a node.js Readable stream.\n * Always `undefined` in the browser.\n */\n readableStreamBody?: NodeJS.ReadableStream;\n\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobQueryHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlobGetTagsOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n}\n\n/** Contains response data for the getTags operation. */\nexport type BlobGetTagsResponse = BlobGetTagsHeaders &\n BlobTags & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlobTags;\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobGetTagsHeaders;\n };\n };\n\n/** Optional parameters. 
*/\nexport interface BlobSetTagsOptionalParams extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */\n versionId?: string;\n /** Blob tags */\n tags?: BlobTags;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the setTags operation. */\nexport type BlobSetTagsResponse = BlobSetTagsHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlobSetTagsHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. 
*/\n legalHold?: boolean;\n /** Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 and 2^63 - 1. */\n blobSequenceNumber?: number;\n}\n\n/** Contains response data for the create operation. */\nexport type PageBlobCreateResponse = PageBlobCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUploadPagesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the uploadPages operation. */\nexport type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUploadPagesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobClearPagesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the clearPages operation. 
*/\nexport type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobClearPagesHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUploadPagesFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** Parameter group */\n sequenceNumberAccessConditions?: SequenceNumberAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the uploadPagesFromURL operation. */\nexport type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUploadPagesFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobGetPageRangesOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. 
Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n}\n\n/** Contains response data for the getPageRanges operation. */\nexport type PageBlobGetPageRangesResponse = PageBlobGetPageRangesHeaders &\n PageList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: PageList;\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobGetPageRangesHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface PageBlobGetPageRangesDiffOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** A string value that identifies the portion of the list of containers to be returned with the next listing operation. The operation returns the ContinuationToken value within the response body if the listing operation did not return all containers remaining to be listed with the current page. The NextMarker value can be used as the value for the marker parameter in a subsequent call to request the next page of list items. The marker value is opaque to the client. */\n marker?: string;\n /** Specifies the maximum number of containers to return. If the request does not specify maxresults, or specifies a value greater than 5000, the server will return up to 5000 items. Note that if the listing operation crosses a partition boundary, then the service will return a continuation token for retrieving the remainder of the results. For this reason, it is possible that the service will return fewer results than specified by maxresults, or than the default of 5000. */\n maxPageSize?: number;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n /** Return only the bytes of the blob in the specified range. */\n range?: string;\n /** Optional in version 2015-07-08 and newer. The prevsnapshot parameter is a DateTime value that specifies that the response will contain only pages that were changed between target blob and previous snapshot. Changed pages include both updated and cleared pages. The target blob may be a snapshot, as long as the snapshot specified by prevsnapshot is the older of the two. Note that incremental snapshots are currently supported only for blobs created on or after January 1, 2016. 
*/\n prevsnapshot?: string;\n /** Optional. This header is only supported in service versions 2019-04-19 and after and specifies the URL of a previous snapshot of the target blob. The response will only contain pages that were changed between the target blob and its previous snapshot. */\n prevSnapshotUrl?: string;\n}\n\n/** Contains response data for the getPageRangesDiff operation. */\nexport type PageBlobGetPageRangesDiffResponse = PageBlobGetPageRangesDiffHeaders &\n PageList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: PageList;\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface PageBlobResizeOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n}\n\n/** Contains response data for the resize operation. */\nexport type PageBlobResizeResponse = PageBlobResizeHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobResizeHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobUpdateSequenceNumberOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Set for page blobs only. The sequence number is a user-controlled value that you can use to track requests. The value of the sequence number must be between 0 and 2^63 - 1. */\n blobSequenceNumber?: number;\n}\n\n/** Contains response data for the updateSequenceNumber operation. */\nexport type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobUpdateSequenceNumberHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface PageBlobCopyIncrementalOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. 
For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the copyIncremental operation. */\nexport type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: PageBlobCopyIncrementalHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobCreateOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n}\n\n/** Contains response data for the create operation. */\nexport type AppendBlobCreateResponse = AppendBlobCreateHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobCreateHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobAppendBlockOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. 
For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the appendBlock operation. */\nexport type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobAppendBlockHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface AppendBlobAppendBlockFromUrlOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n /** Bytes of source data in the specified range. */\n sourceRange?: string;\n}\n\n/** Contains response data for the appendBlockFromUrl operation. */\nexport type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobAppendBlockFromUrlHeaders;\n };\n};\n\n/** Optional parameters. 
*/\nexport interface AppendBlobSealOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n appendPositionAccessConditions?: AppendPositionAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n}\n\n/** Contains response data for the seal operation. */\nexport type AppendBlobSealResponse = AppendBlobSealHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: AppendBlobSealHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobUploadOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n}\n\n/** Contains response data for the upload operation. */\nexport type BlockBlobUploadResponse = BlockBlobUploadHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlockBlobUploadHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobPutBlobFromUrlOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by x-ms-tags. */\n copySourceTags?: BlobCopySourceTags;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Optional, default is true. Indicates if properties from the source blob should be copied. */\n copySourceBlobProperties?: boolean;\n}\n\n/** Contains response data for the putBlobFromUrl operation. */\nexport type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobPutBlobFromUrlHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobStageBlockOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the stageBlock operation. */\nexport type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobStageBlockHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobStageBlockFromURLOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n sourceModifiedAccessConditions?: SourceModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Specify the md5 calculated for the range of bytes that must be read from the copy source. */\n sourceContentMD5?: Uint8Array;\n /** Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. */\n copySourceAuthorization?: string;\n /** Specify the crc64 calculated for the range of bytes that must be read from the copy source. */\n sourceContentCrc64?: Uint8Array;\n /** Bytes of source data in the specified range. */\n sourceRange?: string;\n}\n\n/** Contains response data for the stageBlockFromURL operation. */\nexport type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobStageBlockFromURLHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobCommitBlockListOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** Parameter group */\n cpkInfo?: CpkInfo;\n /** Parameter group */\n blobHttpHeaders?: BlobHttpHeaders;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. 
*/\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value pairs are specified, the operation will copy the metadata from the source blob or file to the destination blob. If one or more name-value pairs are specified, the destination blob is created with the specified metadata, and metadata is not copied from the source blob or file. Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more information. */\n metadata?: { [propertyName: string]: string };\n /** Specifies the date time when the blobs immutability policy is set to expire. */\n immutabilityPolicyExpiry?: Date;\n /** Specifies the immutability policy mode to set on the blob. */\n immutabilityPolicyMode?: BlobImmutabilityPolicyMode;\n /** Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. */\n encryptionScope?: string;\n /** Optional. Indicates the tier to be set on the blob. */\n tier?: AccessTier;\n /** Optional. Used to set blob tags in various blob operations. */\n blobTagsString?: string;\n /** Specified if a legal hold should be set on the blob. */\n legalHold?: boolean;\n /** Specify the transactional md5 for the body, to be validated by the service. */\n transactionalContentMD5?: Uint8Array;\n /** Specify the transactional crc64 for the body, to be validated by the service. */\n transactionalContentCrc64?: Uint8Array;\n}\n\n/** Contains response data for the commitBlockList operation. */\nexport type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The parsed HTTP response headers. */\n parsedHeaders: BlockBlobCommitBlockListHeaders;\n };\n};\n\n/** Optional parameters. */\nexport interface BlockBlobGetBlockListOptionalParams\n extends coreHttp.OperationOptions {\n /** Parameter group */\n leaseAccessConditions?: LeaseAccessConditions;\n /** Parameter group */\n modifiedAccessConditions?: ModifiedAccessConditions;\n /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */\n timeoutInSeconds?: number;\n /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */\n requestId?: string;\n /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */\n snapshot?: string;\n}\n\n/** Contains response data for the getBlockList operation. */\nexport type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders &\n BlockList & {\n /** The underlying HTTP response. */\n _response: coreHttp.HttpResponse & {\n /** The response body as text (string format) */\n bodyAsText: string;\n\n /** The response body as parsed JSON or XML */\n parsedBody: BlockList;\n /** The parsed HTTP response headers. 
*/\n parsedHeaders: BlockBlobGetBlockListHeaders;\n };\n };\n\n/** Optional parameters. */\nexport interface StorageClientOptionalParams\n extends coreHttp.ServiceClientOptions {\n /** Specifies the version of the operation to use for this request. */\n version?: string;\n /** Overrides client endpoint. */\n endpoint?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js new file mode 100644 index 0000000000..c088f044c5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js @@ -0,0 +1,8192 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging" + } + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule" + } + } + } + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String" + } + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite" + } + } + } + } +}; +export const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String" + } + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean" + } + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean" + } + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +export const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + days: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number" + } + } + } + } +}; +export const Metrics = { + serializedName: 
"Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +export const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String" + } + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", + type: { + name: "String" + } + }, + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String" + } + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String" + } + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number" + } + } + } + } +}; +export const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String" + } + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String" + } + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String" + } + } + } + } +}; +export const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String" + } + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String" + } + } + } + } +}; +export const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication" + } + } + } + } +}; +export const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"] + } + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: 
"ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: "ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; +export const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String" + } + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + 
xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean" + } + } + } + } +}; +export const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String" + } + } + } + } +}; +export const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String" + } + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String" + } + }, + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String" + } + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String" + } + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String" + } + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +export const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String" + } + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String" + } + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + } + } + } +}; +export const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag" + } + } + } + } + } + } +}; +export const 
BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +export const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String" + } + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy" + } + } + } + } +}; +export const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", + type: { + name: "String" + } + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String" + } + } + } + } +}; +export const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +export const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String" + } + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: 
"IsCurrentVersion", + type: { + name: "Boolean" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", + type: { + name: "Boolean" + } + } + } + } +}; +export const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: "String" + } + } + } + } +}; +export const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123" + } + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String" + } + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "CopyStatus", + 
xmlName: "CopyStatus", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String" + } + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String" + } + }, + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean" + } + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool" + ] + } + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123" + } + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean" + } + } + } + } +}; +export const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: 
"Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix" + } + } + } + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +export const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + } + } + } +}; +export const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; +export const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + } + } + } +}; +export const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + 
modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number" + } + } + } + } +}; +export const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange" + } + } + } + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +export const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +export const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +export const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String" + } + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String" + } + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + } + } + } +}; +export const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat" + } + } + } + } +}; +export const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"] + } + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration" + } + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration" + } + }, + arrowConfiguration: { + serializedName: 
"ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration" + } + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "any" + } + } + } + } +}; +export const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: { + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String" + } + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String" + } + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String" + } + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean" + } + } + } + } +}; +export const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + } + } + } +}; +export const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField" + } + } + } + } + } + } +}; +export const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String" + } + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String" + } + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number" + } + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number" + } + } + } + } +}; +export const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const 
ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", 
+ type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: 
"ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + 
"leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + 
name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + 
serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + } + } + } +}; +export const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: 
"String" + } + } + } + } +}; +export const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: 
"x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerBreakLeaseHeaders = { + serializedName: "Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } 
+ }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + 
serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +export const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: 
"x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + 
serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +export const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + contentLength: { + serializedName: 
"content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + 
allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: 
"etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } +}; +export const 
BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } +}; +export const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: 
"x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + 
modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +export const BlobBreakLeaseExceptionHeaders = { + serializedName: 
"Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + 
className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + 
modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +export const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + 
serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +export const BlobQueryExceptionHeaders = { 
+ serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + 
serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: 
"ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: 
"last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", 
+ type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; 
+export const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + 
xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: 
"AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } + } + } +}; +export const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: 
"last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockFromURLHeaders = 
{ + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", 
+ xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +export const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +//# sourceMappingURL=mappers.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map new file mode 100644 index 0000000000..a26e233834 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mappers.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/mappers.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAIH,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,0BAA0B;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,oBAAoB,EAAE;gBACpB,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,SAAS;iBACrB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,UAAU;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,UAAU;yBACtB;qBACF;iBACF;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,eAAe;iBAC3B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAA6B;IACvD,cAAc,EAAE,iBAAiB;IACjC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iBAAiB;QAC5B,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,IAAI,EAAE;gBACJ,WAAW,EAAE;oBACX,gBAAgB,EAAE,CAAC;iBACpB;gBACD,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,iBAAiB;iBAC7B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBA
CJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,WAAW,EAAE;oBACX,gBAAgB,EAAE,CAAC;iBACpB;gBACD,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAA6B;IACrD,cAAc,EAAE,eAAe;IAC/B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,eAAe;QAC1B,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,oBAAoB,EAAE;gBACpB,cAAc,EAAE,sBAAsB;gBACtC,OAAO,EAAE,sBAAsB;gBAC/B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA6B;IACpD,cAAc,EAAE,cAAc;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,cAAc;QACzB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,qBAAqB;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,gBAAgB;iBAC5B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA6B;IACtD,cAAc,EAAE,gBAAgB;IAChC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gBAAgB;QAC3B,eAAe,EAAE;YACf,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,WAAW,EAAE,aAAa,CAAC;iBACpD;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,cAAc;gBAC9B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,+BAA+B;IAC/C,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,YAAY;gBACrB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,WAAW;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,eAAe;yBAC3B;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAA6B;IACrD,cAAc,EAAE,eAAe;IAC/B,OAAO,EAAE,WAAW;IACpB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,eAAe;QAC1B,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,S
AAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,qBAAqB;iBACjC;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;iBACrC;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,8BAA8B,EAAE;gBAC9B,cAAc,EAAE,6BAA6B;gBAC7C,OAAO,EAAE,6BAA6B;gBACtC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uCAAuC,EAAE;gBACvC,cAAc,EAAE,uCAAuC;gBACvD,OAAO,EAAE,uCAAuC;gBAChD,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAA6B;IACzD,cAAc,EAAE,mBAAmB;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mBAAmB;QAC9B,eAAe,EAAE;YACf,cAAc,EAAE;gBACd,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,aAAa;gBAC7B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,cAAc;gBAC9B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ
,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAA6B;IACzD,cAAc,EAAE,mBAAmB;IACnC,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mBAAmB;QAC9B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,MAAM;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,gBAAgB;yBAC5B;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA6B;IACtD,cAAc,EAAE,gBAAgB;IAChC,OAAO,EAAE,MAAM;IACf,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gBAAgB;QAC3B,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,OAAO,EAAE,MAAM;IACf,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,KAAK;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,SAAS;yBACrB;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA6B;IAC/C,cAAc,EAAE,SAAS;IACzB,OAAO,EAAE,KAAK;IACd,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,SAAS;QACpB,eAAe,EAAE;YACf,GAAG,EAAE;gBACH,cAAc,EAAE,KAAK;gBACrB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,KAAK;gBACd,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA6B;IACxD,cAAc,EAAE,kBAAkB;IAClC,OAAO,EAAE,kBAAkB;IAC3B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kBAAkB;QAC7B,eAAe,EAAE;YACf,EAAE,EAAE;gBACF,cAAc,EAAE,IAAI;gBACpB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,IAAI;gBACb,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,cAAc;iBAC1B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA6B;IACpD,cAAc,EAAE,cAAc;IAC9B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,cAAc;QACzB,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,OAAO;gBACvB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,8BAA8B;IAC9C,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QA
AQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,qBAAqB;iBACjC;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,qBAAqB;IACrC,OAAO,EAAE,OAAO;IAChB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,cAAc,EAAE,MAAM;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,kBAAkB;yBAC9B;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA6B;IACxD,cAAc,EAAE,kBAAkB;IAClC,OAAO,EAAE,MAAM;IACf,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kBAAkB;QAC7B,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,wBAAwB;iBACpC;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,wBAAwB;IACxC,OAAO,EAAE,YAAY;IACrB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE
,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,IAAI;wBACJ,IAAI;wBACJ,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,MAAM;wBACN,SAAS;qBACV;iBACF;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,0BAA0B;wBAC1B,2BAA2B;qBAC5B;iBACF;aACF;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,Q
AAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,sBAAsB;gBACtC,OAAO,EAAE,sBAAsB;gBAC/B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,UAAU;gBAC1B,OAAO,EAAE,UAAU;gBACnB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;iBACpC;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,6BAA6B;gBAC7C,OAAO,EAAE,6BAA6B;gBACtC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,mCAAmC;IACnD,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,iBAAiB;gBAC1B,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,eAAe;gBAC/B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,eAAe;gBACxB,cAAc,EAAE,IAAI;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,0BAA0B;iBACtC;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAA6B;IAChE,cAAc,EAAE,0BAA0B;IAC1C,OAAO,EAAE,OAAO;IAChB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0BAA0B;QACrC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,cAAc,EAAE,YAAY;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,YAAY;yBACxB;qBACF;iBACF;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,cAAc,EAAE,MAAM;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,kBAAkB;yBAC9B;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAA6B;IAClD,cAAc,EAAE,YAAY;IAC5B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,YAAY;QACvB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,UAAU;iBACtB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAA6B;IACvD,cAAc,EAAE,iBAAiB;IACjC,OAAO,EAAE,WAAW;IACpB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iBAAiB;QAC5B,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,cAAc,EAAE,WAAW;gBAC3B,IAAI,EAAE;oBACJ,
IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,QAAQ;yBACf;qBACF;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,cAAc,EAAE,aAAa;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,QAAQ;yBACf;qBACF;iBACF;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,cAAc,EAAE,QAAQ;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,QAAQ;yBACf;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA6B;IACjD,cAAc,EAAE,WAAW;IAC3B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,WAAW;QACtB,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,OAAO;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,OAAO;yBACnB;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,OAAO;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,OAAO;yBACnB;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA6B;IAC7C,cAAc,EAAE,OAAO;IACvB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,OAAO;QAClB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA6B;IAChD,cAAc,EAAE,UAAU;IAC1B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,UAAU;QACrB,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,cAAc,EAAE,WAAW;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,WAAW;yBACvB;qBACF;iBACF;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,cAAc,EAAE,YAAY;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,YAAY;yBACxB;qBACF;iBACF;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA6B;IACjD,cAAc,EAAE,WAAW;IAC3B,OAAO,EAAE,WAAW;IACpB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,WAAW;QACtB,eAAe,EAAE;YACf,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,GAAG,EAAE;gBACH,cAAc,EAAE,KAAK;gBACrB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,KAAK;gBACd,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAA6B;IAClD,cAAc,EAAE,YAAY;IAC5B,OAAO,EAAE,YAAY;IACrB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,YAAY;QACvB,eAAe,EAAE;YACf,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,GAAG,EAAE;gBACH,cAAc,EAAE,KAAK;gBACrB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,KAAK;gBACd,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA6B;IACpD,cAAc,EAAE,cAAc;IAC9B,OAAO,EAAE,cAAc;IACvB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,cAAc;QACzB,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;o
BACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,oBAAoB;iBAChC;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,oBAAoB;iBAChC;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,oBAAoB;IACpC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,OAAO,EAAE,QAAQ;gBACjB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,aAAa;iBACzB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAA6B;IACnD,cAAc,EAAE,aAAa;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,aAAa;QACxB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,CAAC;iBACzD;aACF;YACD,0BAA0B,EAAE;gBAC1B,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,4BAA4B;iBACxC;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,uBAAuB;iBACnC;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,oBAAoB;iBAChC;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,KAAK;iBACZ;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,4BAA4B;IAC5C,OAAO,EAAE,4BAA4B;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,YAAY;gBAC5B,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,YAAY;gBACrB,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,uBAAuB;IACvC,OAAO,EAAE,uBAAuB;IAChC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,oBAAoB;IACpC,OAAO,EAAE,oBAAoB;IAC7B,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,MAAM,EAAE;gBACN,cAAc,EAAE,QAAQ;gBACxB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,QAAQ;gBACjB,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,OAAO;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE;4BACJ,IAAI,EAAE,WAAW;4BACjB,SAAS,EAAE,YAAY;yBACxB;qBACF;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAA6B;IAClD,cAAc,EAAE,YAAY;IAC5B,OAAO,EAAE,OAAO;IAChB,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,YAAY;QACvB,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,QAAQ,EAAE,IAAI;gBACd,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,KAAK,EAAE;gBACL,cAAc,EAAE,OAAO;gBACvB,OAAO,EAAE,OAAO;gBAChB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,
EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC
,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2CAA2C,GAA6B;IACnF,cAAc,EAAE,8CAA8C;IAC9D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6CAA6C;QACxD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;qBACd;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;qBACnB;iBACF;aACF;YACD,8BAA8B,EAAE;gBAC9B,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;
YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,gCAAgC;IAChD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,YAAY;aACrC;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;iBACrC;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAA
c,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,uCAAuC,EAAE;gBACvC,cAAc,EAAE,gDAAgD;gBAChE,OAAO,EAAE,gDAAgD;gBACzD,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sCAAsC,GAA6B;IAC9E,cAAc,EAAE,yCAAyC;IACzD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wCAAwC;QACnD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;iBACrC;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IA
AI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iB
ACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACj
C,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAA
I,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iDAAiD,GAA6B;IACzF,cAAc,EAAE,oDAAoD;IACpE,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mDAAmD;QAC9D,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;qBACd;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,SA
AS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;qBACnB;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,sBAAsB;IACtC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,YAAY;aACrC;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,UAAU;aACnC;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE
,UAAU,CAAC;iBACtC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAA6B;IAChE,cAAc,EAAE,2BAA2B;IAC3C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0BAA0B;QACrC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,YAAY;aACrC;YACD,yBAAyB,EAAE;gBACzB,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,SAAS;gBACzB,OAAO,EAAE,SAAS;gBAClB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;gBACD,sBAAsB,EAAE,UAAU;aACnC;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAA
M;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,gCAAgC;gBAChD,OAAO,EAAE,gCAAgC;gBACzC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO
,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;iBACpC;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,2BAA2B,EAAE;gBAC3B,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAA6B;IACzD,cAAc,EAAE,oBAAoB;IACpC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mBAAmB;QAC9B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAA6B;IAC3D,cAAc,EAAE,sBAAsB;IACtC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qBAAqB;QAChC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aAC
F;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAA6B;IAC5D,cAAc,EAAE,uBAAuB;IACvC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sBAAsB;QACjC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,6BAA6B,GAA6B;IACrE,cAAc,EAAE,gCAAgC;IAChD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,+BAA+B;QAC1C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,wBAAwB,EAAE;gBACxB,cAAc,EAAE,qCAAqC;gBACrD,OAAO,EAAE,qCAAqC;gBAC9C,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,sBAAsB,EAAE;gBACtB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,E
AAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;iBACjD;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yCAAyC,GAA6B;IACjF,cAAc,EAAE,4CAA4C;IAC5D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2CAA2C;QACtD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cA
Ac,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,
EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,QAAQ,EAAE;gBACR,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD
,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,YAAY,EAAE,SAAS;gBACvB,UAAU,EAAE,IAAI;gBAChB,cAAc,EAAE,kBAAkB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;A
AEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oCAAoC,GAA6B;IAC5E,cAAc,EAAE,uCAAuC;IACvD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,sCAAsC;QACjD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;qBACd;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;qBACnB;iBACF;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA6B;IACxD,cAAc,EAAE,mBAAmB;IACnC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kBAAkB;QAC7B,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ
,EAAE;gBACR,cAAc,EAAE,WAAW;gBAC3B,OAAO,EAAE,WAAW;gBACpB,IAAI,EAAE;oBACJ,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;iBACpC;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,gBAAgB;gBAChC,OAAO,EAAE,gBAAgB;gBACzB,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;iBACvD;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,qBAAqB,EAAE;gBACrB,cAAc,EAAE,8BAA8B;gBAC9C,OAAO,EAAE,8BAA8B;gBACvC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,aAAa,EAAE;gBACb,cAAc,EAAE,qBAAqB;gBACrC,OAAO,EAAE,qBAAqB;gBAC9B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;iBACrC;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;qBACT;iBACF;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;iBACtC;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf
,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,cAAc,EAAE;gBACd,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAA6B;IAC1D,cAAc,EAAE,qBAAqB;IACrC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oBAAoB;QAC/B,eAAe,EAAE;YACf,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,2BAA2B,GAA6B;IACnE,cAAc,EAAE,8BAA8B;IAC9C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,6BAA6B;QACxC,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EA
AE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAA6B;IACjE,cAAc,EAAE,4BAA4B;IAC5C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2BAA2B;QACtC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QA
AQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kCAAkC,GAA6B;IAC1E,cAAc,EAAE,qCAAqC;IACrD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,oCAAoC;QAC/C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0CAA0C,GAA6B;IAClF,cAAc,EAAE,6CAA6C;IAC7D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4CAA4C;QACvD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE
,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yCAAyC,GAA6B;IACjF,cAAc,EAAE,4CAA4C;IAC5D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,2CAA2C;QACtD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,kBAAkB,EAAE;gBAClB,cAAc,EAAE,2BAA2B;gBAC3C,OAAO,EAAE,2BAA2B;gBACpC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,
IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,MAAM,EAAE;gBACN,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;iBAC3D;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAA6B;IAC/D,cAAc,EAAE,0BAA0B;IAC1C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yBAAyB;QACpC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gCAAgC,GAA6B;IACxE,cAAc,EAAE,mCAAmC;IACnD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,kCAAkC;QAC7C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,
IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,gBAAgB,EAAE;gBAChB,cAAc,EAAE,yBAAyB;gBACzC,OAAO,EAAE,yBAAyB;gBAClC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,uBAAuB,EAAE;gBACvB,cAAc,EAAE,iCAAiC;gBACjD,OAAO,EAAE,iCAAiC;gBAC1C,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBA
C1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4CAA4C,GAA6B;IACpF,cAAc,EAAE,+CAA+C;IAC/D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8CAA8C;QACzD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAA6B;IAC7D,cAAc,EAAE,wBAAwB;IACxC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uBAAuB;QAClC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,QAAQ,EAAE;gBACR,cAAc,EAAE,kBAAkB;gBAClC,OAAO,EAAE,kBAAkB;gBAC3B,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAA6B;IAC9D,cAAc,EAAE,yBAAyB;IACzC,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,wBAAwB;QACnC,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAA6B;IACtE,cAAc,EAAE,iCAAiC;IACjD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,gCAAgC;QAC3C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE
;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uCAAuC,GAA6B;IAC/E,cAAc,EAAE,0CAA0C;IAC1D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,yCAAyC;QACpD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0BAA0B,GAA6B;IAClE,cAAc,EAAE,6BAA6B;IAC7C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4BAA4B;QACvC,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mCAAmC,GAA6B;IAC3E,cAAc,EAAE,sCAAsC;IACtD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,qCAAqC;QAChD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAA6B;IACzE,cAAc,EAAE,oCAAoC;IACpD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,mCAAmC;QAC9C,eAAe,EAAE;YACf,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAA
c;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,0CAA0C,GAA6B;IAClF,cAAc,EAAE,6CAA6C;IAC7D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,4CAA4C;QACvD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,+BAA+B,GAA6B;IACvE,cAAc,EAAE,kCAAkC;IAClD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,iCAAiC;QAC5C,eAAe,EAAE;YACf,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,UAAU,EAAE;gBACV,cAAc,EAAE,aAAa;gBAC7B,OAAO,EAAE,aAAa;gBACtB,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,oBAAoB;gBACpC,OAAO,EAAE,oBAAoB;gBAC7B,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;iBAClB;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,+BAA+B;gBAC/C,OAAO,EAAE,+BAA+B;gBACxC,IAAI,EAAE;oBACJ,IAAI,EAAE,SAAS;iBAChB;aACF;YACD,mBAAmB,EAAE;gBACnB,cAAc,EAAE,4BAA4B;gBAC5C,OAAO,EAAE,4BAA4B;gBACrC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,uBAAuB;gBACvC,OAAO,EAAE,uBAAuB;gBAChC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAA6B;IAChF,cAAc,EAAE,2CAA2C;IAC3D,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,0CAA0C;QACrD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,4BAA4B,GAA6B;IACpE,cAAc,EAAE,+BAA+B;IAC/C,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,8BAA8B;QACzC,eAAe,EAAE;YACf,YAAY,EAAE;gBACZ,cAAc,EAAE,eAAe;gBAC/B,OAAO,EAAE,eAAe;gBACxB,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,WAAW,EAAE;gBACX,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,iBAAiB,EAAE;gBACjB,cAAc,EAAE,0BAA0B;gBAC1C,OAAO,EAAE,0BAA0B;gBACnC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,eAAe,EAAE;gBACf,cAAc,EAAE,wBAAwB;gBACxC,OAAO,EAAE,wBAAwB;gBACjC,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;Y
ACD,OAAO,EAAE;gBACP,cAAc,EAAE,cAAc;gBAC9B,OAAO,EAAE,cAAc;gBACvB,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;YACD,IAAI,EAAE;gBACJ,cAAc,EAAE,MAAM;gBACtB,OAAO,EAAE,MAAM;gBACf,IAAI,EAAE;oBACJ,IAAI,EAAE,iBAAiB;iBACxB;aACF;YACD,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qCAAqC,GAA6B;IAC7E,cAAc,EAAE,wCAAwC;IACxD,IAAI,EAAE;QACJ,IAAI,EAAE,WAAW;QACjB,SAAS,EAAE,uCAAuC;QAClD,eAAe,EAAE;YACf,SAAS,EAAE;gBACT,cAAc,EAAE,iBAAiB;gBACjC,OAAO,EAAE,iBAAiB;gBAC1B,IAAI,EAAE;oBACJ,IAAI,EAAE,QAAQ;iBACf;aACF;SACF;KACF;CACF,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\nexport const BlobServiceProperties: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceProperties\",\n xmlName: \"StorageServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n serializedName: \"Logging\",\n xmlName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n serializedName: \"HourMetrics\",\n xmlName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n serializedName: \"MinuteMetrics\",\n xmlName: \"MinuteMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n cors: {\n serializedName: \"Cors\",\n xmlName: \"Cors\",\n xmlIsWrapped: true,\n xmlElementName: \"CorsRule\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n serializedName: \"DefaultServiceVersion\",\n xmlName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n serializedName: \"DeleteRetentionPolicy\",\n xmlName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n serializedName: \"StaticWebsite\",\n xmlName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\n\nexport const Logging: coreHttp.CompositeMapper = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n required: true,\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n serializedName: \"Delete\",\n required: true,\n xmlName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n serializedName: \"Read\",\n required: true,\n xmlName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n serializedName: \"Write\",\n required: true,\n xmlName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const RetentionPolicy: coreHttp.CompositeMapper = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n constraints: {\n InclusiveMinimum: 1\n },\n 
serializedName: \"Days\",\n xmlName: \"Days\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const Metrics: coreHttp.CompositeMapper = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n serializedName: \"IncludeAPIs\",\n xmlName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const CorsRule: coreHttp.CompositeMapper = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n serializedName: \"AllowedOrigins\",\n required: true,\n xmlName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n serializedName: \"AllowedMethods\",\n required: true,\n xmlName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n serializedName: \"AllowedHeaders\",\n required: true,\n xmlName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n serializedName: \"ExposedHeaders\",\n required: true,\n xmlName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"MaxAgeInSeconds\",\n required: true,\n xmlName: \"MaxAgeInSeconds\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const StaticWebsite: coreHttp.CompositeMapper = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n serializedName: \"IndexDocument\",\n xmlName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n serializedName: \"ErrorDocument404Path\",\n xmlName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n serializedName: \"DefaultIndexDocumentPath\",\n xmlName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const StorageError: coreHttp.CompositeMapper = {\n serializedName: \"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n serializedName: \"Message\",\n xmlName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n serializedName: \"Code\",\n xmlName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobServiceStatistics: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceStatistics\",\n xmlName: \"StorageServiceStats\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n geoReplication: {\n serializedName: \"GeoReplication\",\n xmlName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\n\nexport const GeoReplication: coreHttp.CompositeMapper = {\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n serializedName: \"Status\",\n required: true,\n xmlName: 
\"Status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"live\", \"bootstrap\", \"unavailable\"]\n }\n },\n lastSyncOn: {\n serializedName: \"LastSyncTime\",\n required: true,\n xmlName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ListContainersSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListContainersSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n serializedName: \"ContainerItems\",\n required: true,\n xmlName: \"Containers\",\n xmlIsWrapped: true,\n xmlElementName: \"Container\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerItem: coreHttp.CompositeMapper = {\n serializedName: \"ContainerItem\",\n xmlName: \"Container\",\n type: {\n name: \"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n }\n }\n }\n};\n\nexport const ContainerProperties: coreHttp.CompositeMapper = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n publicAccess: {\n serializedName: \"PublicAccess\",\n xmlName: \"PublicAccess\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"HasImmutabilityPolicy\",\n xmlName: 
\"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"HasLegalHold\",\n xmlName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"DefaultEncryptionScope\",\n xmlName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n serializedName: \"DenyEncryptionScopeOverride\",\n xmlName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"ImmutableStorageWithVersioningEnabled\",\n xmlName: \"ImmutableStorageWithVersioningEnabled\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const KeyInfo: coreHttp.CompositeMapper = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n required: true,\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const UserDelegationKey: coreHttp.CompositeMapper = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: \"Composite\",\n className: \"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n serializedName: \"SignedOid\",\n required: true,\n xmlName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n serializedName: \"SignedTid\",\n required: true,\n xmlName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n serializedName: \"SignedStart\",\n required: true,\n xmlName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n serializedName: \"SignedExpiry\",\n required: true,\n xmlName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n serializedName: \"SignedService\",\n required: true,\n xmlName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n serializedName: \"SignedVersion\",\n required: true,\n xmlName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobSegment: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobSegment\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n where: {\n serializedName: \"Where\",\n required: true,\n xmlName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n serializedName: \"Blobs\",\n required: true,\n xmlName: \"Blobs\",\n xmlIsWrapped: true,\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobItem: coreHttp.CompositeMapper = {\n 
serializedName: \"FilterBlobItem\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n serializedName: \"Tags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\n\nexport const BlobTags: coreHttp.CompositeMapper = {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n serializedName: \"BlobTagSet\",\n required: true,\n xmlName: \"TagSet\",\n xmlIsWrapped: true,\n xmlElementName: \"Tag\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobTag: coreHttp.CompositeMapper = {\n serializedName: \"BlobTag\",\n xmlName: \"Tag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n serializedName: \"Key\",\n required: true,\n xmlName: \"Key\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const SignedIdentifier: coreHttp.CompositeMapper = {\n serializedName: \"SignedIdentifier\",\n xmlName: \"SignedIdentifier\",\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n serializedName: \"Id\",\n required: true,\n xmlName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n serializedName: \"AccessPolicy\",\n xmlName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\n\nexport const AccessPolicy: coreHttp.CompositeMapper = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n serializedName: \"Permission\",\n xmlName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsFlatSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsFlatSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n continuationToken: {\n serializedName: 
\"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobFlatListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobFlatListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobItemInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobItemInternal\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n required: true,\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n serializedName: \"Snapshot\",\n required: true,\n xmlName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"VersionId\",\n xmlName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"IsCurrentVersion\",\n xmlName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n blobTags: {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n serializedName: \"ObjectReplicationMetadata\",\n xmlName: \"OrMetadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n hasVersionsOnly: {\n serializedName: \"HasVersionsOnly\",\n xmlName: \"HasVersionsOnly\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobName: coreHttp.CompositeMapper = {\n serializedName: \"BlobName\",\n type: {\n name: \"Composite\",\n className: \"BlobName\",\n modelProperties: {\n encoded: {\n serializedName: \"Encoded\",\n xmlName: \"Encoded\",\n xmlIsAttribute: true,\n type: {\n name: \"Boolean\"\n }\n },\n content: {\n serializedName: \"content\",\n xmlName: \"content\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobPropertiesInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobPropertiesInternal\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n serializedName: \"Creation-Time\",\n xmlName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"Content-Length\",\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"Content-Type\",\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n serializedName: \"Content-Encoding\",\n 
xmlName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"Content-Language\",\n xmlName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n serializedName: \"Content-Disposition\",\n xmlName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"Cache-Control\",\n xmlName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"BlobType\",\n xmlName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n copyId: {\n serializedName: \"CopyId\",\n xmlName: \"CopyId\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"CopyStatus\",\n xmlName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n copySource: {\n serializedName: \"CopySource\",\n xmlName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"CopyProgress\",\n xmlName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n serializedName: \"CopyCompletionTime\",\n xmlName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"CopyStatusDescription\",\n xmlName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n serializedName: \"ServerEncrypted\",\n xmlName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n serializedName: \"IncrementalCopy\",\n xmlName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"DestinationSnapshot\",\n xmlName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n serializedName: \"AccessTier\",\n xmlName: \"AccessTier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n },\n accessTierInferred: {\n serializedName: \"AccessTierInferred\",\n xmlName: \"AccessTierInferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"ArchiveStatus\",\n xmlName: \"ArchiveStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"rehydrate-pending-to-hot\",\n 
\"rehydrate-pending-to-cool\"\n ]\n }\n },\n customerProvidedKeySha256: {\n serializedName: \"CustomerProvidedKeySha256\",\n xmlName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"EncryptionScope\",\n xmlName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"AccessTierChangeTime\",\n xmlName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n serializedName: \"TagCount\",\n xmlName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry-Time\",\n xmlName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"Sealed\",\n xmlName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"RehydratePriority\",\n xmlName: \"RehydratePriority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessedOn: {\n serializedName: \"LastAccessTime\",\n xmlName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"ImmutabilityPolicyUntilDate\",\n xmlName: \"ImmutabilityPolicyUntilDate\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"ImmutabilityPolicyMode\",\n xmlName: \"ImmutabilityPolicyMode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"LegalHold\",\n xmlName: \"LegalHold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n serializedName: \"Delimiter\",\n xmlName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobHierarchyListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobHierarchyListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n serializedName: \"BlobPrefixes\",\n xmlName: \"BlobPrefixes\",\n xmlElementName: \"BlobPrefix\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n 
xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobPrefix: coreHttp.CompositeMapper = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n }\n }\n }\n};\n\nexport const BlockLookupList: coreHttp.CompositeMapper = {\n serializedName: \"BlockLookupList\",\n xmlName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n serializedName: \"Committed\",\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n serializedName: \"Uncommitted\",\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n serializedName: \"Latest\",\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlockList: coreHttp.CompositeMapper = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n serializedName: \"CommittedBlocks\",\n xmlName: \"CommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n serializedName: \"UncommittedBlocks\",\n xmlName: \"UncommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const Block: coreHttp.CompositeMapper = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n serializedName: \"Size\",\n required: true,\n xmlName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const PageList: coreHttp.CompositeMapper = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n modelProperties: {\n pageRange: {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageRange: coreHttp.CompositeMapper = {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n type: {\n name: \"Composite\",\n className: \"PageRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: 
\"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ClearRange: coreHttp.CompositeMapper = {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const QueryRequest: coreHttp.CompositeMapper = {\n serializedName: \"QueryRequest\",\n xmlName: \"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n serializedName: \"QueryType\",\n required: true,\n xmlName: \"QueryType\",\n type: {\n name: \"String\"\n }\n },\n expression: {\n serializedName: \"Expression\",\n required: true,\n xmlName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n serializedName: \"InputSerialization\",\n xmlName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n serializedName: \"OutputSerialization\",\n xmlName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\n\nexport const QuerySerialization: coreHttp.CompositeMapper = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\",\n modelProperties: {\n format: {\n serializedName: \"Format\",\n xmlName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\n\nexport const QueryFormat: coreHttp.CompositeMapper = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"delimited\", \"json\", \"arrow\", \"parquet\"]\n }\n },\n delimitedTextConfiguration: {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n },\n parquetTextConfiguration: {\n serializedName: \"ParquetTextConfiguration\",\n xmlName: \"ParquetTextConfiguration\",\n type: {\n name: \"any\"\n }\n }\n }\n }\n};\n\nexport const DelimitedTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n serializedName: \"ColumnSeparator\",\n xmlName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n serializedName: \"FieldQuote\",\n xmlName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n serializedName: \"EscapeChar\",\n xmlName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n 
headersPresent: {\n serializedName: \"HeadersPresent\",\n xmlName: \"HasHeaders\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const JsonTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ArrowConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n serializedName: \"Schema\",\n required: true,\n xmlName: \"Schema\",\n xmlIsWrapped: true,\n xmlElementName: \"Field\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const ArrowField: coreHttp.CompositeMapper = {\n serializedName: \"ArrowField\",\n xmlName: \"Field\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n serializedName: \"Precision\",\n xmlName: \"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n serializedName: \"Scale\",\n xmlName: \"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const 
ServiceGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n 
serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n xmlName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: 
\"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n 
allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n xmlName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n xmlName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"x-ms-immutable-storage-with-versioning-enabled\",\n xmlName: \"x-ms-immutable-storage-with-versioning-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"ContainerRenameExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n 
className: \"ContainerAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: 
\"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: 
\"Container_listBlobFlatSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n 
modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: 
\"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: 
\"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n xmlName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n xmlName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n 
name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n xmlName: \"x-ms-access-tier-inferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n xmlName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n xmlName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: 
\"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n 
},\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiry: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyExceptionHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n 
versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: 
\"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n 
name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n xmlName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n defaultValue: \"success\",\n isConstant: true,\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n 
name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n 
}\n};\n\nexport const BlobGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n 
\"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsHeaders\",\n type: {\n name: 
\"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: 
\"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesExceptionHeaders: coreHttp.CompositeMapper = {\n 
serializedName: \"PageBlob_clearPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: 
\"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeExceptionHeaders: coreHttp.CompositeMapper = {\n 
serializedName: \"PageBlob_resizeExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"PageBlobCopyIncrementalExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n 
},\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"AppendBlobAppendBlockFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadExceptionHeaders\",\n type: {\n 
name: \"Composite\",\n className: \"BlockBlobUploadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n 
encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: 
{\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js new file mode 100644 index 0000000000..79defc2b99 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js @@ -0,0 +1,1608 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import { QueryCollectionFormat } from "@azure/core-http"; +import { BlobServiceProperties as BlobServicePropertiesMapper, KeyInfo as KeyInfoMapper, QueryRequest as QueryRequestMapper, BlobTags as BlobTagsMapper, BlockLookupList as BlockLookupListMapper } from "../models/mappers"; +export const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +export const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServicePropertiesMapper +}; +export const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true +}; +export const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } +}; +export const version = { + parameterPath: "version", + mapper: { + defaultValue: "2021-08-06", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } +}; +export const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; +export const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } +}; +export const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } +}; +export const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" 
+ } + } +}; +export const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: QueryCollectionFormat.Csv +}; +export const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfoMapper +}; +export const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +export const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } +}; +export const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } +}; +export const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } +}; +export const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +export const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } +}; +export const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } +}; +export const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } +}; +export const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } +}; +export const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +export const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: 
"If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } +}; +export const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } +}; +export const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } +}; +export const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } +}; +export const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } +}; +export const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } +}; +export const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +export const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +export const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: 
"x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } +}; +export const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +export const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +export const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: QueryCollectionFormat.Csv +}; +export const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } +}; +export const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } +}; +export const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } +}; +export const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +export const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } +}; +export const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } +}; +export const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } +}; +export const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } +}; +export const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } +}; +export const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } +}; +export const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + 
serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } +}; +export const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } +}; +export const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } +}; +export const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } +}; +export const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } +}; +export const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } +}; +export const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } +}; +export const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } +}; +export const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } +}; +export const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } +}; +export const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } +}; +export const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } +}; +export const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } +}; +export const comp13 = { 
+ parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +export const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + } +}; +export const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +export const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } +}; +export const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +export const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } +}; +export const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } +}; +export const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } +}; +export const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +export const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } +}; +export const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } +}; +export const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +export const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, 
+ serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } +}; +export const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } +}; +export const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } +}; +export const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } +}; +export const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } +}; +export const copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } +}; +export const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +export const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequestMapper +}; +export const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTagsMapper +}; +export const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } +}; +export const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } +}; +export const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } +}; +export const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } +}; +export const contentType1 = { + parameterPath: 
["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +export const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +export const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +export const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +export const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } +}; +export const ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } +}; +export const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } +}; +export const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +export const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +export const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +export const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } +}; +export const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +export const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } +}; +export const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } +}; +export const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: 
{ + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } +}; +export const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } +}; +export const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } +}; +export const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +export const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +export const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } +}; +export const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } +}; +export const blocks = { + parameterPath: "blocks", + mapper: BlockLookupListMapper +}; +export const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +export const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } +}; +//# sourceMappingURL=parameters.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map new file mode 100644 index 0000000000..0a2126fb07 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"parameters.js","sourceRoot":"","sources":["../../../../../../src/generated/src/models/parameters.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EAIL,qBAAqB,EACtB,MAAM,kBAAkB,CAAC;AAC1B,OAAO,EACL,qBAAqB,IAAI,2BAA2B,EACpD,OAAO,IAAI,aAAa,EACxB,YAAY,IAAI,kBAAkB,EAClC,QAAQ,IAAI,cAAc,EAC1B,eAAe,IAAI,qBAAqB,EACzC,MAAM,mBAAmB,CAAC;AAE3B,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAAuB;IACvD,aAAa,EAAE,uBAAuB;IACtC,MAAM,EAAE,2BAA2B;CACpC,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,GAAG,GAA0B;IACxC,aAAa,EAAE,KAAK;IACpB,MAAM,EAAE;QACN,cAAc,EAAE,KAAK;QACrB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,KAAK;QACd,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;IACD,YAAY,EAAE,IAAI;CACnB,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAA4B;IAC3C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAA4B;IACvD,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;IAC9C,MAAM,EAAE;QACN,WAAW,EAAE;YACX,gBAAgB,EAAE,CAAC;SACpB;QACD,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAA4B;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,WAAW,EAAE;YACX,gBAAgB,EAAE,CAAC;SACpB;QACD,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,cAAc,EAAE,2BAA2B;QAC3C,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,UAAU,EAAE,SAAS,EAAE,QAAQ,CAAC;iBACjD;aACF;SACF;KACF;IACD,gBAAgB,EAAE,qBAAqB,CAAC,GAAG;CAC5C,CAAC;AAEF,MAAM,CAAC
,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE,aAAa;CACtB,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,mBAAmB;QACjC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,MAAM;QACtB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,MAAM;QACf,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,eAAe;IAC9B,MAAM,EAAE;QACN,cAAc,EAAE,gBAAgB;QAChC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,gBAAgB;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,sBAAsB;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;IACnC,MAAM,EAAE;QACN,cAAc,EAAE,OAAO;QACvB,OAAO,EAAE,OAAO;QAChB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,SAAS;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,YAAY;YAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;SACpC;QACD,sBAAsB,EAAE,YAAY;KACrC;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,CAAC,SAAS,EAAE,QAAQ,CAAC;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;SACrC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,wBAAwB;KACzB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,8BAA8B,GAAuB;IAChE,aAAa,EAAE;QACb,SAAS;QACT,0BAA0B;QAC1B,gCAAgC;KACjC;IACD,MAAM,EAAE;QACN,cAAc,EAAE,qCAAqC;QACrD,OAAO,EAAE,qCAAqC;QAC9C,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,uBAAuB,EAAE,SAAS,CAAC;IAC9D,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,iBAAiB,CAAC;IACzE,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,mBAAmB,CAAC;IAC3E,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,KAAK;QACnB,UA
AU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,mBAAmB;QAC5B,YAAY,EAAE,IAAI;QAClB,cAAc,EAAE,kBAAkB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,WAAW;oBACjB,SAAS,EAAE,kBAAkB;iBAC9B;aACF;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC;IAClD,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,gCAAgC;QAChD,OAAO,EAAE,gCAAgC;QACzC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAA4B;IAC5C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,qBAAqB;IACpC,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,eAAe,CAAC;IAC3C,MAAM,EAAE;QACN,cAAc,EAAE,sBAAsB;QACtC,OAAO,EAAE,sBAAsB;QAC/B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,SAAS;QACvB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,mBAAmB;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,iBAAiB;IAChC,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,QAAQ,EAAE,IAAI;QACd,OAAO,
EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,CAAC;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,OAAO,EAAE,SAAS;QAClB,cAAc,EAAE,sBAAsB;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,UAAU;YAChB,OAAO,EAAE;gBACP,IAAI,EAAE;oBACJ,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE;wBACb,MAAM;wBACN,SAAS;wBACT,UAAU;wBACV,WAAW;wBACX,kBAAkB;wBAClB,UAAU;wBACV,MAAM;wBACN,oBAAoB;wBACpB,WAAW;wBACX,qBAAqB;qBACtB;iBACF;aACF;SACF;KACF;IACD,gBAAgB,EAAE,qBAAqB,CAAC,GAAG;CAC5C,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA4B;IAChD,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,UAAU;QAC1B,OAAO,EAAE,UAAU;QACnB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAA4B;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,CAAC,SAAS,EAAE,OAAO,CAAC;IACnC,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,CAAC,SAAS,EAAE,sBAAsB,CAAC;IAClD,MAAM,EAAE;QACN,cAAc,EAAE,8BAA8B;QAC9C,OAAO,EAAE,8BAA8B;QACvC,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,eAAe,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;IAC5D,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,qBAAqB,CAAC;IAC5D,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,SAAS,CAAC;IACjE,MAAM,EAAE;QACN,cAAc,EAAE,UAAU;QAC1B,OAAO,EAAE,UAAU;QACnB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,aAAa,CAAC;IACrE,MAAM,EAAE;QACN,cAAc,EAAE,eAAe;QAC/B,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,EAAE,QAAQ,CAAC;IAChE,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;SACnC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAA4B;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,eAAe;IAC9B,MAAM,EAAE;QACN,cAAc,EAAE,oBAAoB;QACpC,QAAQ,EAAE,IAAI
;QACd,OAAO,EAAE,oBAAoB;QAC7B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,kBAAkB,CAAC;IACjE,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,iBAAiB,CAAC;IAChE,MAAM,EAAE;QACN,cAAc,EAAE,wBAAwB;QACxC,OAAO,EAAE,wBAAwB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,gBAAgB,CAAC;IAC/D,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;IACpE,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,mBAAmB,GAAuB;IACrD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,qBAAqB,CAAC;IACpE,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,EAAE,wBAAwB,CAAC;IACvE,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,sBAAsB;QACpC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,qCAAqC;QACrD,OAAO,EAAE,qCAAqC;QAC9C,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,sBAAsB,GAAuB;IACxD,aAAa,EAAE,CAAC,SAAS,EAAE,wBAAwB,CAAC;IACpD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;SACjD;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,iBAAiB;QACjC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,iBAAiB;QAC1B,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,uBAAuB;QACvC,OAAO,EAAE,uBAAuB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;IAClC,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;aACV;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,mBAAmB,CAAC;IAC/C,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;SACpC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,qBAAqB,GAAuB;IACvD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,uBAAu
B;KACxB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;KAC1B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,iCAAiC;QACjD,OAAO,EAAE,iCAAiC;QAC1C,IAAI,EAAE;YACJ,IAAI,EAAE,iBAAiB;SACxB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,aAAa,GAAuB;IAC/C,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,eAAe,CAAC;IAC7E,MAAM,EAAE;QACN,cAAc,EAAE,sBAAsB;QACtC,OAAO,EAAE,sBAAsB;QAC/B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mBAAmB;KACpB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,cAAc,CAAC;IAC5E,MAAM,EAAE;QACN,cAAc,EAAE,qBAAqB;QACrC,OAAO,EAAE,qBAAqB;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,YAAY;IAC3B,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,WAAW;QAC3B,OAAO,EAAE,WAAW;QACpB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,CAAC;IACtC,MAAM,EAAE;QACN,cAAc,EAAE,gBAAgB;QAChC,OAAO,EAAE,gBAAgB;QACzB,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,CAAC,SAAS,EAAE,WAAW,CAAC;IACvC,MAAM,EAAE;QACN,cAAc,EAAE,iBAAiB;QACjC,OAAO,EAAE,iBAAiB;QAC1B,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,iBAAiB;IAChC,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,oBAAoB;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAuB;IAClD,aAAa,EAAE,CAAC,SAAS,EAAE,kBAAkB,CAAC;IAC9C,MAAM,EAAE;QACN,cAAc,EAAE,yBAAyB;QACzC,OAAO,EAAE,yBAAyB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,gCAAgC;QAChD,OAAO,EAAE,gCAAgC;QACzC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE,CAAC,SAAS,EAAE,gBAAgB,CAAC;IAC5C,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;SACnC;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,yBAAyB;IACxC,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,kBAAkB;QAClC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,cAAc,EAAE,QAAQ;QACxB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,QAAQ;QACjB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE;gBACb,IAAI;gBACJ,IAAI;gBACJ,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBACL,KAAK;gBA
CL,KAAK;gBACL,KAAK;gBACL,MAAM;gBACN,SAAS;aACV;SACF;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE,kBAAkB;CAC3B,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAuB;IACtC,aAAa,EAAE,CAAC,SAAS,EAAE,MAAM,CAAC;IAClC,MAAM,EAAE,cAAc;CACvB,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE,CAAC,SAAS,EAAE,yBAAyB,CAAC;IACrD,MAAM,EAAE;QACN,cAAc,EAAE,aAAa;QAC7B,OAAO,EAAE,aAAa;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,yBAAyB,GAAuB;IAC3D,aAAa,EAAE,CAAC,SAAS,EAAE,2BAA2B,CAAC;IACvD,MAAM,EAAE;QACN,cAAc,EAAE,oBAAoB;QACpC,OAAO,EAAE,oBAAoB;QAC7B,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAuB;IAC1C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAuB;IACnD,aAAa,EAAE,mBAAmB;IAClC,MAAM,EAAE;QACN,cAAc,EAAE,0BAA0B;QAC1C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,0BAA0B;QACnC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,YAAY,EAAE,0BAA0B;QACxC,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,cAAc;QAC9B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,KAAK,GAAuB;IACvC,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,cAAc,EAAE,MAAM;QACtB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,MAAM;QACf,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,QAAQ;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,YAAY,EAAE,QAAQ;QACtB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,iBAAiB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iCAAiC,GAAuB;IACnE,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,mCAAmC;KACpC;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,0BAA0B;KAC3B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,uBAAuB,GAAuB;IACzD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,yBAAyB;KAC1B;IACD,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAuB;IAC5C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,iBAAiB;QACjC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,WAAW;IAC1B,MAAM,EAAE;QACN,cAAc,EAAE,kBAAkB;QAClC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,kBAAkB;QAC3B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;
AAEF,MAAM,CAAC,MAAM,WAAW,GAAuB;IAC7C,aAAa,EAAE,aAAa;IAC5B,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAuB;IACpD,aAAa,EAAE,CAAC,SAAS,EAAE,oBAAoB,CAAC;IAChD,MAAM,EAAE;QACN,cAAc,EAAE,2BAA2B;QAC3C,OAAO,EAAE,2BAA2B;QACpC,IAAI,EAAE;YACJ,IAAI,EAAE,WAAW;SAClB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,OAAO;IACtB,MAAM,EAAE;QACN,cAAc,EAAE,YAAY;QAC5B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,YAAY;QACrB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,UAAU;QACxB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAA4B;IACnD,aAAa,EAAE,CAAC,SAAS,EAAE,cAAc,CAAC;IAC1C,MAAM,EAAE;QACN,cAAc,EAAE,cAAc;QAC9B,OAAO,EAAE,cAAc;QACvB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAuB;IACjD,aAAa,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC;IAC7C,MAAM,EAAE;QACN,cAAc,EAAE,4BAA4B;QAC5C,OAAO,EAAE,4BAA4B;QACrC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,oBAAoB,GAAuB;IACtD,aAAa,EAAE,sBAAsB;IACrC,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,KAAK,EAAE,QAAQ,EAAE,WAAW,CAAC;SAC9C;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,iBAAiB;QAC/B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,YAAY;QAC1B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,aAAa;QAC3B,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAAuB;IACzC,aAAa,EAAE,CAAC,SAAS,EAAE,gCAAgC,EAAE,SAAS,CAAC;IACvE,MAAM,EAAE;QACN,cAAc,EAAE,6BAA6B;QAC7C,OAAO,EAAE,6BAA6B;QACtC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,cAAc,GAAuB;IAChD,aAAa,EAAE;QACb,SAAS;QACT,gCAAgC;QAChC,gBAAgB;KACjB;IACD,MAAM,EAAE;QACN,cAAc,EAAE,+BAA+B;QAC/C,OAAO,EAAE,+BAA+B;QACxC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,YAAY,GAAuB;IAC9C,aAAa,EAAE,CAAC,SAAS,EAAE,aAAa,CAAC;IACzC,MAAM,EAAE;QACN,cAAc,EAAE,mBAAmB;QACnC,OAAO,EAAE,mBAAmB;QAC5B,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,MAAM;QACpB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,SAAS,GAAuB;IAC3C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,gBAAgB;QAChC,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAuB;IAC1D,aAAa,EAAE,CAAC,SAAS,EAAE,0BAA0B,CAAC;IACtD,MAAM,EAAE;QACN,cAAc,EAAE,kCAAkC;QAClD,OAAO,EAAE,kCAAkC;QAC3C,IAAI,EAAE;YACJ,IAAI,EAAE,SAAS;SAChB;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,OAAO;QACrB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,OAAO,GAA4B;IAC9C,aAAa,EAAE,SAAS;IACxB,MAAM,EAAE;QACN,cAAc,EAAE,SAAS;QACzB,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,SAAS;QAClB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAuB;IACxC,aAAa,EAAE,QAAQ;IACvB,MAAM,EAAE,qBAAqB;CAC9B,CAAC;
AAEF,MAAM,CAAC,MAAM,MAAM,GAA4B;IAC7C,aAAa,EAAE,MAAM;IACrB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,UAAU,EAAE,IAAI;QAChB,cAAc,EAAE,MAAM;QACtB,IAAI,EAAE;YACJ,IAAI,EAAE,QAAQ;SACf;KACF;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAA4B;IAC/C,aAAa,EAAE,UAAU;IACzB,MAAM,EAAE;QACN,YAAY,EAAE,WAAW;QACzB,cAAc,EAAE,eAAe;QAC/B,QAAQ,EAAE,IAAI;QACd,OAAO,EAAE,eAAe;QACxB,IAAI,EAAE;YACJ,IAAI,EAAE,MAAM;YACZ,aAAa,EAAE,CAAC,WAAW,EAAE,aAAa,EAAE,KAAK,CAAC;SACnD;KACF;CACF,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n OperationParameter,\n OperationURLParameter,\n OperationQueryParameter,\n QueryCollectionFormat\n} from \"@azure/core-http\";\nimport {\n BlobServiceProperties as BlobServicePropertiesMapper,\n KeyInfo as KeyInfoMapper,\n QueryRequest as QueryRequestMapper,\n BlobTags as BlobTagsMapper,\n BlockLookupList as BlockLookupListMapper\n} from \"../models/mappers\";\n\nexport const contentType: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobServiceProperties: OperationParameter = {\n parameterPath: \"blobServiceProperties\",\n mapper: BlobServicePropertiesMapper\n};\n\nexport const accept: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const url: OperationURLParameter = {\n parameterPath: \"url\",\n mapper: {\n serializedName: \"url\",\n required: true,\n xmlName: \"url\",\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\n\nexport const restype: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"service\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"properties\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const timeoutInSeconds: OperationQueryParameter = {\n parameterPath: [\"options\", \"timeoutInSeconds\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"timeout\",\n xmlName: \"timeout\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const version: OperationParameter = {\n parameterPath: \"version\",\n mapper: {\n defaultValue: \"2021-08-06\",\n isConstant: true,\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const requestId: OperationParameter = {\n parameterPath: [\"options\", \"requestId\"],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const accept1: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp1: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"stats\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp2: OperationQueryParameter = {\n parameterPath: \"comp\",\n 
mapper: {\n defaultValue: \"list\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prefix: OperationQueryParameter = {\n parameterPath: [\"options\", \"prefix\"],\n mapper: {\n serializedName: \"prefix\",\n xmlName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const marker: OperationQueryParameter = {\n parameterPath: [\"options\", \"marker\"],\n mapper: {\n serializedName: \"marker\",\n xmlName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxPageSize: OperationQueryParameter = {\n parameterPath: [\"options\", \"maxPageSize\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"maxresults\",\n xmlName: \"maxresults\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const include: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListContainersIncludeType\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\"metadata\", \"deleted\", \"system\"]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const keyInfo: OperationParameter = {\n parameterPath: \"keyInfo\",\n mapper: KeyInfoMapper\n};\n\nexport const comp3: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"userdelegationkey\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype1: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"account\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const comp4: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"batch\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const contentLength: OperationParameter = {\n parameterPath: \"contentLength\",\n mapper: {\n serializedName: \"Content-Length\",\n required: true,\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const multipartContentType: OperationParameter = {\n parameterPath: \"multipartContentType\",\n mapper: {\n serializedName: \"Content-Type\",\n required: true,\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp5: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blobs\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const where: OperationQueryParameter = {\n parameterPath: [\"options\", \"where\"],\n mapper: {\n serializedName: \"where\",\n xmlName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype2: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"container\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const metadata: OperationParameter = {\n parameterPath: [\"options\", \"metadata\"],\n mapper: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\n\nexport const access: 
OperationParameter = {\n parameterPath: [\"options\", \"access\"],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n }\n};\n\nexport const defaultEncryptionScope: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const preventEncryptionScopeOverride: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const leaseId: OperationParameter = {\n parameterPath: [\"options\", \"leaseAccessConditions\", \"leaseId\"],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifModifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifModifiedSince\"],\n mapper: {\n serializedName: \"If-Modified-Since\",\n xmlName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const ifUnmodifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifUnmodifiedSince\"],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n xmlName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const comp6: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"metadata\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp7: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"acl\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const containerAcl: OperationParameter = {\n parameterPath: [\"options\", \"containerAcl\"],\n mapper: {\n serializedName: \"containerAcl\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n};\n\nexport const comp8: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"undelete\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerName: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerName\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n xmlName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerVersion: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerVersion\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n xmlName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp9: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"rename\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContainerName: OperationParameter = {\n parameterPath: 
\"sourceContainerName\",\n mapper: {\n serializedName: \"x-ms-source-container-name\",\n required: true,\n xmlName: \"x-ms-source-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"sourceLeaseId\"],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n xmlName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp10: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"lease\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"acquire\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const duration: OperationParameter = {\n parameterPath: [\"options\", \"duration\"],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const proposedLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"proposedLeaseId\"],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action1: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"release\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const leaseId1: OperationParameter = {\n parameterPath: \"leaseId\",\n mapper: {\n serializedName: \"x-ms-lease-id\",\n required: true,\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action2: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"renew\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action3: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"break\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const breakPeriod: OperationParameter = {\n parameterPath: [\"options\", \"breakPeriod\"],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n xmlName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const action4: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"change\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const proposedLeaseId1: OperationParameter = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n required: true,\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const include1: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListBlobsIncludeItem\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\",\n \"immutabilitypolicy\",\n \"legalhold\",\n \"deletedwithversions\"\n ]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const delimiter: OperationQueryParameter = {\n parameterPath: \"delimiter\",\n 
mapper: {\n serializedName: \"delimiter\",\n required: true,\n xmlName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const snapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"snapshot\"],\n mapper: {\n serializedName: \"snapshot\",\n xmlName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const versionId: OperationQueryParameter = {\n parameterPath: [\"options\", \"versionId\"],\n mapper: {\n serializedName: \"versionid\",\n xmlName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const range: OperationParameter = {\n parameterPath: [\"options\", \"range\"],\n mapper: {\n serializedName: \"x-ms-range\",\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const rangeGetContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentMD5\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n xmlName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const rangeGetContentCRC64: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentCRC64\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n xmlName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionKey: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKey\"],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n xmlName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionKeySha256: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKeySha256\"],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionAlgorithm: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionAlgorithm\"],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n xmlName: \"x-ms-encryption-algorithm\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifMatch\"],\n mapper: {\n serializedName: \"If-Match\",\n xmlName: \"If-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifNoneMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifNoneMatch\"],\n mapper: {\n serializedName: \"If-None-Match\",\n xmlName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifTags: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifTags\"],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n xmlName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deleteSnapshots: OperationParameter = {\n parameterPath: [\"options\", \"deleteSnapshots\"],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n xmlName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\"include\", \"only\"]\n }\n }\n};\n\nexport const blobDeleteType: OperationQueryParameter = {\n parameterPath: [\"options\", \"blobDeleteType\"],\n mapper: {\n serializedName: \"deletetype\",\n xmlName: \"deletetype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp11: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"expiry\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const 
expiryOptions: OperationParameter = {\n parameterPath: \"expiryOptions\",\n mapper: {\n serializedName: \"x-ms-expiry-option\",\n required: true,\n xmlName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiresOn: OperationParameter = {\n parameterPath: [\"options\", \"expiresOn\"],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobCacheControl: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobCacheControl\"],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n xmlName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentType: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentType\"],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n xmlName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentMD5\"],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobContentEncoding: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentEncoding\"],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n xmlName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLanguage: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentLanguage\"],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n xmlName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentDisposition: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentDisposition\"],\n mapper: {\n serializedName: \"x-ms-blob-content-disposition\",\n xmlName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp12: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"immutabilityPolicies\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const immutabilityPolicyExpiry: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyExpiry\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const immutabilityPolicyMode: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyMode\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n};\n\nexport const comp13: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"legalhold\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const legalHold: OperationParameter = {\n parameterPath: \"legalHold\",\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n required: true,\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionScope: OperationParameter = {\n parameterPath: [\"options\", \"encryptionScope\"],\n mapper: {\n serializedName: 
\"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp14: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"snapshot\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier: OperationParameter = {\n parameterPath: [\"options\", \"tier\"],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const rehydratePriority: OperationParameter = {\n parameterPath: [\"options\", \"rehydratePriority\"],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n }\n};\n\nexport const sourceIfModifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n xmlName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfUnmodifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-unmodified-since\",\n xmlName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfMatch: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfMatch\"],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n xmlName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfNoneMatch: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n xmlName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfTags: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfTags\"],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n xmlName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySource: OperationParameter = {\n parameterPath: \"copySource\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobTagsString: OperationParameter = {\n parameterPath: [\"options\", \"blobTagsString\"],\n mapper: {\n serializedName: \"x-ms-tags\",\n xmlName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sealBlob: OperationParameter = {\n parameterPath: [\"options\", \"sealBlob\"],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n xmlName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const legalHold1: OperationParameter = {\n parameterPath: [\"options\", \"legalHold\"],\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const xMsRequiresSync: OperationParameter = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n defaultValue: \"true\",\n isConstant: 
true,\n serializedName: \"x-ms-requires-sync\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentMD5\"],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n xmlName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const copySourceAuthorization: OperationParameter = {\n parameterPath: [\"options\", \"copySourceAuthorization\"],\n mapper: {\n serializedName: \"x-ms-copy-source-authorization\",\n xmlName: \"x-ms-copy-source-authorization\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceTags: OperationParameter = {\n parameterPath: [\"options\", \"copySourceTags\"],\n mapper: {\n serializedName: \"x-ms-copy-source-tag-option\",\n xmlName: \"x-ms-copy-source-tag-option\",\n type: {\n name: \"Enum\",\n allowedValues: [\"REPLACE\", \"COPY\"]\n }\n }\n};\n\nexport const comp15: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"copy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyActionAbortConstant: OperationParameter = {\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n defaultValue: \"abort\",\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyId: OperationQueryParameter = {\n parameterPath: \"copyId\",\n mapper: {\n serializedName: \"copyid\",\n required: true,\n xmlName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp16: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tier\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier1: OperationParameter = {\n parameterPath: \"tier\",\n mapper: {\n serializedName: \"x-ms-access-tier\",\n required: true,\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const queryRequest: OperationParameter = {\n parameterPath: [\"options\", \"queryRequest\"],\n mapper: QueryRequestMapper\n};\n\nexport const comp17: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"query\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp18: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tags\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tags: OperationParameter = {\n parameterPath: [\"options\", \"tags\"],\n mapper: BlobTagsMapper\n};\n\nexport const transactionalContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentMD5\"],\n mapper: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const transactionalContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobType: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"PageBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: 
\"String\"\n }\n }\n};\n\nexport const blobContentLength: OperationParameter = {\n parameterPath: \"blobContentLength\",\n mapper: {\n serializedName: \"x-ms-blob-content-length\",\n required: true,\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const blobSequenceNumber: OperationParameter = {\n parameterPath: [\"options\", \"blobSequenceNumber\"],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const contentType1: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/octet-stream\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body1: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const accept2: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp19: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"page\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const pageWrite: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"update\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThanOrEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n xmlName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThan: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n xmlName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n xmlName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const pageWrite1: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"clear\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceUrl: OperationParameter = {\n parameterPath: \"sourceUrl\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceRange: OperationParameter = {\n parameterPath: \"sourceRange\",\n mapper: {\n serializedName: \"x-ms-source-range\",\n required: true,\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n xmlName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n 
}\n};\n\nexport const range1: OperationParameter = {\n parameterPath: \"range\",\n mapper: {\n serializedName: \"x-ms-range\",\n required: true,\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp20: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"pagelist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevsnapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"prevsnapshot\"],\n mapper: {\n serializedName: \"prevsnapshot\",\n xmlName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevSnapshotUrl: OperationParameter = {\n parameterPath: [\"options\", \"prevSnapshotUrl\"],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n xmlName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sequenceNumberAction: OperationParameter = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n serializedName: \"x-ms-sequence-number-action\",\n required: true,\n xmlName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\"max\", \"update\", \"increment\"]\n }\n }\n};\n\nexport const comp21: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"incrementalcopy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType1: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"AppendBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp22: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"appendblock\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxSize: OperationParameter = {\n parameterPath: [\"options\", \"appendPositionAccessConditions\", \"maxSize\"],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n xmlName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const appendPosition: OperationParameter = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n \"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n xmlName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const sourceRange1: OperationParameter = {\n parameterPath: [\"options\", \"sourceRange\"],\n mapper: {\n serializedName: \"x-ms-source-range\",\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp23: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"seal\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType2: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"BlockBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceBlobProperties: OperationParameter = {\n parameterPath: [\"options\", \"copySourceBlobProperties\"],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n xmlName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const comp24: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"block\",\n isConstant: true,\n 
serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blockId: OperationQueryParameter = {\n parameterPath: \"blockId\",\n mapper: {\n serializedName: \"blockid\",\n required: true,\n xmlName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blocks: OperationParameter = {\n parameterPath: \"blocks\",\n mapper: BlockLookupListMapper\n};\n\nexport const comp25: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blocklist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const listType: OperationQueryParameter = {\n parameterPath: \"listType\",\n mapper: {\n defaultValue: \"committed\",\n serializedName: \"blocklisttype\",\n required: true,\n xmlName: \"blocklisttype\",\n type: {\n name: \"Enum\",\n allowedValues: [\"committed\", \"uncommitted\", \"all\"]\n }\n }\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js new file mode 100644 index 0000000000..7507b15575 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js @@ -0,0 +1,237 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a AppendBlob. */ +export class AppendBlob { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. 
+ * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + const operationArguments = { + sourceUrl, + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. + */ + seal(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType1 + ], + isXML: true, + serializer: xmlSerializer +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.maxSize, + Parameters.appendPosition + ], + mediaType: "binary", + serializer +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], + urlParameters: 
[Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.transactionalContentMD5, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.maxSize, + Parameters.appendPosition, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.AppendBlobSealHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.AppendBlobSealExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.appendPosition + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=appendBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map new file mode 100644 index 0000000000..662f0016fa --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"appendBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/appendBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAanD,uCAAuC;AACvC,MAAM,OAAO,UAAU;IAGrB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACiB,CAAC;IACzC,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACiB,CAAC;IAC9C,CAAC;IAED;;;;;;;;OAQG;IACH,kBAAkB,CAChB,SAAiB,EACjB,aAAqB,EACrB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACiB,CAAC;IACrD,CAAC;IAED;;;;OAIG;IACH,IAAI,CACF,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iBAAiB,CACiB,CAAC;IACvC,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,cAAc;KAC1B;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA2B;IAC9D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAA
E,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,YAAY;KACxB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iBAAiB,GAA2B;IAChD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,cAAc;KAC1B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n AppendBlobCreateOptionalParams,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockOptionalParams,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlOptionalParams,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobSealOptionalParams,\n AppendBlobSealResponse\n} from \"../models\";\n\n/** Class representing a AppendBlob. */\nexport class AppendBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class AppendBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create Append Blob operation creates a new append blob.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n options?: AppendBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob. The\n * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to\n * AppendBlob. 
Append Block is supported only on version 2015-02-21 version or later.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n appendBlock(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: AppendBlobAppendBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob where\n * the contents are read from a source url. The Append Block operation is permitted only if the blob\n * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version\n * 2015-02-21 version or later.\n * @param sourceUrl Specify a URL to the copy source.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n appendBlockFromUrl(\n sourceUrl: string,\n contentLength: number,\n options?: AppendBlobAppendBlockFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version\n * 2019-12-12 version or later.\n * @param options The options parameters.\n */\n seal(\n options?: AppendBlobSealOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n sealOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst appendBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n 
responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.maxSize,\n Parameters.appendPosition\n ],\n mediaType: \"binary\",\n serializer\n};\nconst appendBlockFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.transactionalContentMD5,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.maxSize,\n Parameters.appendPosition,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst sealOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.AppendBlobSealHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobSealExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.appendPosition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js new file mode 100644 index 0000000000..6810389a75 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js @@ -0,0 +1,1079 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. 
+ * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Blob. */ +export class Blob { + /** + * Initialize a new instance of the class Blob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. 
+ */ + setExpiry(expiryOptions, options) { + const operationArguments = { + expiryOptions, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + const operationArguments = { + legalHold, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. 
+ * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + const operationArguments = { + copyId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + const operationArguments = { + tier, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobDownloadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.rangeGetContentMD5, + Parameters.rangeGetContentCRC64, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: Mappers.BlobGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.BlobDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.blobDeleteType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobUndeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobUndeleteExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; 
+const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetExpiryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.expiryOptions, + Parameters.expiresOn + ], + isXML: true, + serializer: xmlSerializer +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifUnmodifiedSince, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.legalHold + ], + isXML: true, + serializer: xmlSerializer +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetMetadataExceptionHeaders + } + }, + 
queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobChangeLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + 
headersMapper: Mappers.BlobBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope + ], + isXML: true, + serializer: xmlSerializer +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.tier, + Parameters.rehydratePriority, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sealBlob, + Parameters.legalHold1 + ], + isXML: true, + serializer: xmlSerializer +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.BlobCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.xMsRequiresSync, + Parameters.sourceContentMD5, + 
Parameters.copySourceAuthorization, + Parameters.copySourceTags + ], + isXML: true, + serializer: xmlSerializer +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp15, + Parameters.copyId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.BlobSetTierHeaders + }, + 202: { + headersMapper: Mappers.BlobSetTierHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTierExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp16 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags, + Parameters.rehydratePriority, + Parameters.tier1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.BlobQueryHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobQueryExceptionHeaders + } + }, + requestBody: Parameters.queryRequest, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp17 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobTags, + headersMapper: Mappers.BlobGetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.versionId, + Parameters.comp18 + ], + 
urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: Mappers.BlobSetTagsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlobSetTagsExceptionHeaders + } + }, + requestBody: Parameters.tags, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.versionId, + Parameters.comp18 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.leaseId, + Parameters.ifTags, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +//# sourceMappingURL=blob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map new file mode 100644 index 0000000000..71e3c61e39 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"blob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/blob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAsDnD,iCAAiC;AACjC,MAAM,OAAO,IAAI;IAGf;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,QAAQ,CACN,OAAoC;QAEpC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAAyC;QAEzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACW,CAAC;IAC1C,CAAC;IAED;;;;;;;;;;;;;;OAcG;IACH,MAAM,CAAC,OAAkC;QACvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACW,CAAC;IACnC,CAAC;IAED;;;OAGG;IACH,QAAQ,CACN,OAAoC;QAEpC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;IACrC,CAAC;IAED;;;;OAIG;IACH,SAAS,CACP,aAAgC,EAChC,OAAqC;QAErC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,sBAAsB,CACW,CAAC;IACtC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;OAGG;IACH,qBAAqB,CACnB,OAAiD;QAEjD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACW,CAAC;IAClD,CAAC;IAED;;;OAGG;IACH,wBAAwB,CACtB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACW,CAAC;IACrD
,CAAC;IAED;;;;OAIG;IACH,YAAY,CACV,SAAkB,EAClB,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,WAAW,CACT,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,YAAY,CACV,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,YAAY,CACV,OAAe,EACf,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;IACzC,CAAC;IAED;;;;;OAKG;IACH,UAAU,CACR,OAAe,EACf,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;IACvC,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;IACvC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;;;;;OAOG;IACH,gBAAgB,CACd,UAAkB,EAClB,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;IAC7C,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,UAAkB,EAClB,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;IACxC,CAAC;IAED;;;;;;OAMG;IACH,gBAAgB,CACd,MAAc,EACd,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;IAC7C,CAAC;IAED;;;;;;;;OAQG;IACH,OAAO,CACL,IAAgB,EAChB,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;IACpC,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;IAC3C,CAAC;IAED;;;;OAIG;IACH,KAAK,CAAC,OAAiC;QACrC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kBAAkB,CACW,CAAC;IAClC,CAAC;IAED;;;OAGG;IACH,OAAO,CAAC,OAAmC;QACzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;IACpC,CAAC;IAED;;;OAGG;IACH,OAAO,CAAC,OAAmC;QACzC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAk
B,EAClB,oBAAoB,CACW,CAAC;IACpC,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,qBAAqB,GAA2B;IACpD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,oBAAoB;QAC/B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,wBAAwB;SAChD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iBAAiB;SACzC;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,cAAc;KAC1B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qBAAqB,GAA2B;IACpD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mBAAmB;SAC3C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,sBAAsB,GAA2B;IACrD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,oBAAoB;SAC5C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,6BAA6B;SACrD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;
SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;KAClC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;IACjE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yCAAyC;SACjE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;KAClC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;IACpE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;
SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;IAC5D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAC
lB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,cAAc;KAC1B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;IAC5D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,uBAAuB;KACnC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;KACjB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kBAAkB,GAA2B;IACjD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,gBAAgB;SACxC;QACD,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,gBAAgB;SACxC;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,YAAY;IACpC,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,U
AAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,kBAAkB;SAC1C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;KACrC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobDownloadOptionalParams,\n BlobDownloadResponse,\n BlobGetPropertiesOptionalParams,\n BlobGetPropertiesResponse,\n BlobDeleteOptionalParams,\n BlobDeleteResponse,\n BlobUndeleteOptionalParams,\n BlobUndeleteResponse,\n BlobExpiryOptions,\n BlobSetExpiryOptionalParams,\n BlobSetExpiryResponse,\n BlobSetHttpHeadersOptionalParams,\n BlobSetHttpHeadersResponse,\n BlobSetImmutabilityPolicyOptionalParams,\n BlobSetImmutabilityPolicyResponse,\n BlobDeleteImmutabilityPolicyOptionalParams,\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetLegalHoldOptionalParams,\n BlobSetLegalHoldResponse,\n BlobSetMetadataOptionalParams,\n BlobSetMetadataResponse,\n BlobAcquireLeaseOptionalParams,\n BlobAcquireLeaseResponse,\n BlobReleaseLeaseOptionalParams,\n BlobReleaseLeaseResponse,\n BlobRenewLeaseOptionalParams,\n BlobRenewLeaseResponse,\n BlobChangeLeaseOptionalParams,\n BlobChangeLeaseResponse,\n BlobBreakLeaseOptionalParams,\n BlobBreakLeaseResponse,\n BlobCreateSnapshotOptionalParams,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLOptionalParams,\n BlobStartCopyFromURLResponse,\n BlobCopyFromURLOptionalParams,\n BlobCopyFromURLResponse,\n BlobAbortCopyFromURLOptionalParams,\n BlobAbortCopyFromURLResponse,\n AccessTier,\n BlobSetTierOptionalParams,\n BlobSetTierResponse,\n BlobGetAccountInfoResponse,\n BlobQueryOptionalParams,\n BlobQueryResponse,\n BlobGetTagsOptionalParams,\n BlobGetTagsResponse,\n 
BlobSetTagsOptionalParams,\n BlobSetTagsResponse\n} from \"../models\";\n\n/** Class representing a Blob. */\nexport class Blob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Blob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Download operation reads or downloads a blob from the system, including its metadata and\n * properties. You can also call Download to read a snapshot.\n * @param options The options parameters.\n */\n download(\n options?: BlobDownloadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n downloadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system\n * properties for the blob. It does not return the content of the blob.\n * @param options The options parameters.\n */\n getProperties(\n options?: BlobGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is\n * permanently removed from the storage account. If the storage account's soft delete feature is\n * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible\n * immediately. However, the blob service retains the blob or snapshot for the number of days specified\n * by the DeleteRetentionPolicy section of [Storage service properties]\n * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is\n * permanently removed from the storage account. Note that you continue to be charged for the\n * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the\n * \"include=deleted\" query parameter to discover which blobs and snapshots have been soft deleted. You\n * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a\n * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404\n * (ResourceNotFound).\n * @param options The options parameters.\n */\n delete(options?: BlobDeleteOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Undelete a blob that was previously soft deleted\n * @param options The options parameters.\n */\n undelete(\n options?: BlobUndeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n undeleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Sets the time a blob will expire and be deleted.\n * @param expiryOptions Required. 
Indicates mode of the expiry time\n * @param options The options parameters.\n */\n setExpiry(\n expiryOptions: BlobExpiryOptions,\n options?: BlobSetExpiryOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n expiryOptions,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setExpiryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set HTTP Headers operation sets system properties on the blob\n * @param options The options parameters.\n */\n setHttpHeaders(\n options?: BlobSetHttpHeadersOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setHttpHeadersOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Immutability Policy operation sets the immutability policy on the blob\n * @param options The options parameters.\n */\n setImmutabilityPolicy(\n options?: BlobSetImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Delete Immutability Policy operation deletes the immutability policy on the blob\n * @param options The options parameters.\n */\n deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Legal Hold operation sets a legal hold on the blob.\n * @param legalHold Specified if a legal hold should be set on the blob.\n * @param options The options parameters.\n */\n setLegalHold(\n legalHold: boolean,\n options?: BlobSetLegalHoldOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n legalHold,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setLegalHoldOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more\n * name-value pairs\n * @param options The options parameters.\n */\n setMetadata(\n options?: BlobSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n acquireLease(\n options?: BlobAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and 
delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: BlobReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: BlobRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: BlobChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n breakLease(\n options?: BlobBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * The Create Snapshot operation creates a read-only snapshot of a blob\n * @param options The options parameters.\n */\n createSnapshot(\n options?: BlobCreateSnapshotOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createSnapshotOperationSpec\n ) as Promise;\n }\n\n /**\n * The Start Copy From URL operation copies a blob or an internet resource to a new blob.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. 
The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n startCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return\n * a response until the copy is complete.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyFromURL(\n copySource: string,\n options?: BlobCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination\n * blob with zero length and full metadata.\n * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob\n * operation.\n * @param options The options parameters.\n */\n abortCopyFromURL(\n copyId: string,\n options?: BlobAbortCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copyId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n abortCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant storage only). A\n * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block\n * blob's tier determines Hot/Cool/Archive storage type. 
This operation does not update the blob's\n * ETag.\n * @param tier Indicates the tier to be set on the blob.\n * @param options The options parameters.\n */\n setTier(\n tier: AccessTier,\n options?: BlobSetTierOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n tier,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTierOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Query operation enables users to select/project on blob data by providing simple query\n * expressions.\n * @param options The options parameters.\n */\n query(options?: BlobQueryOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n queryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Tags operation enables users to get the tags associated with a blob.\n * @param options The options parameters.\n */\n getTags(options?: BlobGetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getTagsOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tags operation enables users to set tags on a blob.\n * @param options The options parameters.\n */\n setTags(options?: BlobSetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTagsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst downloadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDownloadExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.rangeGetContentMD5,\n Parameters.rangeGetContentCRC64,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: 
coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"HEAD\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.blobDeleteType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.deleteSnapshots\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst undeleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobUndeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobUndeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setExpiryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetExpiryExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.expiryOptions,\n Parameters.expiresOn\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setHttpHeadersOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetHttpHeadersHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n 
Parameters.blobContentLanguage,\n Parameters.blobContentDisposition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifUnmodifiedSince,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setLegalHoldOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetLegalHoldHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.legalHold\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetMetadataExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n 
Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst createSnapshotOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n 
Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst startCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.tier,\n Parameters.rehydratePriority,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sealBlob,\n Parameters.legalHold1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.xMsRequiresSync,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst abortCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp15,\n Parameters.copyId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.copyActionAbortConstant\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTierOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetTierHeaders\n },\n 202: {\n headersMapper: Mappers.BlobSetTierHeaders\n 
},\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTierExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp16\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags,\n Parameters.rehydratePriority,\n Parameters.tier1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst queryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobQueryExceptionHeaders\n }\n },\n requestBody: Parameters.queryRequest,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp17\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobTags,\n headersMapper: Mappers.BlobGetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetTagsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobSetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTagsExceptionHeaders\n }\n },\n requestBody: Parameters.tags,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifTags,\n 
Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js new file mode 100644 index 0000000000..037ca67455 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js @@ -0,0 +1,391 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a BlockBlob. */ +export class BlockBlob { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + const operationArguments = { + contentLength, + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + const operationArguments = { + blockId, + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + const operationArguments = { + blockId, + contentLength, + sourceUrl, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + const operationArguments = { + blocks, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
+ */ + getBlockList(listType, options) { + const operationArguments = { + listType, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobUploadHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobUploadExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.contentType1, + Parameters.accept2, + Parameters.blobType2 + ], + mediaType: "binary", + serializer +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.encryptionScope, + Parameters.tier, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceIfTags, + Parameters.copySource, + Parameters.blobTagsString, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.copySourceTags, + Parameters.transactionalContentMD5, + Parameters.blobType2, + Parameters.copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders + } + }, + requestBody: Parameters.body1, + 
queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2 + ], + mediaType: "binary", + serializer +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp24, + Parameters.blockId + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.sourceUrl, + Parameters.sourceContentCrc64, + Parameters.sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: Parameters.blocks, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlockList, + headersMapper: Mappers.BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.snapshot, + Parameters.comp25, + Parameters.listType + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + 
Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=blockBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map new file mode 100644 index 0000000000..d76b8b412a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"blockBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/blockBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAmBnD,sCAAsC;AACtC,MAAM,OAAO,SAAS;IAGpB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;;;;OAQG;IACH,MAAM,CACJ,aAAqB,EACrB,IAA8B,EAC9B,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;;;;;;;;;OAYG;IACH,cAAc,CACZ,aAAqB,EACrB,UAAkB,EAClB,OAA+C;QAE/C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;IAChD,CAAC;IAED;;;;;;;;OAQG;IACH,UAAU,CACR,OAAe,EACf,aAAqB,EACrB,IAA8B,EAC9B,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;;;;;;OASG;IACH,iBAAiB,CACf,OAAe,EACf,aAAqB,EACrB,SAAiB,EACjB,OAAkD;QAElD,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACgB,CAAC;IACnD,CAAC;IAED;;;;;;;;;;OAUG;IACH,eAAe,CACb,MAAuB,EACvB,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;;;;OAMG;IACH,YAAY,CACV,QAAuB,EACvB,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,QAAQ;YACR,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;Q
ACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,wBAAwB;KACpC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;KACnB;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,8BAA8B,GAA2B;IAC7D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0CAA0C;SAClE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,YAAY;KACxB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,MAAM;IAC9B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAA
C,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;KACrC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,SAAS;YAC7B,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlockBlobUploadOptionalParams,\n BlockBlobUploadResponse,\n BlockBlobPutBlobFromUrlOptionalParams,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobStageBlockOptionalParams,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLOptionalParams,\n BlockBlobStageBlockFromURLResponse,\n BlockLookupList,\n BlockBlobCommitBlockListOptionalParams,\n BlockBlobCommitBlockListResponse,\n BlockListType,\n BlockBlobGetBlockListOptionalParams,\n BlockBlobGetBlockListResponse\n} from \"../models\";\n\n/** Class representing a BlockBlob. */\nexport class BlockBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class BlockBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing\n * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put\n * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a\n * partial update of the content of a block blob, use the Put Block List operation.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n upload(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobUploadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read\n * from a given URL. This API is supported beginning with the 2020-04-08 version. 
Partial updates are\n * not supported with Put Blob from URL; the content of an existing blob is overwritten with the\n * content of the new blob. To perform partial updates to a block blob’s contents using a source URL,\n * use the Put Block from URL API in conjunction with Put Block List.\n * @param contentLength The length of the request.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n putBlobFromUrl(\n contentLength: number,\n copySource: string,\n options?: BlockBlobPutBlobFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n putBlobFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n stageBlock(\n blockId: string,\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobStageBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob where the contents\n * are read from a URL.\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param sourceUrl Specify a URL to the copy source.\n * @param options The options parameters.\n */\n stageBlockFromURL(\n blockId: string,\n contentLength: number,\n sourceUrl: string,\n options?: BlockBlobStageBlockFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n sourceUrl,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the\n * blob. In order to be written as part of a blob, a block must have been successfully written to the\n * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading\n * only those blocks that have changed, then committing the new and existing blocks together. 
You can\n * do this by specifying whether to commit a block from the committed block list or from the\n * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list\n * it may belong to.\n * @param blocks Blob Blocks.\n * @param options The options parameters.\n */\n commitBlockList(\n blocks: BlockLookupList,\n options?: BlockBlobCommitBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blocks,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n commitBlockListOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block\n * blob\n * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted\n * blocks, or both lists together.\n * @param options The options parameters.\n */\n getBlockList(\n listType: BlockListType,\n options?: BlockBlobGetBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n listType,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getBlockListOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst uploadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobUploadExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.blobType2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst putBlobFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n 
Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags,\n Parameters.transactionalContentMD5,\n Parameters.blobType2,\n Parameters.copySourceBlobProperties\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst stageBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst stageBlockFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst commitBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders\n }\n },\n requestBody: Parameters.blocks,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n 
Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlockList,\n headersMapper: Mappers.BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp25,\n Parameters.listType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js new file mode 100644 index 0000000000..17f5d4ca87 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js @@ -0,0 +1,769 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Container. */ +export class Container { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. 
+ */ + delete(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + const operationArguments = { + sourceContainerName, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. 
+ */ + listBlobHierarchySegment(delimiter, options) { + const operationArguments = { + delimiter, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const createOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.access, + Parameters.defaultEncryptionScope, + Parameters.preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const deleteOperationSpec = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: Mappers.ContainerDeleteHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerDeleteExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer +}; +const setMetadataOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetMetadataHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp6 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + 
headersMapper: Mappers.ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders + } + }, + requestBody: Parameters.containerAcl, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp7 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.access, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerRestoreHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRestoreExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp8 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.deletedContainerName, + Parameters.deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenameHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenameExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp9 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.sourceContainerName, + Parameters.sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp4, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: 
Mappers.ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where, + Parameters.restype2 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action, + Parameters.duration, + Parameters.proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action1, + Parameters.leaseId1 + ], + isXML: true, + serializer: xmlSerializer +}; +const renewLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action2 + ], + isXML: true, + serializer: xmlSerializer +}; +const breakLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.action3, + Parameters.breakPeriod + ], + isXML: true, + serializer: xmlSerializer +}; +const changeLeaseOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, 
+ Parameters.restype2, + Parameters.comp10 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.leaseId1, + Parameters.action4, + Parameters.proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsFlatSegmentResponse, + headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, + headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.restype2, + Parameters.include1, + Parameters.delimiter + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=container.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map new file mode 100644 index 0000000000..1e99010626 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"container.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/container.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAwCnD,sCAAsC;AACtC,MAAM,OAAO,SAAS;IAGpB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA8C;QAE9C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACgB,CAAC;IAC/C,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;OAGG;IACH,WAAW,CACT,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;;OAIG;IACH,eAAe,CACb,OAAgD;QAEhD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;IACjD,CAAC;IAED;;;OAGG;IACH,OAAO,CACL,OAAwC;QAExC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACgB,CAAC;IACzC,CAAC;IAED;;;;OAIG;IACH,MAAM,CACJ,mBAA2B,EAC3B,OAAuC;QAEvC,MAAM,kBAAkB,GAAgC;YACtD,mBAAmB;YACnB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;IACxC,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,WAAW,CACT,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,YAAY,CACV,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;IAED;;;;;OAKG;IACH,YAAY,CACV,OAAe,EACf,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;IAC9C,CAAC;IAED;;;;;OAKG;IACH,UAAU,CACR,OAAe,EACf,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;IAC5C,CAAC;IAED;;;;;;;;OAQG;IACH,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAA4C;
QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;IAC7C,CAAC;IAED;;;OAGG;IACH,mBAAmB,CACjB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,gCAAgC,CACgB,CAAC;IACrD,CAAC;IAED;;;;;;;OAOG;IACH,wBAAwB,CACtB,SAAiB,EACjB,OAAyD;QAEzD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACgB,CAAC;IAC1D,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;IAChD,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,8BAA8B;KAC1C;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,6BAA6B;SACrD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,sCAAsC;SAC9D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,QAAQ;IACpB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,QAAQ,CAAC;IACnE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE;oBACJ,IAAI,EAAE,UAAU;oBAChB,OAAO,EAAE;wBACP,IAAI,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,SAAS,EAAE,kBAAkB,EAAE;qBAC3D;iBACF;gBACD,cAAc,EAAE,mBAAmB;gBACnC,OAAO,EAAE,mBAAmB;gBAC5B,YAAY,EAAE,IAAI;gBAClB,cAAc,EAAE,kBAAkB;aACnC;YACD,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;I
ACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,YAAY;IACpC,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;IACnD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,uBAAuB;SAC/C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,oBAAoB;QAC/B,UAAU,CAAC,uBAAuB;KACnC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,sBAAsB;SAC9C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,+BAA+B;SACvD;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,aAAa;KACzB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAA
E;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;IACxD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,QAAQ;KACpB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;KACvB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;KAC5B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,gCAAgC,GAA2B;IAC/D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,4BAA4B;YAChD,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,QAAQ;KACpB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;IACpE,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iCAAiC;YACrD,aAAa,EAAE,OAAO,CAAC,wCAAwC;SAChE;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,iDAAiD;SACzE;KACF;IACD,eAAe,EAAE;Q
ACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,SAAS;KACrB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,kBAAkB;IACxB,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n ContainerCreateOptionalParams,\n ContainerCreateResponse,\n ContainerGetPropertiesOptionalParams,\n ContainerGetPropertiesResponse,\n ContainerDeleteOptionalParams,\n ContainerDeleteResponse,\n ContainerSetMetadataOptionalParams,\n ContainerSetMetadataResponse,\n ContainerGetAccessPolicyOptionalParams,\n ContainerGetAccessPolicyResponse,\n ContainerSetAccessPolicyOptionalParams,\n ContainerSetAccessPolicyResponse,\n ContainerRestoreOptionalParams,\n ContainerRestoreResponse,\n ContainerRenameOptionalParams,\n ContainerRenameResponse,\n ContainerSubmitBatchOptionalParams,\n ContainerSubmitBatchResponse,\n ContainerFilterBlobsOptionalParams,\n ContainerFilterBlobsResponse,\n ContainerAcquireLeaseOptionalParams,\n ContainerAcquireLeaseResponse,\n ContainerReleaseLeaseOptionalParams,\n ContainerReleaseLeaseResponse,\n ContainerRenewLeaseOptionalParams,\n ContainerRenewLeaseResponse,\n ContainerBreakLeaseOptionalParams,\n ContainerBreakLeaseResponse,\n ContainerChangeLeaseOptionalParams,\n ContainerChangeLeaseResponse,\n ContainerListBlobFlatSegmentOptionalParams,\n ContainerListBlobFlatSegmentResponse,\n ContainerListBlobHierarchySegmentOptionalParams,\n ContainerListBlobHierarchySegmentResponse,\n ContainerGetAccountInfoResponse\n} from \"../models\";\n\n/** Class representing a Container. */\nexport class Container {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Container class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * creates a new container under the specified account. If the container with the same name already\n * exists, the operation fails\n * @param options The options parameters.\n */\n create(\n options?: ContainerCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * returns all user-defined metadata and system properties for the specified container. 
The data\n * returned does not include the container's list of blobs\n * @param options The options parameters.\n */\n getProperties(\n options?: ContainerGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * operation marks the specified container for deletion. The container and any blobs contained within\n * it are later deleted during garbage collection\n * @param options The options parameters.\n */\n delete(\n options?: ContainerDeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * operation sets one or more user-defined name-value pairs for the specified container.\n * @param options The options parameters.\n */\n setMetadata(\n options?: ContainerSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the permissions for the specified container. The permissions indicate whether container data\n * may be accessed publicly.\n * @param options The options parameters.\n */\n getAccessPolicy(\n options?: ContainerGetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * sets the permissions for the specified container. The permissions indicate whether blobs in a\n * container may be accessed publicly.\n * @param options The options parameters.\n */\n setAccessPolicy(\n options?: ContainerSetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * Restores a previously-deleted container.\n * @param options The options parameters.\n */\n restore(\n options?: ContainerRestoreOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n restoreOperationSpec\n ) as Promise;\n }\n\n /**\n * Renames an existing container.\n * @param sourceContainerName Required. 
Specifies the name of the container to rename.\n * @param options The options parameters.\n */\n rename(\n sourceContainerName: string,\n options?: ContainerRenameOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceContainerName,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renameOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ContainerSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given\n * search expression. Filter blobs searches within the given container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ContainerFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n acquireLease(\n options?: ContainerAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: ContainerReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: ContainerRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n breakLease(\n options?: ContainerBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: ContainerChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param options The options parameters.\n */\n listBlobFlatSegment(\n options?: ContainerListBlobFlatSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobFlatSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix\n * element in the response body that acts as a placeholder for all blobs whose names begin with the\n * same substring up to the appearance of the delimiter character. 
The delimiter may be a single\n * character or a string.\n * @param options The options parameters.\n */\n listBlobHierarchySegment(\n delimiter: string,\n options?: ContainerListBlobHierarchySegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n delimiter,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobHierarchySegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.access,\n Parameters.defaultEncryptionScope,\n Parameters.preventEncryptionScopeOverride\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerDeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetMetadataExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp6\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince\n ],\n 
isXML: true,\n serializer: xmlSerializer\n};\nconst getAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: {\n name: \"Sequence\",\n element: {\n type: { name: \"Composite\", className: \"SignedIdentifier\" }\n }\n },\n serializedName: \"SignedIdentifiers\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\"\n },\n headersMapper: Mappers.ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders\n }\n },\n requestBody: Parameters.containerAcl,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.access,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst restoreOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerRestoreHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRestoreExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp8\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.deletedContainerName,\n Parameters.deletedContainerVersion\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renameOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenameHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenameExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp9\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.sourceContainerName,\n Parameters.sourceLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ContainerSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [\n 
Parameters.timeoutInSeconds,\n Parameters.comp4,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ContainerFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders\n }\n },\n 
queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobFlatSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsFlatSegmentResponse,\n headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobHierarchySegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1,\n Parameters.delimiter\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js new file mode 100644 index 0000000000..eca2687929 --- /dev/null +++ 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js @@ -0,0 +1,14 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export * from "./service"; +export * from "./container"; +export * from "./blob"; +export * from "./pageBlob"; +export * from "./appendBlob"; +export * from "./blockBlob"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map new file mode 100644 index 0000000000..6377015109 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/index.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,cAAc,WAAW,CAAC;AAC1B,cAAc,aAAa,CAAC;AAC5B,cAAc,QAAQ,CAAC;AACvB,cAAc,YAAY,CAAC;AAC3B,cAAc,cAAc,CAAC;AAC7B,cAAc,aAAa,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nexport * from \"./service\";\nexport * from \"./container\";\nexport * from \"./blob\";\nexport * from \"./pageBlob\";\nexport * from \"./appendBlob\";\nexport * from \"./blockBlob\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js new file mode 100644 index 0000000000..a014a5ad72 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js @@ -0,0 +1,494 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a PageBlob. */ +export class PageBlob { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + const operationArguments = { + contentLength, + blobContentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. 
+ */ + uploadPages(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. + */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + const operationArguments = { + sourceUrl, + sourceRange, + contentLength, + range, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + const operationArguments = { + blobContentLength, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. 
+ */ + updateSequenceNumber(sequenceNumberAction, options) { + const operationArguments = { + sequenceNumberAction, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyIncremental(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobCreateHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCreateExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.metadata, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobCacheControl, + Parameters.blobContentType, + Parameters.blobContentMD5, + Parameters.blobContentEncoding, + Parameters.blobContentLanguage, + Parameters.blobContentDisposition, + Parameters.immutabilityPolicyExpiry, + Parameters.immutabilityPolicyMode, + Parameters.encryptionScope, + Parameters.tier, + Parameters.blobTagsString, + Parameters.legalHold1, + Parameters.blobType, + Parameters.blobContentLength, + Parameters.blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: Parameters.body1, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + 
Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.transactionalContentMD5, + Parameters.transactionalContentCrc64, + Parameters.contentType1, + Parameters.accept2, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobClearPagesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.pageWrite1 + ], + isXML: true, + serializer: xmlSerializer +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.contentLength, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.sourceIfModifiedSince, + Parameters.sourceIfUnmodifiedSince, + Parameters.sourceIfMatch, + Parameters.sourceIfNoneMatch, + Parameters.sourceContentMD5, + Parameters.copySourceAuthorization, + Parameters.pageWrite, + Parameters.ifSequenceNumberLessThanOrEqualTo, + Parameters.ifSequenceNumberLessThan, + Parameters.ifSequenceNumberEqualTo, + Parameters.sourceUrl, + Parameters.sourceRange, + Parameters.sourceContentCrc64, + Parameters.range1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags + ], + isXML: true, + serializer: xmlSerializer +}; 
+const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.PageList, + headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.snapshot, + Parameters.comp20, + Parameters.prevsnapshot + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.range, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobResizeHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobResizeExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.encryptionKey, + Parameters.encryptionKeySha256, + Parameters.encryptionAlgorithm, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.encryptionScope, + Parameters.blobContentLength + ], + isXML: true, + serializer: xmlSerializer +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.leaseId, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.blobSequenceNumber, + Parameters.sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1, + Parameters.ifModifiedSince, + Parameters.ifUnmodifiedSince, + Parameters.ifMatch, + Parameters.ifNoneMatch, + Parameters.ifTags, + Parameters.copySource + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=pageBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map new file mode 100644 index 0000000000..8877c1931b --- 
/dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"pageBlob.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/pageBlob.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAwBnD,qCAAqC;AACrC,MAAM,OAAO,QAAQ;IAGnB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;;OAMG;IACH,MAAM,CACJ,aAAqB,EACrB,iBAAyB,EACzB,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,iBAAiB;YACjB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;IACvC,CAAC;IAED;;;;;OAKG;IACH,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA2C;QAE3C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACe,CAAC;IAC5C,CAAC;IAED;;;;OAIG;IACH,UAAU,CACR,aAAqB,EACrB,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACe,CAAC;IAC3C,CAAC;IAED;;;;;;;;;;OAUG;IACH,kBAAkB,CAChB,SAAiB,EACjB,WAAmB,EACnB,aAAqB,EACrB,KAAa,EACb,OAAkD;QAElD,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,WAAW;YACX,aAAa;YACb,KAAK;YACL,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACe,CAAC;IACnD,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA6C;QAE7C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACe,CAAC;IAC9C,CAAC;IAED;;;;OAIG;IACH,iBAAiB,CACf,OAAiD;QAEjD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACe,CAAC;IAClD,CAAC;IAED;;;;;OAKG;IACH,MAAM,CACJ,iBAAyB,EACzB,OAAsC;QAEtC,MAAM,kBAAkB,GAAgC;YACtD,iBAAiB;YACjB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;IACvC,CAAC;IAED;;;;;;OAMG;IACH,oBAAoB,CAClB,oBAA8C,EAC9C,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,oBAAoB;YACpB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACe,CAAC;IACrD,CAAC;IAED;;;;;;;;;;;OAWG;IACH,eAAe,CACb,UAAkB,EAClB,OAA+C;QAE/C,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACe,CAAC;IAChD,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,CAAC;IAC9C,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,gBAAgB;QAC3B,UAAU
,CAAC,eAAe;QAC1B,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,sBAAsB;QACjC,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,cAAc;QACzB,UAAU,CAAC,UAAU;QACrB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,kBAAkB;KAC9B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,0BAA0B;SAClD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,KAAK;IAC7B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,yBAAyB;QACpC,UAAU,CAAC,YAAY;QACvB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;KACnC;IACD,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,uBAAuB,GAA2B;IACtD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,+BAA+B,GAA2B;IAC9D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,iCAAiC;SACzD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,0CAA0C;SAClE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,qBAAqB;QAChC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,iCAAiC;QAC5C,UAAU,CAAC,wBAAwB;QACnC,UAAU,CAAC,uBAAuB;QAClC,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;KAClB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UA
AU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;KAClB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,8BAA8B,GAA2B;IAC7D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,QAAQ;YAC5B,aAAa,EAAE,OAAO,CAAC,gCAAgC;SACxD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,yCAAyC;SACjE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,QAAQ;QACnB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,YAAY;KACxB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;KAC3B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;IAClD,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,qBAAqB;SAC7C;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,mBAAmB;QAC9B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;KAC7B;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;IAChE,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,gBAAgB,CAAC;IAC/D,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,kBAAkB;QAC7B,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;IAC3D,IAAI,EAAE,yBAAyB;IAC/B,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,8BAA8B;SACtD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,uCAAuC;SAC/D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,MAAM,CAAC;IACjE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,eAAe;QAC1B,UAAU,CAAC,iBAAiB;QAC5B,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,UAAU;KACtB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n PageBlobCreateOptionalParams,\n PageBlobCreateResponse,\n PageBlobUploadPagesOptionalParams,\n PageBlobUploadPagesResponse,\n 
PageBlobClearPagesOptionalParams,\n PageBlobClearPagesResponse,\n PageBlobUploadPagesFromURLOptionalParams,\n PageBlobUploadPagesFromURLResponse,\n PageBlobGetPageRangesOptionalParams,\n PageBlobGetPageRangesResponse,\n PageBlobGetPageRangesDiffOptionalParams,\n PageBlobGetPageRangesDiffResponse,\n PageBlobResizeOptionalParams,\n PageBlobResizeResponse,\n SequenceNumberActionType,\n PageBlobUpdateSequenceNumberOptionalParams,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobCopyIncrementalOptionalParams,\n PageBlobCopyIncrementalResponse\n} from \"../models\";\n\n/** Class representing a PageBlob. */\nexport class PageBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class PageBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create operation creates a new page blob.\n * @param contentLength The length of the request.\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n blobContentLength: number,\n options?: PageBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n uploadPages(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: PageBlobUploadPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Clear Pages operation clears a set of pages from a page blob\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n clearPages(\n contentLength: number,\n options?: PageBlobClearPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n clearPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a\n * URL\n * @param sourceUrl Specify a URL to the copy source.\n * @param sourceRange Bytes of source data in the specified range. The length of this range should\n * match the ContentLength header and x-ms-range/Range destination range header.\n * @param contentLength The length of the request.\n * @param range The range of bytes to which the source range would be written. 
The range should be 512\n * aligned and range-end is required.\n * @param options The options parameters.\n */\n uploadPagesFromURL(\n sourceUrl: string,\n sourceRange: string,\n contentLength: number,\n range: string,\n options?: PageBlobUploadPagesFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n sourceRange,\n contentLength,\n range,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a\n * page blob\n * @param options The options parameters.\n */\n getPageRanges(\n options?: PageBlobGetPageRangesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were\n * changed between target blob and previous snapshot.\n * @param options The options parameters.\n */\n getPageRangesDiff(\n options?: PageBlobGetPageRangesDiffOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesDiffOperationSpec\n ) as Promise;\n }\n\n /**\n * Resize the Blob\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n resize(\n blobContentLength: number,\n options?: PageBlobResizeOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n resizeOperationSpec\n ) as Promise;\n }\n\n /**\n * Update the sequence number of the blob\n * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.\n * This property applies to page blobs only. This property indicates how the service should modify the\n * blob's sequence number\n * @param options The options parameters.\n */\n updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n options?: PageBlobUpdateSequenceNumberOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sequenceNumberAction,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n updateSequenceNumberOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.\n * The snapshot is copied such that only the differential changes between the previously copied\n * snapshot are transferred to the destination. The copied snapshots are complete copies of the\n * original snapshot and can be read or copied from as usual. This API is supported since REST version\n * 2016-05-31.\n * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyIncremental(\n copySource: string,\n options?: PageBlobCopyIncrementalOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyIncrementalOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType,\n Parameters.blobContentLength,\n Parameters.blobSequenceNumber\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo\n ],\n mediaType: \"binary\",\n serializer\n};\nconst clearPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: 
Mappers.StorageError,\n headersMapper: Mappers.PageBlobClearPagesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.pageWrite1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.sourceUrl,\n Parameters.sourceRange,\n Parameters.sourceContentCrc64,\n Parameters.range1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesDiffOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20,\n 
Parameters.prevsnapshot\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.prevSnapshotUrl\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst resizeOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobResizeHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobResizeExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.blobContentLength\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst updateSequenceNumberOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobSequenceNumber,\n Parameters.sequenceNumberAction\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyIncrementalOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.copySource\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js new file mode 100644 index 0000000000..106bca828a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js @@ -0,0 +1,345 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +import * as coreHttp from "@azure/core-http"; +import * as Mappers from "../models/mappers"; +import * as Parameters from "../models/parameters"; +/** Class representing a Service. */ +export class Service { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + const operationArguments = { + blobServiceProperties, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + const operationArguments = { + keyInfo, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. 
+ */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: Mappers.ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: Parameters.blobServiceProperties, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getPropertiesOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceProperties, + headersMapper: Mappers.ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.comp, + Parameters.timeoutInSeconds + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.BlobServiceStatistics, + headersMapper: Mappers.ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp1 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.ListContainersSegmentResponse, + headersMapper: Mappers.ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.comp2, + Parameters.prefix, + Parameters.marker, + Parameters.maxPageSize, + Parameters.include + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +const 
getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: Mappers.UserDelegationKey, + headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: Parameters.keyInfo, + queryParameters: [ + Parameters.restype, + Parameters.timeoutInSeconds, + Parameters.comp3 + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getAccountInfoOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: Mappers.ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [Parameters.comp, Parameters.restype1], + urlParameters: [Parameters.url], + headerParameters: [Parameters.version, Parameters.accept1], + isXML: true, + serializer: xmlSerializer +}; +const submitBatchOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: Mappers.ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: Parameters.body, + queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.contentType, + Parameters.accept, + Parameters.version, + Parameters.requestId, + Parameters.contentLength, + Parameters.multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const filterBlobsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: Mappers.FilterBlobSegment, + headersMapper: Mappers.ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: Mappers.StorageError, + headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + Parameters.timeoutInSeconds, + Parameters.marker, + Parameters.maxPageSize, + Parameters.comp5, + Parameters.where + ], + urlParameters: [Parameters.url], + headerParameters: [ + Parameters.version, + Parameters.requestId, + Parameters.accept1 + ], + isXML: true, + serializer: xmlSerializer +}; +//# sourceMappingURL=service.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map new file mode 100644 index 0000000000..2826430f03 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"service.js","sourceRoot":"","sources":["../../../../../../src/generated/src/operations/service.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAC7C,OAAO,KAAK,OAAO,MAAM,mBAAmB,CAAC;AAC7C,OAAO,KAAK,UAAU,MAAM,sBAAsB,CAAC;AAsBnD,oCAAoC;AACpC,MAAM,OAAO,OAAO;IAGlB;;;OAGG;IACH,YAAY,MAA4B;QACtC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IAED;;;;;OAKG;IACH,aAAa,CACX,qBAA4C,EAC5C,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,qBAAqB;YACrB,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;;OAIG;IACH,aAAa,CACX,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;;;OAKG;IACH,aAAa,CACX,OAA4C;QAE5C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACc,CAAC;IAC7C,CAAC;IAED;;;OAGG;IACH,qBAAqB,CACnB,OAAoD;QAEpD,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACc,CAAC;IACrD,CAAC;IAED;;;;;OAKG;IACH,oBAAoB,CAClB,OAAgB,EAChB,OAAmD;QAEnD,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACc,CAAC;IACpD,CAAC;IAED;;;OAGG;IACH,cAAc,CACZ,OAAmC;QAEnC,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACc,CAAC;IAC9C,CAAC;IAED;;;;;;;OAOG;IACH,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACc,CAAC;IAC3C,CAAC;IAED;;;;;OAKG;IACH,WAAW,CACT,OAA0C;QAE1C,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE,QAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACc,CAAC;IAC3C,CAAC;CACF;AACD,2BAA2B;AAC3B,MAAM,aAAa,GAAG,IAAI,QAAQ,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,IAAI,CAAC,CAAC;AAEzE,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,qBAAqB;IAC7C,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,gBAAgB;KAC5B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,qBAAqB;YACzC,aAAa,EAAE,OAAO,CAAC,2BAA2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,IAAI;QACf,UAAU,CAAC,gBAAgB;KAC5B;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;IACzD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,qBAAqB;YACzC,aAAa,EAAE,OAAO,CAAC,2BA
A2B;SACnD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,oCAAoC;SAC5D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;IACjE,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,6BAA6B;YACjD,aAAa,EAAE,OAAO,CAAC,mCAAmC;SAC3D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,4CAA4C;SACpE;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,OAAO;KACnB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;IAChE,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,2CAA2C;SACnE;KACF;IACD,WAAW,EAAE,UAAU,CAAC,OAAO;IAC/B,eAAe,EAAE;QACf,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;KACrB;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;IAC1D,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,aAAa,EAAE,OAAO,CAAC,4BAA4B;SACpD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,qCAAqC;SAC7D;KACF;IACD,eAAe,EAAE,CAAC,UAAU,CAAC,IAAI,EAAE,UAAU,CAAC,QAAQ,CAAC;IACvD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,UAAU,CAAC,OAAO,CAAC;IAC1D,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,MAAM;IAClB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE;gBACV,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;gBACxB,cAAc,EAAE,gBAAgB;aACjC;YACD,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,WAAW,EAAE,UAAU,CAAC,IAAI;IAC5B,eAAe,EAAE,CAAC,UAAU,CAAC,gBAAgB,EAAE,UAAU,CAAC,KAAK,CAAC;IAChE,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,aAAa;QACxB,UAAU,CAAC,oBAAoB;KAChC;IACD,KAAK,EAAE,IAAI;IACX,WAAW,EAAE,gCAAgC;IAC7C,SAAS,EAAE,KAAK;IAChB,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;IACvD,IAAI,EAAE,GAAG;IACT,UAAU,EAAE,KAAK;IACjB,SAAS,EAAE;QACT,GAAG,EAAE;YACH,UAAU,EAAE,OAAO,CAAC,iBAAiB;YACrC,aAAa,EAAE,OAAO,CAAC,yBAAyB;SACjD;QACD,OAAO,EAAE;YACP,UAAU,EAAE,OAAO,CAAC,YAAY;YAChC,aAAa,EAAE,OAAO,CAAC,kCAAkC;SAC1D;KACF;IACD,eAAe,EAAE;QACf,UAAU,CAAC,gBAAgB;QAC3B,UAAU,CAAC,MAAM;QACjB,UAAU,CAAC,WAAW;QACtB,UAAU,CAAC,KAAK;QAChB,UAAU,CAAC,KAAK;KACjB;IACD,aAAa,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC;IAC/B,gBAAgB,EAAE;QAChB,UAAU,CAAC,OAAO;QAClB,UAAU,CAAC,SAAS;QACpB,UAAU,CAAC,OAAO;KACnB;IACD,KAAK,EAAE,IAAI;IACX,UAAU,EAAE,aAAa;CAC1B,CAAC","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from 
\"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobServiceProperties,\n ServiceSetPropertiesOptionalParams,\n ServiceSetPropertiesResponse,\n ServiceGetPropertiesOptionalParams,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsOptionalParams,\n ServiceGetStatisticsResponse,\n ServiceListContainersSegmentOptionalParams,\n ServiceListContainersSegmentResponse,\n KeyInfo,\n ServiceGetUserDelegationKeyOptionalParams,\n ServiceGetUserDelegationKeyResponse,\n ServiceGetAccountInfoResponse,\n ServiceSubmitBatchOptionalParams,\n ServiceSubmitBatchResponse,\n ServiceFilterBlobsOptionalParams,\n ServiceFilterBlobsResponse\n} from \"../models\";\n\n/** Class representing a Service. */\nexport class Service {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Service class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * Sets properties for a storage account's Blob service endpoint, including properties for Storage\n * Analytics and CORS (Cross-Origin Resource Sharing) rules\n * @param blobServiceProperties The StorageService properties.\n * @param options The options parameters.\n */\n setProperties(\n blobServiceProperties: BlobServiceProperties,\n options?: ServiceSetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobServiceProperties,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the properties of a storage account's Blob service, including properties for Storage Analytics\n * and CORS (Cross-Origin Resource Sharing) rules.\n * @param options The options parameters.\n */\n getProperties(\n options?: ServiceGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only available on the\n * secondary location endpoint when read-access geo-redundant replication is enabled for the storage\n * account.\n * @param options The options parameters.\n */\n getStatistics(\n options?: ServiceGetStatisticsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getStatisticsOperationSpec\n ) as Promise;\n }\n\n /**\n * The List Containers Segment operation returns a list of the containers under the specified account\n * @param options The options parameters.\n */\n listContainersSegment(\n options?: ServiceListContainersSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listContainersSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using\n * bearer token authentication.\n * @param keyInfo Key information\n * @param options The options parameters.\n */\n getUserDelegationKey(\n keyInfo: KeyInfo,\n options?: ServiceGetUserDelegationKeyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n keyInfo,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getUserDelegationKeyOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ServiceSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a\n * given search expression. 
Filter blobs searches across all containers within a storage account but\n * can be scoped within the expression to a single container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ServiceFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst setPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders\n }\n },\n requestBody: Parameters.blobServiceProperties,\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceProperties,\n headersMapper: Mappers.ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getStatisticsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceStatistics,\n headersMapper: Mappers.ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listContainersSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListContainersSegmentResponse,\n headersMapper: Mappers.ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.include\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getUserDelegationKeyOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.UserDelegationKey,\n headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders\n }\n },\n requestBody: Parameters.keyInfo,\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp3\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js new file mode 100644 index 0000000000..dfcaa045c3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js @@ -0,0 +1,27 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +import { Service, Container, Blob, PageBlob, AppendBlob, BlockBlob } from "./operations"; +import { StorageClientContext } from "./storageClientContext"; +export class StorageClient extends StorageClientContext { + /** + * Initializes a new instance of the StorageClient class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
+ * @param options The parameter options + */ + constructor(url, options) { + super(url, options); + this.service = new Service(this); + this.container = new Container(this); + this.blob = new Blob(this); + this.pageBlob = new PageBlob(this); + this.appendBlob = new AppendBlob(this); + this.blockBlob = new BlockBlob(this); + } +} +//# sourceMappingURL=storageClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map new file mode 100644 index 0000000000..1163d8e534 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js.map @@ -0,0 +1 @@ +{"version":3,"file":"storageClient.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClient.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,EACL,OAAO,EACP,SAAS,EACT,IAAI,EACJ,QAAQ,EACR,UAAU,EACV,SAAS,EACV,MAAM,cAAc,CAAC;AACtB,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC;AAG9D,MAAM,OAAO,aAAc,SAAQ,oBAAoB;IACrD;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,KAAK,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACpB,IAAI,CAAC,OAAO,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,CAAC;QACjC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC;QAC3B,IAAI,CAAC,QAAQ,GAAG,IAAI,QAAQ,CAAC,IAAI,CAAC,CAAC;QACnC,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,CAAC,SAAS,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,CAAC;IACvC,CAAC;CAQF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n Service,\n Container,\n Blob,\n PageBlob,\n AppendBlob,\n BlockBlob\n} from \"./operations\";\nimport { StorageClientContext } from \"./storageClientContext\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nexport class StorageClient extends StorageClientContext {\n /**\n * Initializes a new instance of the StorageClient class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n super(url, options);\n this.service = new Service(this);\n this.container = new Container(this);\n this.blob = new Blob(this);\n this.pageBlob = new PageBlob(this);\n this.appendBlob = new AppendBlob(this);\n this.blockBlob = new BlockBlob(this);\n }\n\n service: Service;\n container: Container;\n blob: Blob;\n pageBlob: PageBlob;\n appendBlob: AppendBlob;\n blockBlob: BlockBlob;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js new file mode 100644 index 0000000000..b21db50dff --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js @@ -0,0 +1,39 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +import * as coreHttp from "@azure/core-http"; +const packageName = "azure-storage-blob"; +const packageVersion = "12.11.0"; +export class StorageClientContext extends coreHttp.ServiceClient { + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url, options) { + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(undefined, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2021-08-06"; + } +} +//# sourceMappingURL=storageClientContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map new file mode 100644 index 0000000000..655839cd89 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js.map @@ -0,0 +1 @@ +{"version":3,"file":"storageClientContext.js","sourceRoot":"","sources":["../../../../../src/generated/src/storageClientContext.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAEH,OAAO,KAAK,QAAQ,MAAM,kBAAkB,CAAC;AAG7C,MAAM,WAAW,GAAG,oBAAoB,CAAC;AACzC,MAAM,cAAc,GAAG,SAAS,CAAC;AAEjC,MAAM,OAAO,oBAAqB,SAAQ,QAAQ,CAAC,aAAa;IAI9D;;;;;OAKG;IACH,YAAY,GAAW,EAAE,OAAqC;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE;YACrB,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;SACzC;QAED,0CAA0C;QAC1C,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QAED,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;YACtB,MAAM,gBAAgB,GAAG,QAAQ,CAAC,wBAAwB,EAAE,CAAC;YAC7D,OAAO,CAAC,SAAS,GAAG,GAAG,WAAW,IAAI,cAAc,IAAI,gBAAgB,EAAE,CAAC;SAC5E;QAED,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;QAE1B,IAAI,CAAC,kBAAkB,GAAG,iCAAiC,CAAC;QAE5D,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC;QAE3C,wBAAwB;QACxB,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;QAEf,0CAA0C;QAC1C,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;IACjD,CAAC;CACF","sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nconst packageName = \"azure-storage-blob\";\nconst packageVersion = \"12.11.0\";\n\nexport class StorageClientContext extends coreHttp.ServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n\n if (!options.userAgent) {\n const 
defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;\n }\n\n super(undefined, options);\n\n this.requestContentType = \"application/json; charset=utf-8\";\n\n this.baseUri = options.endpoint || \"{url}\";\n\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2021-08-06\";\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js new file mode 100644 index 0000000000..bdaf72243f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=generatedModels.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map new file mode 100644 index 0000000000..e9600affd1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generatedModels.js.map @@ -0,0 +1 @@ +{"version":3,"file":"generatedModels.js","sourceRoot":"","sources":["../../../src/generatedModels.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Tags } from \".\";\nimport { BlobTags, BlobPropertiesInternal as BlobProperties } from \"./generated/src/models\";\n\nexport {\n AccessPolicy,\n AccessTier,\n AccountKind,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlHeaders,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockHeaders,\n AppendBlobCreateHeaders,\n ArchiveStatus,\n BlobDeleteImmutabilityPolicyHeaders,\n BlobDeleteImmutabilityPolicyResponse,\n BlobImmutabilityPolicyMode,\n BlobAbortCopyFromURLHeaders,\n BlobCopyFromURLHeaders,\n BlobCopySourceTags,\n BlobCreateSnapshotHeaders,\n BlobDeleteHeaders,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobGetPropertiesHeaders,\n BlobGetPropertiesResponse as BlobGetPropertiesResponseModel,\n BlobPropertiesInternal as BlobProperties,\n BlobUndeleteResponse,\n BlobHttpHeaders as BlobHTTPHeaders,\n BlobSetHttpHeadersHeaders as BlobSetHTTPHeadersHeaders,\n BlobSetHttpHeadersResponse as BlobSetHTTPHeadersResponse,\n BlobSetImmutabilityPolicyHeaders,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldHeaders,\n BlobSetLegalHoldResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLHeaders,\n BlobStartCopyFromURLResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobSetMetadataHeaders,\n BlobSetTierHeaders,\n BlobSetTierResponse,\n BlobSetTagsHeaders,\n BlobDownloadHeaders,\n BlobDownloadResponse as BlobDownloadResponseModel,\n BlobType,\n BlobTags,\n BlobUndeleteHeaders,\n Block,\n BlockBlobCommitBlockListHeaders,\n BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListHeaders,\n BlockBlobStageBlockFromURLHeaders,\n BlockBlobStageBlockHeaders,\n BlockList,\n BlockListType,\n BlockBlobGetBlockListResponse,\n BlobServiceProperties,\n BlobServiceStatistics,\n BlobGetTagsHeaders,\n 
BlobTag,\n ContainerCreateHeaders,\n ContainerCreateResponse,\n ContainerDeleteHeaders,\n ContainerDeleteResponse,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesHeaders,\n ContainerBreakLeaseOptionalParams,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerGetPropertiesResponse,\n ContainerProperties,\n ContainerSetMetadataResponse,\n ContainerSetAccessPolicyHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataHeaders,\n CopyStatusType,\n CorsRule,\n CpkInfo,\n DeleteSnapshotsOptionType,\n EncryptionAlgorithmType,\n GeoReplication,\n GeoReplicationStatusType,\n LeaseAccessConditions,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n ListContainersSegmentResponse,\n FilterBlobItem as FilterBlobItemModel,\n FilterBlobSegment as FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n Logging,\n Metrics,\n ModifiedAccessConditions as ModifiedAccessConditionsModel,\n PublicAccessType,\n PageBlobCreateResponse,\n PageBlobUploadPagesResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobClearPagesHeaders,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalHeaders,\n PageBlobCreateHeaders,\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesResponse as PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesDiffResponse as PageBlobGetPageRangesDiffResponseModel,\n PageBlobResizeHeaders,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberHeaders,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLHeaders,\n PageBlobUploadPagesHeaders,\n PageBlobCopyIncrementalResponse,\n SequenceNumberActionType,\n RehydratePriority,\n RetentionPolicy,\n AppendPositionAccessConditions,\n ServiceGetUserDelegationKeyHeaders,\n ServiceSubmitBatchHeaders,\n ServiceGetAccountInfoHeaders,\n ServiceGetPropertiesHeaders,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsHeaders,\n SequenceNumberAccessConditions,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentHeaders,\n ServiceListContainersSegmentResponse,\n ServiceSetPropertiesHeaders,\n SkuName,\n StaticWebsite,\n ContainerItem,\n ServiceSubmitBatchResponse as ServiceSubmitBatchResponseModel,\n ServiceSubmitBatchOptionalParams as ServiceSubmitBatchOptionalParamsModel,\n SignedIdentifier as SignedIdentifierModel,\n SyncCopyStatusType,\n UserDelegationKey as UserDelegationKeyModel,\n ContainerEncryptionScope,\n BlobQueryHeaders,\n BlobQueryResponse as BlobQueryResponseModel,\n ContainerRestoreResponse as ContainerUndeleteResponse,\n ContainerRestoreHeaders as ContainerUndeleteHeaders,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobPutBlobFromUrlHeaders,\n ContainerRenameResponse,\n ContainerRenameHeaders,\n} from \"./generated/src/models\";\n\n// Following definitions are to avoid breaking change.\nexport interface BlobPrefix {\n name: string;\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsFlatSegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobFlatListSegmentModel {\n blobItems: BlobItemInternal[];\n}\n\n/** An enumeration of blobs */\nexport interface ListBlobsHierarchySegmentResponseModel {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: 
string;\n segment: BlobHierarchyListSegmentModel;\n continuationToken?: string;\n}\n\nexport interface BlobHierarchyListSegmentModel {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItemInternal[];\n}\n\n/** An Azure Storage blob */\nexport interface BlobItemInternal {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n /** Properties of a blob */\n properties: BlobProperties;\n /** Dictionary of */\n metadata?: { [propertyName: string]: string };\n /** Blob tags */\n blobTags?: BlobTags;\n /** Dictionary of */\n objectReplicationMetadata?: { [propertyName: string]: string };\n /** Inactive root blobs which have any versions would have such tag with value true. */\n hasVersionsOnly?: boolean;\n}\n\n/**\n * Blob info from a {@link BlobServiceClient.findBlobsByTags}\n */\nexport interface FilterBlobItem {\n /**\n * Blob Name.\n */\n name: string;\n\n /**\n * Container Name.\n */\n containerName: string;\n\n /**\n * Blob Tags.\n */\n tags?: Tags;\n\n /**\n * Tag value.\n *\n * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags.\n */\n tagValue: string;\n}\n\n/**\n * Segment response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface FilterBlobSegment {\n serviceEndpoint: string;\n where: string;\n blobs: FilterBlobItem[];\n continuationToken?: string;\n}\n\nexport interface PageRangeInfo {\n start: number;\n end: number;\n isClear: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js new file mode 100644 index 0000000000..93dc89c137 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { RestError } from "@azure/core-http"; +export * from "./BlobServiceClient"; +export * from "./Clients"; +export * from "./ContainerClient"; +export * from "./BlobLeaseClient"; +export * from "./BlobBatch"; +export * from "./BlobBatchClient"; +export * from "./BatchResponse"; +export * from "./StorageBrowserPolicyFactory"; +export * from "./credentials/AnonymousCredential"; +export * from "./credentials/Credential"; +export { BlockBlobTier, PremiumPageBlobTier, } from "./models"; +export * from "./Pipeline"; +export * from "./policies/AnonymousCredentialPolicy"; +export * from "./policies/CredentialPolicy"; +export * from "./StorageRetryPolicyFactory"; +export * from "./generatedModels"; +export { RestError }; +export { logger } from "./log"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map new file mode 100644 index 0000000000..c9629ebb2d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAE7C,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AAGzC,OAAO,EACL,aAAa,EAEb,mBAAmB,GAOpB,MAAM,UAAU,CAAC;AAClB,cAAc,YAAY,CAAC;AAC3B,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAE5C,cAAc,mBAAmB,CAAC;AAClC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RestError } from \"@azure/core-http\";\n\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport { SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n BlobImmutabilityPolicy,\n PremiumPageBlobTier,\n Tags,\n TagConditions,\n ContainerRequestConditions,\n HttpAuthorization,\n ModificationConditions,\n MatchConditions,\n} from \"./models\";\nexport * from \"./Pipeline\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js new file mode 100644 index 0000000000..41b1b1de9e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { RestError } from "@azure/core-http"; +export * from "./BlobServiceClient"; +export * from "./Clients"; +export * from "./ContainerClient"; +export * from "./BlobLeaseClient"; +export * from "./sas/AccountSASPermissions"; +export * from "./sas/AccountSASResourceTypes"; +export * from "./sas/AccountSASServices"; +export * from "./sas/AccountSASSignatureValues"; +export * from "./BlobBatch"; +export * from "./BlobBatchClient"; +export * from "./BatchResponse"; +export * from "./sas/BlobSASPermissions"; +export * from "./sas/BlobSASSignatureValues"; +export * from "./StorageBrowserPolicyFactory"; +export * from "./sas/ContainerSASPermissions"; +export * from "./credentials/AnonymousCredential"; +export * from "./credentials/Credential"; +export * from "./credentials/StorageSharedKeyCredential"; +export { BlockBlobTier, PremiumPageBlobTier, StorageBlobAudience, } from "./models"; +export * from "./Pipeline"; +export * from "./policies/AnonymousCredentialPolicy"; +export * from "./policies/CredentialPolicy"; +export * from "./StorageRetryPolicyFactory"; +export * from "./policies/StorageSharedKeyCredentialPolicy"; +export * from "./sas/SASQueryParameters"; +export * from "./generatedModels"; +export { RestError }; +export { logger } from "./log"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map new file mode 100644 index 0000000000..d7ec1197c6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAC;AAG7C,cAAc,qBAAqB,CAAC;AACpC,cAAc,WAAW,CAAC;AAC1B,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,0BAA0B,CAAC;AACzC,cAAc,iCAAiC,CAAC;AAChD,cAAc,aAAa,CAAC;AAC5B,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,0BAA0B,CAAC;AACzC,cAAc,8BAA8B,CAAC;AAC7C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,+BAA+B,CAAC;AAC9C,cAAc,mCAAmC,CAAC;AAClD,cAAc,0BAA0B,CAAC;AACzC,cAAc,0CAA0C,CAAC;AAGzD,OAAO,EACL,aAAa,EACb,mBAAmB,EAUnB,mBAAmB,GACpB,MAAM,UAAU,CAAC;AAClB,cAAc,YAAY,CAAC;AAC3B,cAAc,sCAAsC,CAAC;AACrD,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6BAA6B,CAAC;AAC5C,cAAc,6CAA6C,CAAC;AAC5D,cAAc,0BAA0B,CAAC;AAEzC,cAAc,mBAAmB,CAAC;AAYlC,OAAO,EAAE,SAAS,EAAE,CAAC;AAMrB,OAAO,EAAE,MAAM,EAAE,MAAM,OAAO,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RestError } from \"@azure/core-http\";\n\nexport { PollerLike, PollOperationState } from \"@azure/core-lro\";\nexport * from \"./BlobServiceClient\";\nexport * from \"./Clients\";\nexport * from \"./ContainerClient\";\nexport * from \"./BlobLeaseClient\";\nexport * from \"./sas/AccountSASPermissions\";\nexport * from \"./sas/AccountSASResourceTypes\";\nexport * from \"./sas/AccountSASServices\";\nexport * from \"./sas/AccountSASSignatureValues\";\nexport * from \"./BlobBatch\";\nexport * from \"./BlobBatchClient\";\nexport * from \"./BatchResponse\";\nexport * from \"./sas/BlobSASPermissions\";\nexport * from \"./sas/BlobSASSignatureValues\";\nexport * from \"./StorageBrowserPolicyFactory\";\nexport * from \"./sas/ContainerSASPermissions\";\nexport * from \"./credentials/AnonymousCredential\";\nexport * from \"./credentials/Credential\";\nexport * from 
\"./credentials/StorageSharedKeyCredential\";\nexport { SasIPRange } from \"./sas/SasIPRange\";\nexport { Range } from \"./Range\";\nexport {\n BlockBlobTier,\n PremiumPageBlobTier,\n Tags,\n BlobDownloadResponseParsed,\n BlobImmutabilityPolicy,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n BlobQueryArrowField,\n BlobQueryArrowFieldType,\n HttpAuthorization,\n StorageBlobAudience,\n} from \"./models\";\nexport * from \"./Pipeline\";\nexport * from \"./policies/AnonymousCredentialPolicy\";\nexport * from \"./policies/CredentialPolicy\";\nexport * from \"./StorageRetryPolicyFactory\";\nexport * from \"./policies/StorageSharedKeyCredentialPolicy\";\nexport * from \"./sas/SASQueryParameters\";\nexport { CommonOptions } from \"./StorageClient\";\nexport * from \"./generatedModels\";\nexport {\n AppendBlobRequestConditions,\n BlobRequestConditions,\n Metadata,\n PageBlobRequestConditions,\n TagConditions,\n ContainerRequestConditions,\n ModificationConditions,\n MatchConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nexport { RestError };\nexport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n PageList,\n} from \"./PageBlobRangeResponse\";\nexport { logger } from \"./log\";\nexport {\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js new file mode 100644 index 0000000000..a746f21ccb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +/** + * The `@azure/logger` configuration for this package. + */ +export const logger = createClientLogger("storage-blob"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map new file mode 100644 index 0000000000..839d2f0c07 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/log.js.map @@ -0,0 +1 @@ +{"version":3,"file":"log.js","sourceRoot":"","sources":["../../../src/log.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,kBAAkB,EAAE,MAAM,eAAe,CAAC;AAEnD;;GAEG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,kBAAkB,CAAC,cAAc,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n */\nexport const logger = createClientLogger(\"storage-blob\");\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js new file mode 100644 index 0000000000..2945123dff --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js @@ -0,0 +1,104 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EncryptionAlgorithmAES25 } from "./utils/constants"; +/** + * Represents the access tier on a blob. 
+ * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export var BlockBlobTier; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(BlockBlobTier || (BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export var PremiumPageBlobTier; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. + */ + PremiumPageBlobTier["P80"] = "P80"; +})(PremiumPageBlobTier || (PremiumPageBlobTier = {})); +export function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +export function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +export var StorageBlobAudience; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. 
+ */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(StorageBlobAudience || (StorageBlobAudience = {})); +//# sourceMappingURL=models.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map new file mode 100644 index 0000000000..713fa16eae --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js.map @@ -0,0 +1 @@ +{"version":3,"file":"models.js","sourceRoot":"","sources":["../../../src/models.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAWlC,OAAO,EAAE,wBAAwB,EAAE,MAAM,mBAAmB,CAAC;AAyF7D;;;GAGG;AACH,MAAM,CAAN,IAAY,aAcX;AAdD,WAAY,aAAa;IACvB;;OAEG;IACH,4BAAW,CAAA;IACX;;OAEG;IACH,8BAAa,CAAA;IACb;;;OAGG;IACH,oCAAmB,CAAA;AACrB,CAAC,EAdW,aAAa,KAAb,aAAa,QAcxB;AAED;;;;GAIG;AACH,MAAM,CAAN,IAAY,mBA6CX;AA7CD,WAAY,mBAAmB;IAC7B;;OAEG;IACH,gCAAS,CAAA;IACT;;OAEG;IACH,gCAAS,CAAA;IACT;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;IACX;;OAEG;IACH,kCAAW,CAAA;AACb,CAAC,EA7CW,mBAAmB,KAAnB,mBAAmB,QA6C9B;AAED,MAAM,UAAU,YAAY,CAC1B,IAA8D;IAE9D,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,OAAO,IAAkB,CAAC,CAAC,qGAAqG;AAClI,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAC,GAAwB,EAAE,OAAgB;IAC7E,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE;QACnB,MAAM,IAAI,UAAU,CAAC,2DAA2D,CAAC,CAAC;KACnF;IAED,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,mBAAmB,EAAE;QACnC,GAAG,CAAC,mBAAmB,GAAG,wBAAwB,CAAC;KACpD;AACH,CAAC;AA2HD;;GAEG;AACH,MAAM,CAAN,IAAY,mBASX;AATD,WAAY,mBAAmB;IAC7B;;OAEG;IACH,gFAAyD,CAAA;IACzD;;OAEG;IACH,yFAAkE,CAAA;AACpE,CAAC,EATW,mBAAmB,KAAnB,mBAAmB,QAS9B","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\nimport {\n LeaseAccessConditions,\n SequenceNumberAccessConditions,\n AppendPositionAccessConditions,\n AccessTier,\n CpkInfo,\n BlobDownloadResponseModel,\n} from \"./generatedModels\";\nimport { EncryptionAlgorithmAES25 } from \"./utils/constants\";\n\n/**\n * Blob tags.\n */\nexport type Tags = Record;\n\n/**\n * A map of name-value pairs to associate with the resource.\n */\nexport interface Metadata {\n /**\n * A name-value pair.\n */\n [propertyName: string]: string;\n}\n\n/**\n * standard HTTP conditional headers and tags condition.\n */\nexport interface ModifiedAccessConditions\n extends MatchConditions,\n ModificationConditions,\n TagConditions {}\n\n/**\n * standard HTTP conditional headers, tags condition and lease condition\n */\nexport interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions {}\n\n/**\n * Conditions to add to the creation of this page blob.\n */\nexport interface PageBlobRequestConditions\n extends BlobRequestConditions,\n SequenceNumberAccessConditions {}\n\n/**\n * Conditions to add to the creation of this append blob.\n */\nexport interface AppendBlobRequestConditions\n extends BlobRequestConditions,\n AppendPositionAccessConditions {}\n\n/**\n * Specifies HTTP options for conditional requests based on modification time.\n */\nexport interface ModificationConditions {\n /**\n * Specify this header value to operate only on a blob if it has been modified since the\n * specified date/time.\n */\n ifModifiedSince?: Date;\n /**\n * Specify this header value to operate only on a blob if it has 
not been modified since the\n * specified date/time.\n */\n ifUnmodifiedSince?: Date;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on ETag matching.\n */\nexport interface MatchConditions {\n /**\n * Specify an ETag value to operate only on blobs with a matching value.\n */\n ifMatch?: string;\n /**\n * Specify an ETag value to operate only on blobs without a matching value.\n */\n ifNoneMatch?: string;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on blob tags.\n */\nexport interface TagConditions {\n /**\n * Optional SQL statement to apply to the tags of the blob.\n */\n tagConditions?: string;\n}\n\n/**\n * Conditions to meet for the container.\n */\nexport interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions {}\n\n/**\n * Represents the access tier on a blob.\n * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.}\n */\nexport enum BlockBlobTier {\n /**\n * Optimized for storing data that is accessed frequently.\n */\n Hot = \"Hot\",\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n Cool = \"Cool\",\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n Archive = \"Archive\",\n}\n\n/**\n * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts.\n * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here}\n * for detailed information on the corresponding IOPS and throughput per PageBlobTier.\n */\nexport enum PremiumPageBlobTier {\n /**\n * P4 Tier.\n */\n P4 = \"P4\",\n /**\n * P6 Tier.\n */\n P6 = \"P6\",\n /**\n * P10 Tier.\n */\n P10 = \"P10\",\n /**\n * P15 Tier.\n */\n P15 = \"P15\",\n /**\n * P20 Tier.\n */\n P20 = \"P20\",\n /**\n * P30 Tier.\n */\n P30 = \"P30\",\n /**\n * P40 Tier.\n */\n P40 = \"P40\",\n /**\n * P50 Tier.\n */\n P50 = \"P50\",\n /**\n * P60 Tier.\n */\n P60 = \"P60\",\n /**\n * P70 Tier.\n */\n P70 = \"P70\",\n /**\n * P80 Tier.\n */\n P80 = \"P80\",\n}\n\nexport function toAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string | undefined\n): AccessTier | undefined {\n if (tier === undefined) {\n return undefined;\n }\n\n return tier as AccessTier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\n\nexport function ensureCpkIfSpecified(cpk: CpkInfo | undefined, isHttps: boolean): void {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n/**\n * Specifies the Replication Status of a blob. This is used when a storage account has\n * Object Replication Policy(s) applied. 
See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}.\n */\nexport type ObjectReplicationStatus = \"complete\" | \"failed\";\n\n/**\n * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob.\n * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}.\n */\nexport interface ObjectReplicationRule {\n /**\n * The Object Replication Rule ID.\n */\n ruleId: string;\n\n /**\n * The Replication Status\n */\n replicationStatus: ObjectReplicationStatus;\n}\n\n/**\n * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}.\n * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the\n * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses\n * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}.\n */\nexport interface ObjectReplicationPolicy {\n /**\n * The Object Replication Policy ID.\n */\n policyId: string;\n\n /**\n * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID.\n */\n rules: ObjectReplicationRule[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadResponseParsed extends BlobDownloadResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * The type of a {@link BlobQueryArrowField}.\n */\nexport type BlobQueryArrowFieldType =\n | \"int64\"\n | \"bool\"\n | \"timestamp[ms]\"\n | \"string\"\n | \"double\"\n | \"decimal\";\n\n/**\n * Describe a field in {@link BlobQueryArrowConfiguration}.\n */\nexport interface BlobQueryArrowField {\n /**\n * The type of the field.\n */\n type: BlobQueryArrowFieldType;\n\n /**\n * The name of the field.\n */\n name?: string;\n\n /**\n * The precision of the field. Required if type is \"decimal\".\n */\n precision?: number;\n\n /**\n * The scale of the field. 
Required if type is is \"decimal\".\n */\n scale?: number;\n}\n\n/**\n * Describe immutable policy for blob.\n */\nexport interface BlobImmutabilityPolicy {\n /**\n * Specifies the date time when the blobs immutability policy is set to expire.\n */\n expiriesOn?: Date;\n /**\n * Specifies the immutability policy mode to set on the blob.\n */\n policyMode?: BlobImmutabilityPolicyMode;\n}\n\n/**\n * Represents authentication information in Authorization, ProxyAuthorization,\n * WWW-Authenticate, and Proxy-Authenticate header values.\n */\nexport interface HttpAuthorization {\n /**\n * The scheme to use for authorization.\n */\n scheme: string;\n\n /**\n * the credentials containing the authentication information of the user agent for the resource being requested.\n */\n value: string;\n}\n\n/**\n * Defines the known cloud audiences for Storage.\n */\nexport enum StorageBlobAudience {\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Storage.\n */\n StorageOAuthScopes = \"https://storage.azure.com/.default\",\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Disk.\n */\n DiskComputeOAuthScopes = \"https://disk.compute.azure.com/.default\",\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js new file mode 100644 index 0000000000..12270dbcfe --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { CredentialPolicy } from "./CredentialPolicy"; +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
+ /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} +//# sourceMappingURL=AnonymousCredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map new file mode 100644 index 0000000000..ecb1efb562 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/AnonymousCredentialPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AnonymousCredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/AnonymousCredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;;GAGG;AACH,MAAM,OAAO,yBAA0B,SAAQ,gBAAgB;IAC7D;;;;OAIG;IACH,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources\n * or for use with Shared Access Signatures (SAS).\n */\nexport class AnonymousCredentialPolicy extends CredentialPolicy {\n /**\n * Creates an instance of AnonymousCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js new file mode 100644 index 0000000000..a2f1fccc7f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy } from "@azure/core-http"; +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). 
+ return request; + } +} +//# sourceMappingURL=CredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map new file mode 100644 index 0000000000..1c1ba7e161 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"CredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/CredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAsC,MAAM,kBAAkB,CAAC;AAEzF;;;GAGG;AACH,MAAM,OAAgB,gBAAiB,SAAQ,iBAAiB;IAC9D;;;;OAIG;IACI,WAAW,CAAC,OAAoB;QACrC,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;IACjE,CAAC;IAED;;;;;OAKG;IACO,WAAW,CAAC,OAAoB;QACxC,4EAA4E;QAC5E,qCAAqC;QACrC,OAAO,OAAO,CAAC;IACjB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BaseRequestPolicy, HttpOperationResponse, WebResource } from \"@azure/core-http\";\n\n/**\n * Credential policy used to sign HTTP(S) requests before sending. This is an\n * abstract class.\n */\nexport abstract class CredentialPolicy extends BaseRequestPolicy {\n /**\n * Sends out request.\n *\n * @param request -\n */\n public sendRequest(request: WebResource): Promise {\n return this._nextPolicy.sendRequest(this.signRequest(request));\n }\n\n /**\n * Child classes must implement this method with request signing. This method\n * will be executed in {@link sendRequest}.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n // Child classes must override this method with request signing. This method\n // will be executed in sendRequest().\n return request;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js new file mode 100644 index 0000000000..a272017ba7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js @@ -0,0 +1,245 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { URLBuilder } from "@azure/core-http"; +import { BaseRequestPolicy, } from "@azure/core-http"; +import { delay } from "@azure/core-http"; +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. 
+ * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? 
_a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +export function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} +//# sourceMappingURL=StorageBearerTokenChallengeAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map new file mode 100644 index 0000000000..2992be83a5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBearerTokenChallengeAuthenticationPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageBearerTokenChallengeAuthenticationPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAiD,UAAU,EAAE,MAAM,kBAAkB,CAAC;AAC7F,OAAO,EACL,iBAAiB,GAIlB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEzC;;GAEG;AACH,MAAM,SAAS,GAAG;IAChB,YAAY,EAAE,WAAW;IACzB;;OAEG;IACH,eAAe,EAAE;QACf;;WAEG;QACH,aAAa,EAAE,eAAe;KAC/B;CACF,CAAC;AAiCF,sDAAsD;AACtD,MAAM,sBAAsB,GAAuB;IACjD,uBAAuB,EAAE,IAAI;IAC7B,iBAAiB,EAAE,IAAI;IACvB,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC,EAAE,oCAAoC;CACvE,CAAC;AAEF;;;;;;;;;;;;GAYG;AACH,KAAK,UAAU,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB;IAEnB,4EAA4E;IAC5E,eAAe;IACf,KAAK,UAAU,iBAAiB;QAC9B,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;aAC/B;YAAC,WAAM;gBACN,OAAO,IAAI,CAAC;aACb;SACF;aAAM;YACL,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;YAE1C,6CAA6C;YAC7C,IAAI,UAAU,KAAK,IAAI,EAAE;gBACvB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,OAAO,UAAU,CAAC;SACnB;IACH,CAAC;IAED,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;QACrB,MAAM,KAAK,CAAC,iBAAiB,CAAC,CAAC;QAE/B,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;KACnC;IAED,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;IAErC,MAAM,OAAO,mCACR,sBAAsB,GACtB,kBAAkB,CACtB,CAAC;IAEF;;;OAGG;IACH,MAAM,MAAM,GAAG;QACb;;WAEG;QACH,IAAI,YAAY;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;QAChC,CAAC;QACD;;;WAGG;QACH,IAAI,aAAa;;YACf,OAAO,CACL,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,CAAC,CAAC,GAAG,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1E,CAAC;QACJ,CAAC;QACD;;;WAGG;QACH,IAAI,WAAW;YACb,OAAO,CACL,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,CAC1F,CAAC;QACJ,CAAC;KACF,CAAC;IAEF;;;OAGG;IACH,SAAS,OAAO
,CAAC,eAAgC;;QAC/C,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;YACxB,yDAAyD;YACzD,MAAM,iBAAiB,GAAG,GAAgC,EAAE,CAC1D,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;YAE/C,wEAAwE;YACxE,6CAA6C;YAC7C,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;YACzB,+DAA+D;YAC/D,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,kBAAkB,mCAAI,IAAI,CAAC,GAAG,EAAE,CACxC;iBACE,IAAI,CAAC,CAAC,MAAM,EAAE,EAAE;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;gBACf,OAAO,KAAK,CAAC;YACf,CAAC,CAAC;iBACD,KAAK,CAAC,CAAC,MAAM,EAAE,EAAE;gBAChB,sEAAsE;gBACtE,qEAAqE;gBACrE,mBAAmB;gBACnB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;gBACb,MAAM,MAAM,CAAC;YACf,CAAC,CAAC,CAAC;SACN;QAED,OAAO,aAAqC,CAAC;IAC/C,CAAC;IAED,OAAO,KAAK,EAAE,YAA6B,EAAwB,EAAE;QACnE,EAAE;QACF,gBAAgB;QAChB,+DAA+D;QAC/D,6CAA6C;QAC7C,+DAA+D;QAC/D,yCAAyC;QACzC,6DAA6D;QAC7D,YAAY;QACZ,EAAE;QAEF,IAAI,MAAM,CAAC,WAAW;YAAE,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;SACvB;QAED,OAAO,KAAoB,CAAC;IAC9B,CAAC,CAAC;AACJ,CAAC;AACD;;;GAGG;AACH,SAAS,YAAY,CAAC,QAA+B;IACnD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;IAC3D,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE;QACxC,OAAO,SAAS,CAAC;KAClB;IACD,OAAO;AACT,CAAC;AAUD;;;;;GAKG;AACH,SAAS,cAAc,CAAC,SAAiB;IACvC,MAAM,eAAe,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC1D,MAAM,cAAc,GAAG,GAAG,eAAe,CAAC,IAAI,EAAE,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC;IAChF,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAAE,CACpD,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CACnE,CAAC;IACF,mCAAmC;IACnC,OAAO,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,iCAAM,CAAC,GAAK,CAAC,EAAG,EAAE,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED,aAAa;AAEb;;;;;;GAMG;AAEH,MAAM,UAAU,+CAA+C,CAC7D,UAA2B,EAC3B,MAAyB;IAEzB,wFAAwF;IACxF,IAAI,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAErD,MAAM,+CAAgD,SAAQ,iBAAiB;QAC7E,YAAmB,UAAyB,EAAE,OAA6B;YACzE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC7B,CAAC;QAEM,KAAK,CAAC,WAAW,CAAC,WAA4B;YACnD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;gBACzD,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;aACH;YAED,MAAM,gBAAgB,GAAG,QAAQ,CAAC;YAClC,MAAM,KAAK,GAAG,CACZ,MAAM,gBAAgB,CAAC;gBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;gBACpC,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;iBAC3C;aACF,CAAC,CACH,CAAC,KAAK,CAAC;YACR,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,UAAU,KAAK,EAAE,CAAC,CAAC;YAEpF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAEjE,IAAI,CAAA,QAAQ,aAAR,QAAQ,uBAAR,QAAQ,CAAE,MAAM,MAAK,GAAG,EAAE;gBAC5B,MAAM,SAAS,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;gBACzC,IAAI,SAAS,EAAE;oBACb,MAAM,aAAa,GAAc,cAAc,CAAC,SAAS,CAAC,CAAC;oBAC3D,MAAM,eAAe,GAAG,aAAa,CAAC,WAAW,GAAG,SAAS,CAAC,YAAY,CAAC;oBAC3E,MAAM,aAAa,GAAG,UAAU,CAAC,KAAK,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAC;oBACxE,MAAM,YAAY,GAAG,aAAa,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;oBACzD,MAAM,QAAQ,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;oBACjC,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,UAAU,EAAE,eAAe,CAAC,CAAC;oBAE5E,MAAM,iBAAiB,GAAG,CACxB,MAAM,oBAAoB,CAAC;wBACzB,WAAW,EAAE,WAAW,CAAC,WAAW;wBACpC,cAAc,EAAE;4BACd,cAAc,EAAE,WAAW,CAAC,cAAc;yBAC3C;wBACD,QAAQ,EAAE,QAAQ;qBACnB,CAAC,CACH,CAAC,KAAK,CAAC;oBAER,QAAQ,GAAG,oBAAoB,CAAC;oBAChC,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,SAAS,CAAC,eAAe,CAAC,aAAa,EACvC,UAAU,iBAAiB,EAAE,CAC9B,CAAC;oBACF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;iBAClD;aACF;YAED,OAAO,QAAQ,CAAC;QAClB,CAAC;KACF;IAED,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAE,EAAE;YACnE,OAAO,IAAI,+CAA+C,CAAC,UAAU,EAAE
,OAAO,CAAC,CAAC;QAClF,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential, URLBuilder } from \"@azure/core-http\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"@azure/core-http\";\nimport { HttpOperationResponse } from \"@azure/core-http\";\nimport { WebResourceLike } from \"@azure/core-http\";\nimport { delay } from \"@azure/core-http\";\n\n/**\n * A set of constants used internally when processing requests.\n */\nconst Constants = {\n DefaultScope: \"/.default\",\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n },\n};\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nconst DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token 
cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. 
All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n/**\n * We will retrieve the challenge only if the response status code was 401,\n * and if the response contained the header \"WWW-Authenticate\" with a non-empty value.\n */\nfunction getChallenge(response: HttpOperationResponse): string | undefined {\n const challenge = response.headers.get(\"WWW-Authenticate\");\n if (response.status === 401 && challenge) {\n return challenge;\n }\n return;\n}\n\n/**\n * Challenge structure\n */\ninterface Challenge {\n authorization_uri: string;\n resource_id: string;\n}\n\n/**\n * Converts: `Bearer a=\"b\" c=\"d\"`.\n * Into: `[ { a: 'b', c: 'd' }]`.\n *\n * @internal\n */\nfunction parseChallenge(challenge: string): any {\n const bearerChallenge = challenge.slice(\"Bearer \".length);\n const challengeParts = `${bearerChallenge.trim()} `.split(\" \").filter((x) => x);\n const keyValuePairs = challengeParts.map((keyValue) =>\n (([key, value]) => ({ [key]: value }))(keyValue.trim().split(\"=\"))\n );\n // Key-value pairs to plain object:\n return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {});\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\n\nexport function storageBearerTokenChallengeAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n let getToken = createTokenCycler(credential, scopes);\n\n class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const getTokenInternal = getToken;\n const token = (\n await getTokenInternal({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n })\n ).token;\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n\n const response = await this._nextPolicy.sendRequest(webResource);\n\n if (response?.status === 401) {\n const challenge = getChallenge(response);\n if (challenge) {\n const challengeInfo: Challenge = parseChallenge(challenge);\n const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope;\n const parsedAuthUri = 
URLBuilder.parse(challengeInfo.authorization_uri);\n const pathSegments = parsedAuthUri.getPath()!.split(\"/\");\n const tenantId = pathSegments[1];\n const getTokenForChallenge = createTokenCycler(credential, challengeScopes);\n\n const tokenForChallenge = (\n await getTokenForChallenge({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n tenantId: tenantId,\n })\n ).token;\n\n getToken = getTokenForChallenge;\n webResource.headers.set(\n Constants.HeaderConstants.AUTHORIZATION,\n `Bearer ${tokenForChallenge}`\n );\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return response;\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js new file mode 100644 index 0000000000..ad35d504c7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js @@ -0,0 +1,46 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, isNode, } from "@azure/core-http"; +import { HeaderConstants, URLConstants } from "../utils/constants"; +import { setURLParameter } from "../utils/utils.common"; +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. 
+ * + * @param request - + */ + async sendRequest(request) { + if (isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=StorageBrowserPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map new file mode 100644 index 0000000000..0916fcf8f1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageBrowserPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageBrowserPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EAEjB,MAAM,GAIP,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AACnE,OAAO,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAExD;;;;;;;;;;GAUG;AACH,MAAM,OAAO,oBAAqB,SAAQ,iBAAiB;IACzD;;;;OAIG;IACH,wGAAwG;IACxG,uEAAuE;IACvE,YAAY,UAAyB,EAAE,OAA6B;QAClE,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;IAC7B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,MAAM,EAAE;YACV,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;SAC9C;QAED,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,MAAM,EAAE;YACrF,OAAO,CAAC,GAAG,GAAG,eAAe,CAC3B,OAAO,CAAC,GAAG,EACX,YAAY,CAAC,UAAU,CAAC,sBAAsB,EAC9C,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAChC,CAAC;SACH;QAED,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;QAE/C,oFAAoF;QACpF,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,cAAc,CAAC,CAAC;QAEvD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants, URLConstants } from \"../utils/constants\";\nimport { setURLParameter } from \"../utils/utils.common\";\n\n/**\n * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:\n *\n * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.\n * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL\n * thus avoid the browser cache.\n *\n * 2. Remove cookie header for security\n *\n * 3. Remove content-length header to avoid browsers warning\n */\nexport class StorageBrowserPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of StorageBrowserPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n return this._nextPolicy.sendRequest(request);\n }\n\n if (request.method.toUpperCase() === \"GET\" || request.method.toUpperCase() === \"HEAD\") {\n request.url = setURLParameter(\n request.url,\n URLConstants.Parameters.FORCE_BROWSER_NO_CACHE,\n new Date().getTime().toString()\n );\n }\n\n request.headers.remove(HeaderConstants.COOKIE);\n\n // According to XHR standards, content-length should be fully controlled by browsers\n request.headers.remove(HeaderConstants.CONTENT_LENGTH);\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js new file mode 100644 index 0000000000..040037381e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js @@ -0,0 +1,214 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +import { BaseRequestPolicy, } from "@azure/core-http"; +import { URLConstants } from "../utils/constants"; +import { delay, setURLHost, setURLParameter } from "../utils/utils.common"; +import { logger } from "../log"; +/** + * A factory method used to generated a RetryPolicy factory. + * + * @param retryOptions - + */ +export function NewRetryPolicyFactory(retryOptions) { + return { + create: (nextPolicy, options) => { + return new StorageRetryPolicy(nextPolicy, options, retryOptions); + }, + }; +} +/** + * RetryPolicy types. + */ +export var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR = new AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export class StorageRetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? 
Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. 
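+ * Editorial note, not part of the upstream source (summary of the checks below): the request is
+ * retried on well-known network error codes (ETIMEDOUT, ECONNRESET, ENOTFOUND, ...), on a 404
+ * returned by the secondary endpoint, on HTTP 500/503, and on a PARSE_ERROR caused by an
+ * incomplete "Unclosed root tag" XML body; once attempt reaches maxTries, no further try is made.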
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. 
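+ * Editorial note, not part of the upstream source (worked example of the formula below): with the
+ * defaults retryDelayInMs = 4000 and maxRetryDelayInMs = 120000, the EXPONENTIAL policy waits
+ * min((2^(attempt - 1) - 1) * 4000, 120000) ms, i.e. 0 ms, 4 s, 12 s, 28 s, 60 s, then capped at
+ * 120 s; the FIXED policy always waits retryDelayInMs, and secondary retries wait a random
+ * delay below 1 s.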
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } +} +//# sourceMappingURL=StorageRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map new file mode 100644 index 0000000000..11f74f2fc4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"StorageRetryPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageRetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,OAAO,EAEL,iBAAiB,GAOlB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAClD,OAAO,EAAE,KAAK,EAAE,UAAU,EAAE,eAAe,EAAE,MAAM,uBAAuB,CAAC;AAC3E,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAC;AAEhC;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CAAC,YAAkC;IACtE,OAAO;QACL,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,EAAsB,EAAE;YACvF,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,YAAY,CAAC,CAAC;QACnE,CAAC;KACF,CAAC;AACJ,CAAC;AAED;;GAEG;AACH,MAAM,CAAN,IAAY,sBASX;AATD,WAAY,sBAAsB;IAChC;;OAEG;IACH,iFAAW,CAAA;IACX;;OAEG;IACH,qEAAK,CAAA;AACP,CAAC,EATW,sBAAsB,KAAtB,sBAAsB,QASjC;AAED,wCAAwC;AACxC,MAAM,qBAAqB,GAAwB;IACjD,iBAAiB,EAAE,GAAG,GAAG,IAAI;IAC7B,QAAQ,EAAE,CAAC;IACX,cAAc,EAAE,CAAC,GAAG,IAAI;IACxB,eAAe,EAAE,sBAAsB,CAAC,WAAW;IACnD,aAAa,EAAE,EAAE;IACjB,cAAc,EAAE,SAAS,EAAE,2CAA2C;CACvE,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;AAEvE;;GAEG;AACH,MAAM,OAAO,kBAAmB,SAAQ,iBAAiB;IAMvD;;;;;;OAMG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,eAAoC,qBAAqB;QAEzD,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAE3B,2BAA2B;QAC3B,IAAI,CAAC,YAAY,GAAG;YAClB,eAAe,EAAE,YAAY,CAAC,eAAe;gBAC3C,CAAC,CAAC,YAAY,CAAC,eAAe;gBAC9B,CAAC,CAAC,qBAAqB,CAAC,eAAe;YAEzC,QAAQ,EACN,YAAY,CAAC,QAAQ,IAAI,YAAY,CAAC,QAAQ,IAAI,CAAC;gBACjD,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC;gBACnC,CAAC,CAAC,qBAAqB,CAAC,QAAQ;YAEpC,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;gBAC7D,CAAC,CAAC,YAAY,CAAC,cAAc;gBAC7B,CAAC,CAAC,qBAAqB,CAAC,cAAc;YAE1C,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;gBAC7D,CAAC,CAAC,IAAI,CAAC,GAAG,CACN,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB;oBAC5B,CAAC,CAAC,YAAY,CAAC,iBAAiB;oBAChC,CAAC,CAAC,qBAAqB,CAAC,iBAAkB,CAC7C;gBACH,CAAC,CAAC,qBAAqB,CAAC,cAAc;YAE1C,iBAAiB,EACf,YAAY,CAAC,iBAAiB,IAAI,YAAY,CAAC,iBAAiB,IAAI,CAAC;gBACnE,CAAC,CAAC,YAAY,CAAC,iBAAiB;gBAChC,CAAC,CAAC,qBAAqB,CAAC,iBAAiB;YAE7C,aAAa,EAAE,YAAY,CAAC,aAAa;gBACvC,CAAC,CAAC,YAAY,CAAC,aAAa;gBAC5B,CAAC,CAAC,qBAAqB,CAAC,aAAa;SACxC,CAAC;IACJ,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;IACpD,CAAC;IAED;;;;;;;;;OASG;IACO,KAAK,CAAC,kBAAkB,CAChC,OAAoB,EACpB,eAAwB,EACxB,OAAe;QAEf,MAAM,UAAU,GAAgB,OAAO,CAAC,KAAK,EAAE,CAAC;QAEhD,MAAM,cAAc,GAClB,eAAe;YACf,CAAC,IAAI,CAAC,YAAY,CAA
C,aAAa;YAChC,CAAC,CAAC,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,CAAC;YACxF,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;QAEpB,IAAI,CAAC,cAAc,EAAE;YACnB,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,aAAc,CAAC,CAAC;SAC/E;QAED,kEAAkE;QAClE,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE;YACpC,UAAU,CAAC,GAAG,GAAG,eAAe,CAC9B,UAAU,CAAC,GAAG,EACd,YAAY,CAAC,UAAU,CAAC,OAAO,EAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,cAAe,GAAG,IAAI,CAAC,CAAC,QAAQ,EAAE,CAChE,CAAC;SACH;QAED,IAAI,QAA2C,CAAC;QAChD,IAAI;YACF,MAAM,CAAC,IAAI,CAAC,2BAA2B,OAAO,IAAI,cAAc,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC;YAC9F,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC1D,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE;gBACxD,OAAO,QAAQ,CAAC;aACjB;YAED,eAAe,GAAG,eAAe,IAAI,CAAC,CAAC,cAAc,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC;SACnF;QAAC,OAAO,GAAQ,EAAE;YACjB,MAAM,CAAC,KAAK,CAAC,uCAAuC,GAAG,CAAC,OAAO,WAAW,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;YACtF,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,EAAE;gBAC7D,MAAM,GAAG,CAAC;aACX;SACF;QAED,MAAM,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;;OAOG;IACO,WAAW,CACnB,cAAuB,EACvB,OAAe,EACf,QAAgC,EAChC,GAAe;QAEf,IAAI,OAAO,IAAI,IAAI,CAAC,YAAY,CAAC,QAAS,EAAE;YAC1C,MAAM,CAAC,IAAI,CACT,2BAA2B,OAAO,gBAAgB,IAAI,CAAC,YAAY;iBAChE,QAAS,mBAAmB,CAChC,CAAC;YACF,OAAO,KAAK,CAAC;SACd;QAED,iFAAiF;QACjF,uBAAuB;QACvB,MAAM,eAAe,GAAG;YACtB,WAAW;YACX,iBAAiB;YACjB,cAAc;YACd,YAAY;YACZ,QAAQ;YACR,WAAW;YACX,SAAS;YACT,OAAO;YACP,oBAAoB,EAAE,2DAA2D;SAClF,CAAC;QACF,IAAI,GAAG,EAAE;YACP,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IACE,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAC/C,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAClD,CAAC,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,KAAK,cAAc,CAAC,EAClE;oBACA,MAAM,CAAC,IAAI,CAAC,8BAA8B,cAAc,qBAAqB,CAAC,CAAC;oBAC/E,OAAO,IAAI,CAAC;iBACb;aACF;SACF;QAED,kFAAkF;QAClF,gFAAgF;QAChF,gEAAgE;QAChE,IAAI,QAAQ,IAAI,GAAG,EAAE;YACnB,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;YACzE,IAAI,CAAC,cAAc,IAAI,UAAU,KAAK,GAAG,EAAE;gBACzC,MAAM,CAAC,IAAI,CAAC,qDAAqD,CAAC,CAAC;gBACnE,OAAO,IAAI,CAAC;aACb;YAED,0CAA0C;YAC1C,IAAI,UAAU,KAAK,GAAG,IAAI,UAAU,KAAK,GAAG,EAAE;gBAC5C,MAAM,CAAC,IAAI,CAAC,2CAA2C,UAAU,GAAG,CAAC,CAAC;gBACtE,OAAO,IAAI,CAAC;aACb;SACF;QAED,IAAI,CAAA,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,IAAI,MAAK,aAAa,KAAI,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,OAAO,CAAC,UAAU,CAAC,iCAAiC,CAAC,CAAA,EAAE;YAC7F,MAAM,CAAC,IAAI,CACT,iFAAiF,CAClF,CAAC;YACF,OAAO,IAAI,CAAC;SACb;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;OAMG;IACK,KAAK,CAAC,KAAK,CAAC,cAAuB,EAAE,OAAe,EAAE,WAA6B;QACzF,IAAI,aAAa,GAAW,CAAC,CAAC;QAE9B,IAAI,cAAc,EAAE;YAClB,QAAQ,IAAI,CAAC,YAAY,CAAC,eAAe,EAAE;gBACzC,KAAK,sBAAsB,CAAC,WAAW;oBACrC,aAAa,GAAG,IAAI,CAAC,GAAG,CACtB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,EAClE,IAAI,CAAC,YAAY,CAAC,iBAAkB,CACrC,CAAC;oBACF,MAAM;gBACR,KAAK,sBAAsB,CAAC,KAAK;oBAC/B,aAAa,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,CAAC;oBAClD,MAAM;aACT;SACF;aAAM;YACL,aAAa,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;SACtC;QAED,MAAM,CAAC,IAAI,CAAC,0BAA0B,aAAa,IAAI,CAAC,CAAC;QACzD,OAAO,KAAK,CAAC,aAAa,EAAE,WAAW,EAAE,iBAAiB,CAAC,CAAC;IAC9D,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from 
\"@azure/abort-controller\";\n\nimport {\n AbortSignalLike,\n BaseRequestPolicy,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RestError,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { StorageRetryOptions } from \"../StorageRetryPolicyFactory\";\nimport { URLConstants } from \"../utils/constants\";\nimport { delay, setURLHost, setURLParameter } from \"../utils/utils.common\";\nimport { logger } from \"../log\";\n\n/**\n * A factory method used to generated a RetryPolicy factory.\n *\n * @param retryOptions -\n */\nexport function NewRetryPolicyFactory(retryOptions?: StorageRetryOptions): RequestPolicyFactory {\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy => {\n return new StorageRetryPolicy(nextPolicy, options, retryOptions);\n },\n };\n}\n\n/**\n * RetryPolicy types.\n */\nexport enum StorageRetryPolicyType {\n /**\n * Exponential retry. Retry time delay grows exponentially.\n */\n EXPONENTIAL,\n /**\n * Linear retry. Retry time delay grows linearly.\n */\n FIXED,\n}\n\n// Default values of StorageRetryOptions\nconst DEFAULT_RETRY_OPTIONS: StorageRetryOptions = {\n maxRetryDelayInMs: 120 * 1000,\n maxTries: 4,\n retryDelayInMs: 4 * 1000,\n retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,\n secondaryHost: \"\",\n tryTimeoutInMs: undefined, // Use server side default timeout strategy\n};\n\nconst RETRY_ABORT_ERROR = new AbortError(\"The operation was aborted.\");\n\n/**\n * Retry policy with exponential retry and linear retry implemented.\n */\nexport class StorageRetryPolicy extends BaseRequestPolicy {\n /**\n * RetryOptions.\n */\n private readonly retryOptions: StorageRetryOptions;\n\n /**\n * Creates an instance of RetryPolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param retryOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryOptions: StorageRetryOptions = DEFAULT_RETRY_OPTIONS\n ) {\n super(nextPolicy, options);\n\n // Initialize retry options\n this.retryOptions = {\n retryPolicyType: retryOptions.retryPolicyType\n ? retryOptions.retryPolicyType\n : DEFAULT_RETRY_OPTIONS.retryPolicyType,\n\n maxTries:\n retryOptions.maxTries && retryOptions.maxTries >= 1\n ? Math.floor(retryOptions.maxTries)\n : DEFAULT_RETRY_OPTIONS.maxTries,\n\n tryTimeoutInMs:\n retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0\n ? retryOptions.tryTimeoutInMs\n : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,\n\n retryDelayInMs:\n retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0\n ? Math.min(\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs!\n )\n : DEFAULT_RETRY_OPTIONS.retryDelayInMs,\n\n maxRetryDelayInMs:\n retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,\n\n secondaryHost: retryOptions.secondaryHost\n ? retryOptions.secondaryHost\n : DEFAULT_RETRY_OPTIONS.secondaryHost,\n };\n }\n\n /**\n * Sends request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n return this.attemptSendRequest(request, false, 1);\n }\n\n /**\n * Decide and perform next retry. Won't mutate request parameter.\n *\n * @param request -\n * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then\n * the resource was not found. This may be due to replication delay. 
So, in this\n * case, we'll never try the secondary again for this operation.\n * @param attempt - How many retries has been attempted to performed, starting from 1, which includes\n * the attempt will be performed by this method call.\n */\n protected async attemptSendRequest(\n request: WebResource,\n secondaryHas404: boolean,\n attempt: number\n ): Promise {\n const newRequest: WebResource = request.clone();\n\n const isPrimaryRetry =\n secondaryHas404 ||\n !this.retryOptions.secondaryHost ||\n !(request.method === \"GET\" || request.method === \"HEAD\" || request.method === \"OPTIONS\") ||\n attempt % 2 === 1;\n\n if (!isPrimaryRetry) {\n newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost!);\n }\n\n // Set the server-side timeout query parameter \"timeout=[seconds]\"\n if (this.retryOptions.tryTimeoutInMs) {\n newRequest.url = setURLParameter(\n newRequest.url,\n URLConstants.Parameters.TIMEOUT,\n Math.floor(this.retryOptions.tryTimeoutInMs! / 1000).toString()\n );\n }\n\n let response: HttpOperationResponse | undefined;\n try {\n logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? \"Primary\" : \"Secondary\"}`);\n response = await this._nextPolicy.sendRequest(newRequest);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {\n return response;\n }\n\n secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);\n } catch (err: any) {\n logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) {\n throw err;\n }\n }\n\n await this.delay(isPrimaryRetry, attempt, request.abortSignal);\n return this.attemptSendRequest(request, secondaryHas404, ++attempt);\n }\n\n /**\n * Decide whether to retry according to last HTTP response and retry counters.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param response -\n * @param err -\n */\n protected shouldRetry(\n isPrimaryRetry: boolean,\n attempt: number,\n response?: HttpOperationResponse,\n err?: RestError\n ): boolean {\n if (attempt >= this.retryOptions.maxTries!) {\n logger.info(\n `RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions\n .maxTries!}, no further try.`\n );\n return false;\n }\n\n // Handle network failures, you may need to customize the list when you implement\n // your own http client\n const retriableErrors = [\n \"ETIMEDOUT\",\n \"ESOCKETTIMEDOUT\",\n \"ECONNREFUSED\",\n \"ECONNRESET\",\n \"ENOENT\",\n \"ENOTFOUND\",\n \"TIMEOUT\",\n \"EPIPE\",\n \"REQUEST_SEND_ERROR\", // For default xhr based http client provided in ms-rest-js\n ];\n if (err) {\n for (const retriableError of retriableErrors) {\n if (\n err.name.toUpperCase().includes(retriableError) ||\n err.message.toUpperCase().includes(retriableError) ||\n (err.code && err.code.toString().toUpperCase() === retriableError)\n ) {\n logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);\n return true;\n }\n }\n }\n\n // If attempt was against the secondary & it returned a StatusNotFound (404), then\n // the resource was not found. This may be due to replication delay. So, in this\n // case, we'll never try the secondary again for this operation.\n if (response || err) {\n const statusCode = response ? response.status : err ? 
err.statusCode : 0;\n if (!isPrimaryRetry && statusCode === 404) {\n logger.info(`RetryPolicy: Secondary access with 404, will retry.`);\n return true;\n }\n\n // Server internal error or server timeout\n if (statusCode === 503 || statusCode === 500) {\n logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);\n return true;\n }\n }\n\n if (err?.code === \"PARSE_ERROR\" && err?.message.startsWith(`Error \"Error: Unclosed root tag`)) {\n logger.info(\n \"RetryPolicy: Incomplete XML response likely due to service timeout, will retry.\"\n );\n return true;\n }\n\n return false;\n }\n\n /**\n * Delay a calculated time between retries.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param abortSignal -\n */\n private async delay(isPrimaryRetry: boolean, attempt: number, abortSignal?: AbortSignalLike) {\n let delayTimeInMs: number = 0;\n\n if (isPrimaryRetry) {\n switch (this.retryOptions.retryPolicyType) {\n case StorageRetryPolicyType.EXPONENTIAL:\n delayTimeInMs = Math.min(\n (Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs!,\n this.retryOptions.maxRetryDelayInMs!\n );\n break;\n case StorageRetryPolicyType.FIXED:\n delayTimeInMs = this.retryOptions.retryDelayInMs!;\n break;\n }\n } else {\n delayTimeInMs = Math.random() * 1000;\n }\n\n logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);\n return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js new file mode 100644 index 0000000000..ac9b2ce558 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js @@ -0,0 +1,140 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HeaderConstants } from "../utils/constants"; +import { getURLPath, getURLQueries } from "../utils/utils.common"; +import { CredentialPolicy } from "./CredentialPolicy"; +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. 
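+ * Editorial note, not part of the upstream source: x-ms-date is set first, a string-to-sign is
+ * assembled from the standard headers plus the canonicalized x-ms- headers and resource, the
+ * credential's computeHMACSHA256 signs it, and the result is attached as
+ * `Authorization: SharedKey <accountName>:<signature>`.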
+ * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
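+ * Editorial example, not part of the upstream source: the headers `X-Ms-Version: 2020-10-02` and
+ * `x-ms-date: <date>` canonicalize to `x-ms-date:<date>\nx-ms-version:2020-10-02\n`
+ * (names lowercased, pairs sorted, one `name:value` entry per line).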
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} +//# sourceMappingURL=StorageSharedKeyCredentialPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map new file mode 100644 index 0000000000..a421e6598f --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"StorageSharedKeyCredentialPolicy.js","sourceRoot":"","sources":["../../../../src/policies/StorageSharedKeyCredentialPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAIlC,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,aAAa,EAAE,MAAM,uBAAuB,CAAC;AAClE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;GAEG;AACH,MAAM,OAAO,gCAAiC,SAAQ,gBAAgB;IAMpE;;;;;OAKG;IACH,YACE,UAAyB,EACzB,OAA6B,EAC7B,OAAmC;QAEnC,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;IACzB,CAAC;IAED;;;;OAIG;IACO,WAAW,CAAC,OAAoB;QACxC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QAEzE,IACE,OAAO,CAAC,IAAI;YACZ,CAAC,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAK,OAAO,CAAC,IAAe,KAAK,SAAS,CAAC;YAC5E,OAAO,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EACvB;YACA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,cAAc,EAAE,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;SACtF;QAED,MAAM,YAAY,GAChB;YACE,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE;YAC5B,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,cAAc,CAAC;YAClE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC;YAC/D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,YAAY,CAAC;YAChE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,IAAI,CAAC;YACxD,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,iBAAiB,CAAC;YACrE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,QAAQ,CAAC;YAC5D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,aAAa,CAAC;YACjE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,mBAAmB,CAAC;YACvE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC;SAC1D,CAAC,IAAI,CAAC,IAAI,CAAC;YACZ,IAAI;YACJ,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC;YAC3C,IAAI,CAAC,8BAA8B,CAAC,OAAO,CAAC,CAAC;QAE/C,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;QACvE,OAAO,CAAC,OAAO,CAAC,GAAG,CACjB,eAAe,CAAC,aAAa,EAC7B,aAAa,IAAI,CAAC,OAAO,CAAC,WAAW,IAAI,SAAS,EAAE,CACrD,CAAC;QAEF,uCAAuC;QACvC,0DAA0D;QAC1D,mEAAmE;QACnE,+EAA+E;QAC/E,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;;;;;OAMG;IACK,oBAAoB,CAAC,OAAoB,EAAE,UAAkB;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QAE9C,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,EAAE,CAAC;SACX;QAED,0EAA0E;QAC1E,sEAAsE;QACtE,yFAAyF;QACzF,IAAI,UAAU,KAAK,eAAe,CAAC,cAAc,IAAI,KAAK,KAAK,GAAG,EAAE;YAClE,OAAO,EAAE,CAAC;SACX;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;;;;;;;;OAYG;IACK,6BAA6B,CAAC,OAAoB;QACxD,IAAI,YAAY,GAAG,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,EAAE;YACjE,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,kBAAkB,CAAC,CAAC;QACjF,CAAC,CAAC,CAAC;QAEH,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAU,EAAE;YACjC,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;QAClE,CAAC,CAAC,CAAC;QAEH,2BAA2B;QAC3B,YAAY,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE;YACzD,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;gBACjF,OAAO,KAAK,CAAC;aACd;YACD,OAAO,IAAI,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,IAAI,gCAAgC,GAAW,EAAE,CAAC;QAClD,YAAY,CAAC,OAAO,CAAC,CAAC,MAAM,EAAE,EAAE;YAC9B,gCAAgC,IAAI,GAAG,MAAM,CAAC,IAAI;iBAC/C,WAAW,EAAE;iBACb,SAAS,EAAE,IAAI,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC;QAChD,CAAC,CAAC,CAAC;QAEH,OAAO,gCAAgC,CAAC;IAC1C,CAAC;IAED;;;;OAIG;IACK,8BAA8B,CAAC,OAAoB;QACzD,MAAM,IAAI,GAAG,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC;QAE5C,IAAI,2BAA2B,GAAW,EAAE,CAAC;QAC7C,2BAA2B,IAAI,IAAI,IAAI,CAAC,OAAO,CAAC,WAAW,GAAG,IAAI,EAAE,CAAC;QAErE,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,gBAAgB,GAA8B,EAAE,CAAC;Q
ACvD,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GAAa,EAAE,CAAC;YAC/B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;gBACzB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;oBACtD,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC;oBACvC,gBAAgB,CAAC,YAAY,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;oBAC9C,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;iBAC9B;aACF;YAED,SAAS,CAAC,IAAI,EAAE,CAAC;YACjB,KAAK,MAAM,GAAG,IAAI,SAAS,EAAE;gBAC3B,2BAA2B,IAAI,KAAK,GAAG,IAAI,kBAAkB,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC;aACxF;SACF;QAED,OAAO,2BAA2B,CAAC;IACrC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions, WebResource } from \"@azure/core-http\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { HeaderConstants } from \"../utils/constants\";\nimport { getURLPath, getURLQueries } from \"../utils/utils.common\";\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n */\nexport class StorageSharedKeyCredentialPolicy extends CredentialPolicy {\n /**\n * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy\n */\n private readonly factory: StorageSharedKeyCredential;\n\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n * @param factory -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n factory: StorageSharedKeyCredential\n ) {\n super(nextPolicy, options);\n this.factory = factory;\n }\n\n /**\n * Signs request.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n\n if (\n request.body &&\n (typeof request.body === \"string\" || (request.body as Buffer) !== undefined) &&\n request.body.length > 0\n ) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n\n const stringToSign: string =\n [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.RANGE),\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n\n const signature: string = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(\n HeaderConstants.AUTHORIZATION,\n `SharedKey ${this.factory.accountName}:${signature}`\n );\n\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n }\n\n /**\n * Retrieve header value according to shared key sign 
rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @param request -\n * @param headerName -\n */\n private getHeaderValueToSign(request: WebResource, headerName: string): string {\n const value = request.headers.get(headerName);\n\n if (!value) {\n return \"\";\n }\n\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n\n return value;\n }\n\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @param request -\n */\n private getCanonicalizedHeadersString(request: WebResource): string {\n let headersArray = request.headers.headersArray().filter((value) => {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n\n headersArray.sort((a, b): number => {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n\n // Remove duplicate headers\n headersArray = headersArray.filter((value, index, array) => {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n\n let canonicalizedHeadersStringToSign: string = \"\";\n headersArray.forEach((header) => {\n canonicalizedHeadersStringToSign += `${header.name\n .toLowerCase()\n .trimRight()}:${header.value.trimLeft()}\\n`;\n });\n\n return canonicalizedHeadersStringToSign;\n }\n\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @param request -\n */\n private getCanonicalizedResourceString(request: WebResource): string {\n const path = getURLPath(request.url) || \"/\";\n\n let canonicalizedResourceString: string = \"\";\n canonicalizedResourceString += `/${this.factory.accountName}${path}`;\n\n const queries = getURLQueries(request.url);\n const lowercaseQueries: { [key: string]: string } = {};\n if (queries) {\n const queryKeys: string[] = [];\n for (const key in queries) {\n if (Object.prototype.hasOwnProperty.call(queries, key)) {\n const lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n\n queryKeys.sort();\n for (const key of queryKeys) {\n canonicalizedResourceString += `\\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;\n }\n }\n\n return canonicalizedResourceString;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js new file mode 100644 index 0000000000..f5b173bbbf --- /dev/null +++ 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BaseRequestPolicy, HttpHeaders, isNode, } from "@azure/core-http"; +import { HeaderConstants } from "../utils/constants"; +/** + * TelemetryPolicy is a policy used to tag user-agent header for every requests. + */ +export class TelemetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (isNode) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return this._nextPolicy.sendRequest(request); + } +} +//# sourceMappingURL=TelemetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map new file mode 100644 index 0000000000..5b4b98bff5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TelemetryPolicy.js","sourceRoot":"","sources":["../../../../src/policies/TelemetryPolicy.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EACL,iBAAiB,EACjB,WAAW,EAEX,MAAM,GAIP,MAAM,kBAAkB,CAAC;AAE1B,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AAErD;;GAEG;AACH,MAAM,OAAO,eAAgB,SAAQ,iBAAiB;IAMpD;;;;;OAKG;IACH,YAAY,UAAyB,EAAE,OAA6B,EAAE,SAAiB;QACrF,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;QAC3B,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;IAC7B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,WAAW,CAAC,OAAoB;QAC3C,IAAI,MAAM,EAAE;YACV,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;gBACpB,OAAO,CAAC,OAAO,GAAG,IAAI,WAAW,EAAE,CAAC;aACrC;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;gBACpD,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;aACjE;SACF;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpHeaders,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants } from \"../utils/constants\";\n\n/**\n * TelemetryPolicy is a policy used to tag user-agent header for every requests.\n */\nexport class TelemetryPolicy extends BaseRequestPolicy {\n /**\n * Telemetry string.\n */\n public readonly telemetry: string;\n\n /**\n * Creates an instance of TelemetryPolicy.\n * @param nextPolicy -\n * @param options -\n * @param telemetry -\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, telemetry: string) {\n super(nextPolicy, options);\n this.telemetry = telemetry;\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n if (!request.headers.get(HeaderConstants.USER_AGENT)) {\n request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);\n }\n }\n\n return 
this._nextPolicy.sendRequest(request);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js new file mode 100644 index 0000000000..5203424553 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { delay } from "@azure/core-http"; +import { Poller } from "@azure/core-lro"; +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +export class BlobBeginCopyFromUrlPoller extends Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? 
+ options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} +//# sourceMappingURL=BlobStartCopyFromUrlPoller.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map new file mode 100644 index 0000000000..b41e8693b3 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobStartCopyFromUrlPoller.js","sourceRoot":"","sources":["../../../../src/pollers/BlobStartCopyFromUrlPoller.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACzC,OAAO,EAAqC,MAAM,EAAE,MAAM,iBAAiB,CAAC;AAsE5E;;;;;GAKG;AACH,MAAM,OAAO,0BAA2B,SAAQ,MAG/C;IAGC,YAAY,OAA0C;QACpD,MAAM,EACJ,UAAU,EACV,UAAU,EACV,YAAY,GAAG,KAAK,EACpB,UAAU,EACV,UAAU,EACV,uBAAuB,GACxB,GAAG,OAAO,CAAC;QAEZ,IAAI,KAAgD,CAAC;QAErD,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC;SACtC;QAED,MAAM,SAAS,GAAG,qCAAqC,iCAClD,KAAK,KACR,UAAU;YACV,UAAU;YACV,uBAAuB,IACvB,CAAC;QAEH,KAAK,CAAC,SAAS,CAAC,CAAC;QAEjB,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;YACpC,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;SAC7B;QAED,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;IACnC,CAAC;IAEM,KAAK;QACV,OAAO,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;IAClC,CAAC;CACF;AAED;;;;;GAKG;AACH,MAAM,MAAM,GAAgD,KAAK,UAAU,MAAM,CAE/E,OAAO,GAAG,EAAE;IAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,MAAM,EAAE,GAAG,KAAK,CAAC;IACzB,IAAI,KAAK,CAAC,WAAW,EAAE;QACrB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;KACrD;IAED,IAAI,CAAC,MAAM,EAAE;QACX,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;QACzB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;KACrD;IAED,sFAAsF;IACtF,MAAM,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,MAAM,EAAE;QAC9C,WAAW,EAAE,OAAO,CAAC,WAAW;KACjC,CAAC,CAAC;IACH,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;IAEzB,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,MAAM,GAAgD,KAAK,UAAU,MAAM,CAE/E,OAAO,GAAG,EAAE;IAEZ,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,uBAAuB,EAAE,GAAG,KAAK,CAAC;IAElE,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;QACpB,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,gBAAgB,CAAC,UAAU,EAAE,uBAAuB,CAAC,CAAC;QAEtF,4BAA4B;QAC5B,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;QAC7B,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,EAAE;YACnC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;YACtB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;KACF;SAAM,IAAI,CAAC,KAAK
,CAAC,WAAW,EAAE;QAC7B,IAAI;YACF,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;YAC1F,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,GAAG,MAAM,CAAC;YAC5C,MAAM,gBAAgB,GAAG,KAAK,CAAC,YAAY,CAAC;YAC5C,IAAI,YAAY,EAAE;gBAChB,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC;aACnC;YACD,IACE,UAAU,KAAK,SAAS;gBACxB,YAAY,KAAK,gBAAgB;gBACjC,OAAO,OAAO,CAAC,YAAY,KAAK,UAAU,EAC1C;gBACA,2CAA2C;gBAC3C,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;aAC7B;iBAAM,IAAI,UAAU,KAAK,SAAS,EAAE;gBACnC,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;gBACtB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;iBAAM,IAAI,UAAU,KAAK,QAAQ,EAAE;gBAClC,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CACrB,kCAAkC,MAAM,CAAC,qBAAqB,IAAI,SAAS,GAAG,CAC/E,CAAC;gBACF,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;aAC1B;SACF;QAAC,OAAO,GAAQ,EAAE;YACjB,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC;YAClB,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;SAC1B;KACF;IAED,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;GAKG;AACH,MAAM,QAAQ,GAAkD,SAAS,QAAQ;IAG/E,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE;QAC1D,2FAA2F;QAC3F,IAAI,GAAG,KAAK,YAAY,EAAE;YACxB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,KAAK,CAAC;IACf,CAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;GAGG;AACH,SAAS,qCAAqC,CAC5C,KAAoC;IAEpC,OAAO;QACL,KAAK,oBAAO,KAAK,CAAE;QACnB,MAAM;QACN,QAAQ;QACR,MAAM;KACP,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { delay } from \"@azure/core-http\";\nimport { PollOperation, PollOperationState, Poller } from \"@azure/core-lro\";\nimport { BlobClient, BlobStartCopyFromURLOptions, BlobBeginCopyFromURLResponse } from \"../Clients\";\n\n/**\n * Defines the operations from a {@link BlobClient} that are needed for the poller\n * returned by {@link BlobClient.beginCopyFromURL} to work.\n */\nexport type CopyPollerBlobClient = Pick & {\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptions\n ): Promise;\n};\n\n/**\n * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}.\n *\n * This state is passed into the user-specified `onProgress` callback\n * whenever copy progress is detected.\n */\nexport interface BlobBeginCopyFromUrlPollState\n extends PollOperationState {\n /**\n * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}.\n */\n readonly blobClient: CopyPollerBlobClient;\n /**\n * The copyId that identifies the in-progress blob copy.\n */\n copyId?: string;\n /**\n * the progress of the blob copy as reported by the service.\n */\n copyProgress?: string;\n /**\n * The source URL provided in {@link BlobClient.beginCopyFromURL}.\n */\n copySource: string;\n /**\n * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call.\n * This is exposed for the poller and should not be modified directly.\n */\n readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * The PollOperation responsible for:\n * - performing the initial startCopyFromURL\n * - checking the copy status via getProperties\n * - cancellation via abortCopyFromURL\n * @hidden\n */\nexport interface BlobBeginCopyFromURLPollOperation\n extends PollOperation {}\n\n/**\n * The set of options used to configure the poller.\n * This is an internal interface populated by {@link BlobClient.beginCopyFromURL}.\n *\n * @hidden\n */\nexport interface BlobBeginCopyFromUrlPollerOptions {\n blobClient: CopyPollerBlobClient;\n copySource: string;\n intervalInMs?: number;\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n resumeFrom?: string;\n 
startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nexport class BlobBeginCopyFromUrlPoller extends Poller<\n BlobBeginCopyFromUrlPollState,\n BlobBeginCopyFromURLResponse\n> {\n public intervalInMs: number;\n\n constructor(options: BlobBeginCopyFromUrlPollerOptions) {\n const {\n blobClient,\n copySource,\n intervalInMs = 15000,\n onProgress,\n resumeFrom,\n startCopyFromURLOptions,\n } = options;\n\n let state: BlobBeginCopyFromUrlPollState | undefined;\n\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n\n const operation = makeBlobBeginCopyFromURLPollOperation({\n ...state,\n blobClient,\n copySource,\n startCopyFromURLOptions,\n });\n\n super(operation);\n\n if (typeof onProgress === \"function\") {\n this.onProgress(onProgress);\n }\n\n this.intervalInMs = intervalInMs;\n }\n\n public delay(): Promise {\n return delay(this.intervalInMs);\n }\n}\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst cancel: BlobBeginCopyFromURLPollOperation[\"cancel\"] = async function cancel(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n) {\n const state = this.state;\n const { copyId } = state;\n if (state.isCompleted) {\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n if (!copyId) {\n state.isCancelled = true;\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n await state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n });\n state.isCancelled = true;\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst update: BlobBeginCopyFromURLPollOperation[\"update\"] = async function update(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n): Promise {\n const state = this.state;\n const { blobClient, copySource, startCopyFromURLOptions } = state;\n\n if (!state.isStarted) {\n state.isStarted = true;\n const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions);\n\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n } else if (!state.isCompleted) {\n try {\n const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal });\n const { copyStatus, copyProgress } = result;\n const prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n if (\n copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === \"function\"\n ) {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n } else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n } else if (copyStatus === \"failed\") {\n state.error = new Error(\n `Blob copy failed with reason: \"${result.copyStatusDescription || \"unknown\"}\"`\n );\n state.isCompleted = true;\n }\n } catch (err: any) {\n state.error = err;\n 
state.isCompleted = true;\n }\n }\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst toString: BlobBeginCopyFromURLPollOperation[\"toString\"] = function toString(\n this: BlobBeginCopyFromURLPollOperation\n) {\n return JSON.stringify({ state: this.state }, (key, value) => {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(\n state: BlobBeginCopyFromUrlPollState\n): BlobBeginCopyFromURLPollOperation {\n return {\n state: { ...state },\n cancel,\n toString,\n update,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js new file mode 100644 index 0000000000..1410738682 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js @@ -0,0 +1,227 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. 
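+ * Editorial example, not part of the upstream source: AccountSASPermissions.parse("rwdl") returns
+ * an instance with read, write, delete and list set to true; any character outside the recognized
+ * set (r, w, d, x, l, a, c, u, p, t, f, i, y) throws a RangeError.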
+ * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=AccountSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map new file mode 100644 index 0000000000..c1e001714d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,qBAAqB;IAAlC;QA4GE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;IA0D1C,CAAC;IApOC;;;;OAIG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAE1D,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;YAC3B,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC3C,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACrC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACnD,MAAM;gBACR,KAAK,GAAG;oBACN,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC7C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,EAAE,CAAC,CAAC;aAC9D;SACF;QAED,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAAyC;QAC1D,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAC1D,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;S
ACnC;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;SAC5C;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;SACnC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;SACtC;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;YACxC,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACpD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;SAC9C;QACD,OAAO,qBAAqB,CAAC;IAC/B,CAAC;IAmED;;;;;;;;;OASG;IACI,QAAQ;QACb,iFAAiF;QACjF,wFAAwF;QACxF,iFAAiF;QACjF,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the\n * values are set, this should be serialized with toString and set as the permissions field on an\n * {@link AccountSASSignatureValues} object. 
It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class AccountSASPermissions {\n /**\n * Parse initializes the AccountSASPermissions fields from a string.\n *\n * @param permissions -\n */\n public static parse(permissions: string): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n\n for (const c of permissions) {\n switch (c) {\n case \"r\":\n accountSASPermissions.read = true;\n break;\n case \"w\":\n accountSASPermissions.write = true;\n break;\n case \"d\":\n accountSASPermissions.delete = true;\n break;\n case \"x\":\n accountSASPermissions.deleteVersion = true;\n break;\n case \"l\":\n accountSASPermissions.list = true;\n break;\n case \"a\":\n accountSASPermissions.add = true;\n break;\n case \"c\":\n accountSASPermissions.create = true;\n break;\n case \"u\":\n accountSASPermissions.update = true;\n break;\n case \"p\":\n accountSASPermissions.process = true;\n break;\n case \"t\":\n accountSASPermissions.tag = true;\n break;\n case \"f\":\n accountSASPermissions.filter = true;\n break;\n case \"i\":\n accountSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n accountSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission character: ${c}`);\n }\n }\n\n return accountSASPermissions;\n }\n\n /**\n * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n if (permissionLike.read) {\n accountSASPermissions.read = true;\n }\n if (permissionLike.write) {\n accountSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n accountSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n accountSASPermissions.deleteVersion = true;\n }\n if (permissionLike.filter) {\n accountSASPermissions.filter = true;\n }\n if (permissionLike.tag) {\n accountSASPermissions.tag = true;\n }\n if (permissionLike.list) {\n accountSASPermissions.list = true;\n }\n if (permissionLike.add) {\n accountSASPermissions.add = true;\n }\n if (permissionLike.create) {\n accountSASPermissions.create = true;\n }\n if (permissionLike.update) {\n accountSASPermissions.update = true;\n }\n if (permissionLike.process) {\n accountSASPermissions.process = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n accountSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n accountSASPermissions.permanentDelete = true;\n }\n return accountSASPermissions;\n }\n\n /**\n * Permission to read resources and list queues and tables granted.\n */\n public read: boolean = false;\n\n /**\n * Permission to write resources granted.\n */\n public write: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public delete: boolean = false;\n\n /**\n * Permission to delete versions granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n public list: boolean = false;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n public add: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public create: boolean = false;\n\n 
/**\n * Permissions to update messages and table entities granted.\n */\n public update: boolean = false;\n\n /**\n * Permission to get and delete messages granted.\n */\n public process: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Permission to filter blobs.\n */\n public filter: boolean = false;\n\n /**\n * Permission to set immutability policy.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Produces the SAS permissions string for an Azure Storage account.\n * Call this method to set AccountSASSignatureValues Permissions field.\n *\n * Using this method will guarantee the resource types are in\n * an order accepted by the service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n // The order of the characters should be as specified here to ensure correctness:\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n // Use a string array instead of string concatenating += operator for performance\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.filter) {\n permissions.push(\"f\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.update) {\n permissions.push(\"u\");\n }\n if (this.process) {\n permissions.push(\"p\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like an account SAS permission.\n * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface AccountSASPermissionsLike {\n /**\n * Permission to read resources and list queues and tables granted.\n */\n read?: boolean;\n\n /**\n * Permission to write resources granted.\n */\n write?: boolean;\n\n /**\n * Permission to delete blobs and files granted.\n */\n delete?: boolean;\n\n /**\n * Permission to delete versions granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n list?: boolean;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n add?: boolean;\n\n /**\n * Permission to create blobs and files granted.\n */\n create?: boolean;\n\n /**\n * Permissions to update messages and table entities granted.\n */\n update?: boolean;\n\n /**\n * Permission to get and delete messages granted.\n */\n process?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Permission to filter blobs.\n */\n filter?: boolean;\n\n /**\n * Permission to set immutability policy.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js new file mode 100644 index 0000000000..7780df0fb5 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} +//# sourceMappingURL=AccountSASResourceTypes.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map new file mode 100644 index 0000000000..23f49a59a2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASResourceTypes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASResourceTypes.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASResourceTypes.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,uBAAuB;IAApC;QA6BE;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,cAAS,GAAY,KAAK,CAAC;QAElC;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;IAqBjC,CAAC;IA9DC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,aAAqB;QACvC,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAE9D,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;YAC7B,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,SAAS,GAAG,IAAI,CAAC;oBACzC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,0BAA0B,CAAC,EAAE,CAAC,CAAC;aACvD;SACF;QAED,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAiBD;;;;;OAKG;IACI,QAAQ;QACb,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACzB;QACD,OAAO,aAAa,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAChC,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the\n * values are set, this should be serialized with toString and set as the resources field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but\n * the order of the resources is particular and this class guarantees correctness.\n */\nexport class AccountSASResourceTypes {\n /**\n * Creates an {@link AccountSASResourceTypes} from the specified resource types string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid resource type.\n *\n * @param resourceTypes -\n */\n public static parse(resourceTypes: string): AccountSASResourceTypes {\n const accountSASResourceTypes = new AccountSASResourceTypes();\n\n for (const c of resourceTypes) {\n switch (c) {\n case \"s\":\n accountSASResourceTypes.service = true;\n break;\n case \"c\":\n accountSASResourceTypes.container = true;\n break;\n case \"o\":\n accountSASResourceTypes.object = true;\n break;\n default:\n throw new RangeError(`Invalid resource type: ${c}`);\n }\n }\n\n return accountSASResourceTypes;\n }\n\n /**\n * Permission to access service level APIs granted.\n */\n public service: boolean = false;\n\n /**\n * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.\n */\n public container: boolean = false;\n\n /**\n * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.\n */\n public object: boolean = false;\n\n /**\n * Converts the given resource types to a string.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n const resourceTypes: string[] = [];\n if (this.service) {\n resourceTypes.push(\"s\");\n }\n if (this.container) {\n resourceTypes.push(\"c\");\n }\n if (this.object) {\n resourceTypes.push(\"o\");\n }\n return resourceTypes.join(\"\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js new file mode 100644 index 0000000000..2e6f1e7452 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js @@ -0,0 +1,80 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. 
+ * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} +//# sourceMappingURL=AccountSASServices.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map new file mode 100644 index 0000000000..1a36f19900 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASServices.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASServices.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASServices.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAA/B;QAgCE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;IAsBhC,CAAC;IAvEC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,QAAgB;QAClC,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QAEpD,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE;YACxB,QAAQ,CAAC,EAAE;gBACT,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,EAAE,CAAC,CAAC;aAC3D;SACF;QAED,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAsBD;;;OAGG;IACI,QAAQ;QACb,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACpB;QACD,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC3B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that service. Once all the\n * values are set, this should be serialized with toString and set as the services field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but\n * the order of the services is particular and this class guarantees correctness.\n */\nexport class AccountSASServices {\n /**\n * Creates an {@link AccountSASServices} from the specified services string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @param services -\n */\n public static parse(services: string): AccountSASServices {\n const accountSASServices = new AccountSASServices();\n\n for (const c of services) {\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(`Invalid service character: ${c}`);\n }\n }\n\n return accountSASServices;\n }\n\n /**\n * Permission to access blob resources granted.\n */\n public blob: boolean = false;\n\n /**\n * Permission to access file resources granted.\n */\n public file: boolean = false;\n\n /**\n * Permission to access queue resources granted.\n */\n public queue: boolean = false;\n\n /**\n * Permission to access table resources granted.\n */\n public table: boolean = false;\n\n /**\n * Converts the given services to a string.\n *\n */\n public toString(): string {\n const services: string[] = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js new file mode 100644 index 0000000000..ceaf07c938 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AccountSASPermissions } from "./AccountSASPermissions"; +import { AccountSASResourceTypes } from "./AccountSASResourceTypes"; +import { AccountSASServices } from "./AccountSASServices"; +import { ipRangeToString } from "./SasIPRange"; +import { SASQueryParameters } from "./SASQueryParameters"; +import { SERVICE_VERSION } from "../utils/constants"; +import { truncatedISO8061Date } from "../utils/utils.common"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} +//# sourceMappingURL=AccountSASSignatureValues.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map new file mode 100644 index 0000000000..7e87510cbb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASSignatureValues.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AccountSASSignatureValues.js","sourceRoot":"","sources":["../../../../src/sas/AccountSASSignatureValues.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,qBAAqB,EAAE,MAAM,yBAAyB,CAAC;AAChE,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,OAAO,EAAc,eAAe,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAe,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAmE7D;;;;;;;;;;GAUG;AACH,MAAM,UAAU,iCAAiC,CAC/C,yBAAoD,EACpD,mBAA+C;IAE/C,MAAM,OAAO,GAAG,yBAAyB,CAAC,OAAO;QAC/C,CAAC,CAAC,yBAAyB,CAAC,OAAO;QACnC,CAAC,CAAC,eAAe,CAAC;IAEpB,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,qBAAqB;QAC3D,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,aAAa;QACnD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,eAAe;QACrD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,GAAG;QACzC,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,MAAM;QAC5C,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IAAI,yBAAyB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;QACvE,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;KAC/F;IAED,MAAM,iBAAiB,GAAG,qBAAqB,CAAC,KAAK,CACnD,yBAAyB,CAAC,WAAW,CAAC,QAAQ,EAAE,CACjD,CAAC;IACF,MAAM,cAAc,GAAG,kBAAkB,CAAC,KAAK,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;IAC/F,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACvD,yBAAyB,CAAC,aAAa,CACxC,CAAC,QAAQ,EAAE,CAAC;IAEb,IAAI,YAAoB,CAAC;IAEzB,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,YAAY,GAAG;YACb,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;YACnB,yBAAyB,CAAC,QAAQ;gBAChC,CAAC,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBACjE,CAAC,CAAC,EAAE;YACN,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;YAChE,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC5E,OAAO;YACP,yBAAyB,CAAC,eAAe,CAAC,CAAC,CAAC,yBAAyB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;YAC1F,EAAE,EAAE,uDAAuD;SAC5D,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACd;SAAM;QACL,YAAY,GAAG;YACb,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;YACnB,yBAAyB,CAAC,QAAQ;gBAChC,CAAC,CAAC,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;gBACjE,CAAC,CAAC,EAAE;YACN,oBAAoB,CAA
C,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;YAChE,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;YAC5E,OAAO;YACP,EAAE,EAAE,uDAAuD;SAC5D,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;KACd;IAED,MAAM,SAAS,GAAW,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAE9E,OAAO,IAAI,kBAAkB,CAC3B,OAAO,EACP,SAAS,EACT,iBAAiB,CAAC,QAAQ,EAAE,EAC5B,cAAc,EACd,mBAAmB,EACnB,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,SAAS,EACnC,yBAAyB,CAAC,OAAO,EACjC,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,yBAAyB,CAAC,eAAe,CAC1C,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccountSASPermissions } from \"./AccountSASPermissions\";\nimport { AccountSASResourceTypes } from \"./AccountSASResourceTypes\";\nimport { AccountSASServices } from \"./AccountSASServices\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once\n * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation\n * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters}\n * exist because the former is mutable and a logical representation while the latter is immutable and used to generate\n * actual REST requests.\n *\n * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1\n * for more conceptual information on SAS\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n * for descriptions of the parameters, including which are required\n */\nexport interface AccountSASSignatureValues {\n /**\n * If not provided, this defaults to the service version targeted by this version of the library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * The time after which the SAS will no longer work.\n */\n expiresOn: Date;\n\n /**\n * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help\n * constructing the permissions string.\n */\n permissions: AccountSASPermissions;\n\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n\n /**\n * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to\n * construct this value.\n */\n services: string;\n\n /**\n * The values that indicate the resource types accessible with this SAS. Please refer\n * to {@link AccountSASResourceTypes} to construct this value.\n */\n resourceTypes: string;\n\n /**\n * Optional. 
Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param accountSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateAccountSASQueryParameters(\n accountSASSignatureValues: AccountSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n const version = accountSASSignatureValues.version\n ? accountSASSignatureValues.version\n : SERVICE_VERSION;\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'y' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n\n if (accountSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n const parsedPermissions = AccountSASPermissions.parse(\n accountSASSignatureValues.permissions.toString()\n );\n const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n const parsedResourceTypes = AccountSASResourceTypes.parse(\n accountSASSignatureValues.resourceTypes\n ).toString();\n\n let stringToSign: string;\n\n if (version >= \"2020-12-06\") {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : \"\",\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n } else {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? 
ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n }\n\n const signature: string = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n version,\n signature,\n parsedPermissions.toString(),\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.protocol,\n accountSASSignatureValues.startsOn,\n accountSASSignatureValues.expiresOn,\n accountSASSignatureValues.ipRange,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n accountSASSignatureValues.encryptionScope\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js new file mode 100644 index 0000000000..ea435c1343 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. 
+ * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=BlobSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map new file mode 100644 index 0000000000..c3768ea729 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASPermissions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BlobSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/BlobSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAA/B;QAiGE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;IA6C1C,CAAC;IAlMC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QAEpD,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;YAC9B,QAAQ,IAAI,EAAE;gBACZ,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;oBACxC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;oBAClC,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBAChD,MAAM;gBACR,KAAK,GAAG;oBACN,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC1C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,uBAAuB,IAAI,EAAE,CAAC,CAAC;aACvD;SACF;QAED,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAAsC;QACvD,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QACpD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;SAChC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;SAC/B;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;SACjC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;SAClC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;SACzC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;SAC/B;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;SAChC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;SACnC;QACD,IAAI,cAAc,CAAC,qB
AAqB,EAAE;YACxC,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACjD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;SAC3C;QACD,OAAO,kBAAkB,CAAC;IAC5B,CAAC;IAyDD;;;;;OAKG;IACI,QAAQ;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class BlobSASPermissions {\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n case \"i\":\n blobSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n blobSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission: ${char}`);\n }\n }\n\n return blobSASPermissions;\n }\n\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n blobSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n blobSASPermissions.permanentDelete = true;\n }\n return blobSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns A string which represents the BlobSASPermissions\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Blob SAS permission.\n * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface BlobSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js new file mode 100644 index 0000000000..f7d47f9e2e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js @@ -0,0 +1,555 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BlobSASPermissions } from "./BlobSASPermissions"; +import { ContainerSASPermissions } from "./ContainerSASPermissions"; +import { StorageSharedKeyCredential } from "../credentials/StorageSharedKeyCredential"; +import { UserDelegationKeyCredential } from "../credentials/UserDelegationKeyCredential"; +import { ipRangeToString } from "./SasIPRange"; +import { SASQueryParameters } from "./SASQueryParameters"; +import { SERVICE_VERSION } from "../utils/constants"; +import { truncatedISO8061Date } from "../utils/utils.common"; +export function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? 
sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} +//# sourceMappingURL=BlobSASSignatureValues.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map new file mode 100644 index 0000000000..38f9b76dbb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobSASSignatureValues.js","sourceRoot":"","sources":["../../../../src/sas/BlobSASSignatureValues.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAClC,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAE1D,OAAO,EAAE,uBAAuB,EAAE,MAAM,2BAA2B,CAAC;AACpE,OAAO,EAAE,0BAA0B,EAAE,MAAM,2CAA2C,CAAC;AACvF,OAAO,EAAE,2BAA2B,EAAE,MAAM,4CAA4C,CAAC;AACzF,OAAO,EAAE,eAAe,EAAc,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAe,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,MAAM,oBAAoB,CAAC;AACrD,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AA4O7D,MAAM,UAAU,8BAA8B,CAC5C,sBAA8C,EAC9C,sCAAsF,EACtF,WAAoB;IAEpB,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;IAElG,MAAM,mBAAmB,GACvB,sCAAsC,YAAY,0BAA0B;QAC1E,CAAC,CAAC,sCAAsC;QACxC,CAAC,CAAC,SAAS,CAAC;IAChB,IAAI,2BAAoE,CAAC;IAEzE,IAAI,mBAAmB,KAAK,SAAS,IAAI,WAAW,KAAK,SAAS,EAAE;QAClE,2BAA2B,GAAG,IAAI,2BAA2B,CAC3D,WAAW,EACX,sCAA2D,CAC5D,CAAC;KACH;IAED,IAAI,mBAAmB,KAAK,SAAS,IAAI,2BAA2B,KAAK,SAAS,EAAE;QAClF,MAAM,SAAS,CAAC,gEAAgE,CAAC,CAAC;KACnF;IAED,8DAA8D;IAC9D,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;SACH;KACF;IAED,gEAAgE;IAChE,gGAAgG;IAChG,yHAAyH;IACzH,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,8HAA8H;YAC9H,IAAI,OAAO,IAAI,YAAY,EAAE;gBAC3B,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;aACH;iBAAM;gBACL,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;aACH;SACF;KACF;IAED,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;YACrC,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;SAC5F;aAAM;YACL,MAAM,IAAI,UAAU,CAClB,kGAAkG,CACnG,CAAC;SACH;KACF;IAED,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;KAChB;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI
,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;GAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C;IAE/C,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,CAAC,CAAC,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;QACA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IAC
vD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,sBAAsB,CAAC,UAAU;QACjC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,sBAAsB,CAAC,YAAY,CAAC,CAAC,CAAC,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,sBAAsB,CAAC,kBAAkB,CAAC,CAAC,CAAC,EAAE;QAC1F,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,sBAAsB,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE;QACpF,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,sBAAsB,CAAC,WAAW,CAAC,CAAC,CAAC,EAAE;KAC7E,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,SAAS,EACT,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IA
C9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,CAC9C,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;QACT,sBAAsB,CAAC,aAAa;QACpC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAC9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,CACrC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;GAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD;IAExD,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,sEAAsE;IACtE,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;QAC5E,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;KACH;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;SACjB;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;YAChB,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;SAC9C;KACF;IAED,8FAA8F;IAC9F,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAA
E;YACnC,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;aAAM;YACL,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;SACd;KACF;IAED,uDAAuD;IACvD,MAAM,YAAY,GAAG;QACnB,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,EAAE;QAC9C,sBAAsB,CAAC,QAAQ;YAC7B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;YAC9D,CAAC,CAAC,EAAE;QACN,sBAAsB,CAAC,SAAS;YAC9B,CAAC,CAAC,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;YAC/D,CAAC,CAAC,EAAE;QACN,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;YAC1D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;YAC3F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;YAC3D,CAAC,CAAC,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;YAC5F,CAAC,CAAC,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;QACT,sBAAsB,CAAC,aAAa;QACpC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE;QACrF,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;QACtE,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY;QACnC,sBAAsB,CAAC,kBAAkB;QACzC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,WAAW;KACnC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAC9E,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,gBAAgB,CAAC,WAAmB,EAAE,aAAqB,EAAE,QAAiB;IACrF,2CAA2C;IAC3C,oDAAoD;IACpD,MAAM,QAAQ,GAAa,CAAC,SAAS,WAAW,IAAI,aAAa,EAAE,CAAC,CAAC;IACrE,IAAI,QAAQ,EAAE;QACZ,QAAQ,CAAC,IAAI,CAAC,IAAI,QAAQ,EAAE,CAAC,CAAC;KAC/B;IACD,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3B,CAAC;AAED,SAAS,wCAAwC,CAC/C,sBAA8C;IAE9C,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;IAClG,IAAI,sBAAsB,CAAC,YAAY,IAAI,OAAO,GAAG,YAAY,EAAE;QACjE,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,YAAY,EAAE;QACxF,MAAM,UAAU,CAAC,wDAAwD,CAAC,CAAC;KAC5E;IAED,IAAI,sBAAsB,CAAC,SAAS,IAAI,OAAO,GAAG,YAAY,EAAE;QAC9D,MAAM,UAAU,CAAC,+DAA+D,CAAC,CAAC;KACnF;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,SAAS,EAAE;QACrF,MAAM,UAAU,CAAC,qDAAqD,CAAC,CAAC;KACzE;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,qBAAqB;QACxD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;KACrF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,aAAa;QAChD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,eAAe;QAClD,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,GAAG;QACtC,OAAO,GAAG,YAAY,EACtB;QACA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;KACtF;IAED,IACE,OAAO,GAAG,YAAY;QACtB,sBAAsB,CAAC,WAAW;QAClC,CAAC,sBAAsB,CAAC,WAAW,CAAC,IAAI,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,EACvF;QACA,MAAM,UAAU,CAAC,6EAA6E,CAAC,CAAC;KACjG;IAED,IACE,OAAO,GAAG,YAAY;QACtB,sBAAsB,CAAC,WAAW;QACjC,sBAAsB,CAAC,WAAuC,CAAC,YAAY,EAC5E;QACA,MAAM,UAAU,CAAC,sEAAsE,
CAAC,CAAC;KAC1F;IAED,IACE,OAAO,GAAG,YAAY;QACtB,CAAC,sBAAsB,CAAC,0BAA0B,IAAI,sBAAsB,CAAC,aAAa,CAAC,EAC3F;QACA,MAAM,UAAU,CACd,mGAAmG,CACpG,CAAC;KACH;IAED,IAAI,sBAAsB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;QACpE,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;KAC/F;IAED,sBAAsB,CAAC,OAAO,GAAG,OAAO,CAAC;IACzC,OAAO,sBAAsB,CAAC;AAChC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { BlobSASPermissions } from \"./BlobSASPermissions\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\nimport { ContainerSASPermissions } from \"./ContainerSASPermissions\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { UserDelegationKeyCredential } from \"../credentials/UserDelegationKeyCredential\";\nimport { ipRangeToString, SasIPRange } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs.\n */\nexport interface BlobSASSignatureValues {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource\n * being accessed for help constructing the permissions string.\n */\n permissions?: BlobSASPermissions | ContainerSASPermissions;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * The name of the container the SAS user may access.\n */\n containerName: string;\n\n /**\n * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided.\n */\n blobName?: string;\n\n /**\n * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09.\n */\n snapshotTime?: string;\n\n /**\n * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10.\n */\n versionId?: string;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n\n /**\n * Optional. 
Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user\n * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will\n * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission\n * check for the user specified in this value will be performed. This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to\n * correlate SAS generation with storage resource access. This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * Fill in the required details before running the following snippets.\n *\n * Example usage:\n *\n * ```js\n * // Generate service level SAS for a container\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using an identifier:\n *\n * ```js\n * // Generate service level SAS for a container with identifier\n * // startsOn & permissions are optional when identifier is provided\n * const identifier = \"unique-id\";\n * await containerClient.setAccessPolicy(undefined, [\n * {\n * accessPolicy: {\n * expiresOn: new Date(new Date().valueOf() + 86400), // Date type\n * permissions: ContainerSASPermissions.parse(\"racwdl\").toString(),\n * startsOn: new Date() // Date type\n * },\n * id: identifier\n * }\n * ]);\n *\n * const containerSAS = generateBlobSASQueryParameters(\n * {\n * containerName, // Required\n * identifier // Required\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using a blob name:\n *\n * ```js\n * // Generate service level SAS for a blob\n * const blobSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * blobName, // Required\n * permissions: BlobSASPermissions.parse(\"racwd\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type\n * cacheControl: \"cache-control-override\", // Optional\n * contentDisposition: \"content-disposition-override\", // Optional\n * contentEncoding: \"content-encoding-override\", // Optional\n * contentLanguage: \"content-language-override\", // Optional\n * contentType: \"content-type-override\", // Optional\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters;\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required.\n *\n * Example usage:\n *\n * ```js\n * // Generate user delegation SAS for a container\n * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn, // Optional. Date type\n * expiresOn, // Required. Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2018-11-09\" // Must greater than or equal to 2018-11-09 to generate user delegation SAS\n * },\n * userDelegationKey, // UserDelegationKey\n * accountName\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`\n * @param accountName -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKey: UserDelegationKey,\n accountName: string\n): SASQueryParameters;\n\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredentialOrUserDelegationKey: StorageSharedKeyCredential | UserDelegationKey,\n accountName?: string\n): SASQueryParameters {\n const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n\n const sharedKeyCredential =\n sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? 
sharedKeyCredentialOrUserDelegationKey\n : undefined;\n let userDelegationKeyCredential: UserDelegationKeyCredential | undefined;\n\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(\n accountName,\n sharedKeyCredentialOrUserDelegationKey as UserDelegationKey\n );\n }\n\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n\n // Version 2020-12-06 adds support for encryptionscope in SAS.\n if (version >= \"2020-12-06\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential);\n } else {\n return generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n } else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n } else {\n return generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n }\n\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n } else {\n throw new RangeError(\n \"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\"\n );\n }\n }\n\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20150405(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl ? 
blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n undefined,\n undefined,\n undefined,\n blobSASSignatureValues.encryptionScope\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.encryptionScope\n );\n}\n\nfunction getCanonicalName(accountName: string, containerName: string, blobName?: string): string {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n const elements: string[] = [`/blob/${accountName}/${containerName}`];\n if (blobName) {\n elements.push(`/${blobName}`);\n }\n return elements.join(\"\");\n}\n\nfunction SASSignatureValuesSanityCheckAndAutofill(\n blobSASSignatureValues: BlobSASSignatureValues\n): BlobSASSignatureValues {\n const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'y' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)\n ) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n\n if (\n version < \"2021-04-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions as ContainerSASPermissions).filterByTags\n ) {\n throw RangeError(\"'version' must be >= '2021-04-10' when providing the 'f' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)\n ) {\n throw RangeError(\n \"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\"\n );\n }\n\n if (blobSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js new file mode 100644 index 0000000000..d25bfd4400 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js @@ -0,0 +1,221 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
+ * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} +//# sourceMappingURL=ContainerSASPermissions.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map new file mode 100644 index 0000000000..068b7ab866 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/ContainerSASPermissions.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"ContainerSASPermissions.js","sourceRoot":"","sources":["../../../../src/sas/ContainerSASPermissions.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;;;GAMG;AACH,MAAM,OAAO,uBAAuB;IAApC;QA6GE;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,UAAK,GAAY,KAAK,CAAC;QAE9B;;WAEG;QACI,WAAM,GAAY,KAAK,CAAC;QAE/B;;WAEG;QACI,kBAAa,GAAY,KAAK,CAAC;QAEtC;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,QAAG,GAAY,KAAK,CAAC;QAE5B;;WAEG;QACI,SAAI,GAAY,KAAK,CAAC;QAE7B;;WAEG;QACI,YAAO,GAAY,KAAK,CAAC;QAEhC;;WAEG;QACI,0BAAqB,GAAY,KAAK,CAAC;QAE9C;;WAEG;QACI,oBAAe,GAAY,KAAK,CAAC;QAExC;;WAEG;QACI,iBAAY,GAAY,KAAK,CAAC;IAqDvC,CAAC;IAhOC;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,WAAmB;QACrC,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAE9D,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;YAC9B,QAAQ,IAAI,EAAE;gBACZ,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACrC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC7C,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACrD,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC/C,MAAM;gBACR,KAAK,GAAG;oBACN,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;oBAC5C,MAAM;gBACR;oBACE,MAAM,IAAI,UAAU,CAAC,sBAAsB,IAAI,EAAE,CAAC,CAAC;aACtD;SACF;QAED,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAED;;;;;OAKG;IACI,MAAM,CAAC,IAAI,CAAC,cAA2C;QAC5D,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAC9D,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;SACvC;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;YACxB,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;SACtC;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;YACzB,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;SACvC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;YAChC,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;SAC9C;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;YACtB,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;SACpC;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;YACvB,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;SACrC;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;YAC1B,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;SACxC;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;YACxC,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;SACtD;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;YAClC,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;SAChD;QACD,IAAI,cAAc,CAAC,YAAY,EAAE;YAC/B,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;SAC7C;QACD,OAAO,uBAAuB,CAAC;IACjC,CAAC;IAmED;;;;;;;OAOG;IACI,QAAQ;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;YACd,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;YACf,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;YACtB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;YACZ,WAAW,CAAC,I
AAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;YACb,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;YAC9B,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;YACxB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;QACD,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC9B,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class ContainerSASPermissions {\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n case \"i\":\n containerSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n containerSASPermissions.permanentDelete = true;\n break;\n case \"f\":\n containerSASPermissions.filterByTags = true;\n break;\n default:\n throw new RangeError(`Invalid permission ${char}`);\n }\n }\n\n return containerSASPermissions;\n }\n\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if (permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n 
containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n containerSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n containerSASPermissions.permanentDelete = true;\n }\n if (permissionLike.filterByTags) {\n containerSASPermissions.filterByTags = true;\n }\n return containerSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specifies List access granted.\n */\n public list: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n public filterByTags: boolean = false;\n\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n if (this.filterByTags) {\n permissions.push(\"f\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Container SAS permission.\n * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface ContainerSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specifies List access granted.\n */\n list?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n 
move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n filterByTags?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js new file mode 100644 index 0000000000..c3869d63de --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js @@ -0,0 +1,234 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { ipRangeToString } from "./SasIPRange"; +import { truncatedISO8061Date } from "../utils/utils.common"; +/** + * Protocols for generated SAS. + */ +export var SASProtocol; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(SASProtocol || (SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. 
+ */ +export class SASQueryParameters { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. 
+ * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} +//# sourceMappingURL=SASQueryParameters.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map new file mode 100644 index 0000000000..7b597a2961 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SASQueryParameters.js","sourceRoot":"","sources":["../../../../src/sas/SASQueryParameters.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAc,eAAe,EAAE,MAAM,cAAc,CAAC;AAC3D,OAAO,EAAE,oBAAoB,EAAE,MAAM,uBAAuB,CAAC;AAG7D;;GAEG;AACH,MAAM,CAAN,IAAY,WAUX;AAVD,WAAY,WAAW;IACrB;;OAEG;IACH,8BAAe,CAAA;IAEf;;OAEG;IACH,0CAA2B,CAAA;AAC7B,CAAC,EAVW,WAAW,KAAX,WAAW,QAUtB;AA4FD;;;;;;;;GAQG;AACH,MAAM,OAAO,kBAAkB;IAsN7B,YACE,OAAe,EACf,SAAiB,EACjB,oBAAyD,EACzD,QAAiB,EACjB,aAAsB,EACtB,QAAsB,EACtB,QAAe,EACf,SAAgB,EAChB,OAAoB,EACpB,UAAmB,EACnB,QAAiB,EACjB,YAAqB,EACrB,kBAA2B,EAC3B,eAAwB,EACxB,eAAwB,EACxB,WAAoB,EACpB,iBAAqC,EACrC,0BAAmC,EACnC,aAAsB,EACtB,eAAwB;QAExB,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QACvB,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,oBAAoB,KAAK,SAAS,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;YAClF,4BAA4B;YAC5B,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YACpD,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;YACxD,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;YAChD,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,OAAO,CAAC;YACjD,IAAI,CAAC,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;YAClD,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;YAC9C,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,YAAY,CAAC;YACtD,IAAI,CAAC,kBAAkB,GAAG,oBAAoB,CAAC,kBAAkB,CAAC;YAClE,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;YAC5D,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YAEpD,IAAI,oBAAoB,CAAC,iBAAiB,EAAE;gBAC1C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBACvE,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,eAAe,CAAC;gBAC9E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAC1E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAE1E,IAAI,CAAC,0BAA0B,GAAG,oBAAoB,CAAC,0BAA0B,CAAC;gBAClF,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;aACzD;SACF;aAAM;YACL,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;YACnC,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;YAC3B,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC;YACxC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC;YAC5B,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;YAC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;YACzB,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;YACjC,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;YAC7C,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;YACvC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;YAE/B,IAAI,iBAAiB,EAAE;gBACrB,IAAI,CAAC,SAAS,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBAClD,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBACvD,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;gBACvD,IAAI,CAAC,eAAe,G
AAG,iBAAiB,CAAC,eAAe,CAAC;gBACzD,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;gBACrD,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;gBAErD,IAAI,CAAC,0BAA0B,GAAG,0BAA0B,CAAC;gBAC7D,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;aACpC;SACF;IACH,CAAC;IA1JD;;;;OAIG;IACH,IAAW,OAAO;QAChB,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,OAAO;gBACL,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,GAAG;gBAC1B,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,KAAK;aAC/B,CAAC;SACH;QACD,OAAO,SAAS,CAAC;IACnB,CAAC;IA+ID;;;OAGG;IACI,QAAQ;QACb,MAAM,MAAM,GAAa;YACvB,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,IAAI;YACJ,KAAK;YACL,OAAO;YACP,OAAO;YACP,KAAK;YACL,KAAK;YACL,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,OAAO;YACP,MAAM;SACP,CAAC;QACF,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;YAC1B,QAAQ,KAAK,EAAE;gBACb,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;oBAC3D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACvE,CAAC;oBACF,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACzE,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,SAAS,CACzD,CAAC;oBACF,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;oBAC9D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,OAAO,EAAE,mBAAmB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,OAAO,EAAE,mBAAmB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;oBAClE,MAAM;gBACR,KAAK,KAAK,EAAE,wBAAwB;oBAClC,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,cAAc,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACnF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK,EAAE,yBAAyB;oBACnC,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,oBAAoB,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,SAAS,CACrF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK,EAAE,qBAAqB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK,EAAE,qBAAqB;oBAC/B,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;gBACR,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;oBAChE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,kBAAkB,CAAC,CAAC;oBACtE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,E
AAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,0BAA0B,CAAC,CAAC;oBAC9E,MAAM;gBACR,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;aACT;SACF;QACD,OAAO,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC3B,CAAC;IAED;;;;;;OAMG;IACK,uBAAuB,CAAC,OAAiB,EAAE,GAAW,EAAE,KAAc;QAC5E,IAAI,CAAC,KAAK,EAAE;YACV,OAAO;SACR;QAED,GAAG,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC;QAC9B,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;QAClC,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,GAAG,GAAG,IAAI,KAAK,EAAE,CAAC,CAAC;SACjC;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * Protocols for generated SAS.\n */\nexport enum SASProtocol {\n /**\n * Protocol that allows HTTPS only\n */\n Https = \"https\",\n\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n HttpsAndHttp = \"https,http\",\n}\n\n/**\n * Options to construct {@link SASQueryParameters}.\n */\nexport interface SASQueryParametersOptions {\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n permissions?: string;\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n services?: string;\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n resourceTypes?: string;\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n protocol?: SASProtocol;\n /**\n * Optional. The start time for this SAS token.\n */\n startsOn?: Date;\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n expiresOn?: Date;\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n resource?: string;\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n cacheControl?: string;\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n contentDisposition?: string;\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n contentEncoding?: string;\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n contentLanguage?: string;\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n contentType?: string;\n /**\n * User delegation key properties.\n */\n userDelegationKey?: UserDelegationKey;\n /**\n * Authorized AAD Object ID in GUID format. 
The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}.\n * This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n */\nexport class SASQueryParameters {\n /**\n * The storage API version.\n */\n public readonly version: string;\n\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n public readonly protocol?: SASProtocol;\n\n /**\n * Optional. The start time for this SAS token.\n */\n public readonly startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n public readonly expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n public readonly permissions?: string;\n\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n public readonly services?: string;\n\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n public readonly resourceTypes?: string;\n\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n public readonly identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n public readonly encryptionScope?: string;\n\n /**\n * Optional. 
Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n public readonly resource?: string;\n\n /**\n * The signature for the SAS token.\n */\n public readonly signature: string;\n\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n public readonly cacheControl?: string;\n\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n public readonly contentDisposition?: string;\n\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n public readonly contentEncoding?: string;\n\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n public readonly contentLanguage?: string;\n\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n public readonly contentType?: string;\n\n /**\n * Inner value of getter ipRange.\n */\n private readonly ipRangeInner?: SasIPRange;\n\n /**\n * The Azure Active Directory object ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedOid?: string;\n\n /**\n * The Azure Active Directory tenant ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedTenantId?: string;\n\n /**\n * The date-time the key is active.\n * Property of user delegation key.\n */\n private readonly signedStartsOn?: Date;\n\n /**\n * The date-time the key expires.\n * Property of user delegation key.\n */\n private readonly signedExpiresOn?: Date;\n\n /**\n * Abbreviation of the Azure Storage service that accepts the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedService?: string;\n\n /**\n * The service version that created the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedVersion?: string;\n\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This is only used for User Delegation SAS.\n */\n public readonly preauthorizedAgentObjectId?: string;\n\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n public readonly correlationId?: string;\n\n /**\n * Optional. 
IP range allowed for this SAS.\n *\n * @readonly\n */\n public get ipRange(): SasIPRange | undefined {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start,\n };\n }\n return undefined;\n }\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param permissions - Representing the storage permissions\n * @param services - Representing the storage services being accessed (only for Account SAS)\n * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS)\n * @param protocol - Representing the allowed HTTP protocol(s)\n * @param startsOn - Representing the start time for this SAS token\n * @param expiresOn - Representing the expiry time for this SAS token\n * @param ipRange - Representing the range of valid IP addresses for this SAS token\n * @param identifier - Representing the signed identifier (only for Service SAS)\n * @param resource - Representing the storage container or blob (only for Service SAS)\n * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS)\n * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS)\n * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS)\n * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS)\n * @param contentType - Representing the content-type header (only for Blob/File Service SAS)\n * @param userDelegationKey - Representing the user delegation key properties\n * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS)\n * @param correlationId - Representing the correlation ID (only for User Delegation SAS)\n * @param encryptionScope -\n */\n constructor(\n version: string,\n signature: string,\n permissions?: string,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n );\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param options - Optional. 
Options to construct the SASQueryParameters.\n */\n constructor(version: string, signature: string, options?: SASQueryParametersOptions);\n\n constructor(\n version: string,\n signature: string,\n permissionsOrOptions?: string | SASQueryParametersOptions,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n ) {\n this.version = version;\n this.signature = signature;\n\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.encryptionScope = permissionsOrOptions.encryptionScope;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n } else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.encryptionScope = encryptionScope;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n */\n public toString(): string {\n 
const params: string[] = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"ses\",\n \"skoid\", // Signed object ID\n \"sktid\", // Signed tenant ID\n \"skt\", // Signed key start time\n \"ske\", // Signed key expiry time\n \"sks\", // Signed key service\n \"skv\", // Signed key version\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\",\n ];\n const queries: string[] = [];\n\n for (const param of params) {\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined\n );\n break;\n case \"se\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined\n );\n break;\n case \"sip\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.ipRange ? ipRangeToString(this.ipRange) : undefined\n );\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"ses\":\n this.tryAppendQueryParameter(queries, param, this.encryptionScope);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined\n );\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined\n );\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n }\n\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @param queries -\n * @param key -\n * @param value -\n */\n private tryAppendQueryParameter(queries: string[], key: string, value?: string): void {\n if (!value) {\n return;\n }\n\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(`${key}=${value}`);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js new file mode 100644 index 0000000000..9f386b6725 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +export function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} +//# sourceMappingURL=SasIPRange.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map new file mode 100644 index 0000000000..0a67a117fb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SasIPRange.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SasIPRange.js","sourceRoot":"","sources":["../../../../src/sas/SasIPRange.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAkBlC;;;;;;GAMG;AACH,MAAM,UAAU,eAAe,CAAC,OAAmB;IACjD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,KAAK,CAAC;AACzE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allowed IP range for a SAS.\n */\nexport interface SasIPRange {\n /**\n * Starting IP address in the IP range.\n * If end IP doesn't provide, start IP will the only IP allowed.\n */\n start: string;\n /**\n * Optional. 
IP address that ends the IP range.\n * If not provided, start IP will the only IP allowed.\n */\n end?: string;\n}\n\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @param ipRange -\n */\nexport function ipRangeToString(ipRange: SasIPRange): string {\n return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js new file mode 100644 index 0000000000..d26820806d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// In browser, during webpack or browserify bundling, this module will be replaced by 'events' +// https://github.com/Gozala/events +import { EventEmitter } from "events"; +/** + * States for Batch. + */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +export class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. 
+ * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } + } +} +//# sourceMappingURL=Batch.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map new file mode 100644 index 0000000000..62abe3e60d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Batch.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Batch.js","sourceRoot":"","sources":["../../../../src/utils/Batch.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,8FAA8F;AAC9F,mCAAmC;AACnC,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAOtC;;GAEG;AACH,IAAK,WAGJ;AAHD,WAAK,WAAW;IACd,6CAAI,CAAA;IACJ,+CAAK,CAAA;AACP,CAAC,EAHI,WAAW,KAAX,WAAW,QAGf;AAED;;;;GAIG;AACH,MAAM,OAAO,KAAK;IAqChB;;;OAGG;IACH,YAAmB,cAAsB,CAAC;QAnC1C;;WAEG;QACK,YAAO,GAAW,CAAC,CAAC;QAE5B;;WAEG;QACK,cAAS,GAAW,CAAC,CAAC;QAE9B;;WAEG;QACK,WAAM,GAAW,CAAC,CAAC;QAE3B;;WAEG;QACK,eAAU,GAAgB,EAAE,CAAC;QAErC;;;WAGG;QACK,UAAK,GAAgB,WAAW,CAAC,IAAI,CAAC;QAY5C,IAAI,WAAW,GAAG,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,mCAAmC,CAAC,CAAC;SAC3D;QACD,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,YAAY,EAAE,CAAC;IACpC,CAAC;IAED;;;;OAIG;IACI,YAAY,CAAC,SAAoB;QACtC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE;YAC9B,IAAI;gBACF,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,MAAM,SAAS,EAAE,CAAC;gBAClB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,IAAI,CAAC,SAAS,EAAE,CAAC;gBACjB,IAAI,CAAC,eAAe,EAAE,CAAC;aACxB;YAAC,OAAO,KAAU,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;aACnC;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACI,KAAK,CAAC,EAAE;QACb,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAChC,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;SAC1B;QAED,IAAI,CAAC,eAAe,EAAE,CAAC;QAEvB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC3C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAEnC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;gBACjC,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;gBAC/B,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;OAGG;IACK,aAAa;QACnB,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YACxC,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;SACvC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;;OAIG;IACK,eAAe;QACrB,IAAI,IAAI,CAAC,KAAK,KAAK,WAAW,CAAC,KAAK,EAAE;YACpC,OAAO;SACR;QAED,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YAC5C,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YAC5B,OAAO;SACR;QAED,OAAO,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;YACtC,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;YACvC,IAAI,SAAS,EAAE;gBACb,SAAS,EAAE,CAAC;aACb;iBAAM;gBACL,OAAO;aACR;SACF;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// In browser, during webpack or browserify bundling, this module will be replaced by 'events'\n// https://github.com/Gozala/events\nimport { EventEmitter } from \"events\";\n\n/**\n * Operation is an async function to be executed and managed by Batch.\n */\nexport declare type Operation = () => Promise;\n\n/**\n * States for Batch.\n */\nenum BatchStates {\n Good,\n Error,\n}\n\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an 
error.\n * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n */\nexport class Batch {\n /**\n * Concurrency. Must be lager than 0.\n */\n private concurrency: number;\n\n /**\n * Number of active operations under execution.\n */\n private actives: number = 0;\n\n /**\n * Number of completed operations under execution.\n */\n private completed: number = 0;\n\n /**\n * Offset of next operation to be executed.\n */\n private offset: number = 0;\n\n /**\n * Operation array to be executed.\n */\n private operations: Operation[] = [];\n\n /**\n * States of Batch. When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n */\n private state: BatchStates = BatchStates.Good;\n\n /**\n * A private emitter used to pass events inside this class.\n */\n private emitter: EventEmitter;\n\n /**\n * Creates an instance of Batch.\n * @param concurrency -\n */\n public constructor(concurrency: number = 5) {\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new EventEmitter();\n }\n\n /**\n * Add a operation into queue.\n *\n * @param operation -\n */\n public addOperation(operation: Operation): void {\n this.operations.push(async () => {\n try {\n this.actives++;\n await operation();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n } catch (error: any) {\n this.emitter.emit(\"error\", error);\n }\n });\n }\n\n /**\n * Start execute operations in the queue.\n *\n */\n public async do(): Promise {\n if (this.operations.length === 0) {\n return Promise.resolve();\n }\n\n this.parallelExecute();\n\n return new Promise((resolve, reject) => {\n this.emitter.on(\"finish\", resolve);\n\n this.emitter.on(\"error\", (error) => {\n this.state = BatchStates.Error;\n reject(error);\n });\n });\n }\n\n /**\n * Get next operation to be executed. Return null when reaching ends.\n *\n */\n private nextOperation(): Operation | null {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n }\n\n /**\n * Start execute operations. One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n */\n private parallelExecute(): void {\n if (this.state === BatchStates.Error) {\n return;\n }\n\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n\n while (this.actives < this.concurrency) {\n const operation = this.nextOperation();\n if (operation) {\n operation();\n } else {\n return;\n }\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js new file mode 100644 index 0000000000..cfe60cc023 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +import { AvroReadableFromStream, AvroReader } from "../../../storage-internal-avro/src"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +export class BlobQuickQueryStream extends Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} +//# sourceMappingURL=BlobQuickQueryStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map new file mode 100644 index 0000000000..db25b58da4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/BlobQuickQueryStream.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BlobQuickQueryStream.js","sourceRoot":"","sources":["../../../../src/utils/BlobQuickQueryStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAKlC,OAAO,EAAE,sBAAsB,EAAE,UAAU,EAAE,MAAM,oCAAoC,CAAC;AAqBxF;;;;GAIG;AACH,MAAM,OAAO,oBAAqB,SAAQ,QAAQ;IAQhD;;;;;OAKG;IACH,YAAmB,MAA6B,EAAE,UAAuC,EAAE;QACzF,KAAK,EAAE,CAAC;QAXF,eAAU,GAAY,IAAI,CAAC;QAYjC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,sBAAsB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;QAC1E,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IACrF,CAAC;IAEM,KAAK;QACV,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;gBAChC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAC1B,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;IAEO,KAAK,CAAC,YAAY;QACxB,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;QACxB,IAAI,QAAQ,CAAC;QACb,GAAG;YACD,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACtC,IAAI,QAAQ,CAAC,IAAI,EAAE;gBACjB,MAAM;aACP;YACD,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC;YAC3B,MAAM,MAAM,GAAI,GAAW,CAAC,OAAO,CAAC;YACpC,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;gBAC9B,MAAM,KAAK,CAAC,gCAAgC,CAAC,CAAC;aAC/C;YAED,QAAQ,MAAM,EAAE;gBACd,KAAK,0DAA0D;oBAC7D;wBACE,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;wBAC/B,IAAI,IAAI,YAAY,UAAU,KAAK,KAAK,EAAE;4BACxC,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;yBACpD;wBACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE;4BACjC,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;yBACxB;qBACF;oBACD,MAAM;gBACR,KAAK,wDAAwD;oBAC3D;wBACE,MAAM,YAAY,GAAI,GAAW,CAAC,YAAY,CAAC;wBAC/C,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;4BACpC,MAAM,KAAK,CAAC,+CAA+C,CAAC,CAAC;yBAC9D;wBACD,IAAI,IAAI,CAAC,UAAU,EAAE;4BACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC,CAAC;yBAChD;qBACF;oBACD,MAAM;gBACR,KAAK,mDAAmD;oBACtD,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnB,MAAM,UAAU,GAAI,GAAW,CAAC,UAAU,CAAC;wBAC3C,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;4BAClC,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;yBACvD;wBACD,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;qBAC9C;oBACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;gBACR,KAAK,qDAAqD;oBACxD,IAAI,IAAI,CAAC,OAAO,EAAE;wBAChB,MAAM,KAAK,GAAI,GAAW,CAAC,KAAK,CAAC;wBACjC,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;4BAC9B,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;yBACpD;wBACD,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;wBAC/B,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;4BAC5B,MAAM,KAAK,CAAC,oCAAoC,CAAC,CAAC;yBACnD;wBACD,MAAM,WAAW,GAAI,GAAW,CAAC,WAAW,CAAC;wBAC7C,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;4BACnC,MAAM,KAAK,CAAC,2CAA2C,CAAC,CAAC;yBAC1D;wBACD,MAAM,QAAQ,GAAI,GAAW,CAAC,QAAQ,CAAC;wBACvC,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;4BAChC,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;yBACvD;wBACD,IAAI,CAAC,OAAO,CAAC;4BACX,QAAQ;4BACR,IAAI;4BACJ,OAAO,EAAE,KAAK;4BACd,WAAW;yBACZ,CAAC,CAAC;qBACJ;oBACD,MAAM;gBACR;oBACE,MAAM,KAAK,CAAC,kBAAkB,MAAM,2BAA2B,CAAC,CAAC;aACpE;SACF,QAAQ,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable } from \"stream\";\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TransferProgressEvent } from \"@azure/core-http\";\n\nimport { AvroReadableFromStream, AvroReader } from \"../../../storage-internal-avro/src\";\nimport { BlobQueryError } from \"../Clients\";\n\nexport interface BlobQuickQueryStreamOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the 
@azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n */\nexport class BlobQuickQueryStream extends Readable {\n private source: NodeJS.ReadableStream;\n private avroReader: AvroReader;\n private avroIter: AsyncIterableIterator;\n private avroPaused: boolean = true;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private onError?: (error: BlobQueryError) => void;\n\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param options -\n */\n public constructor(source: NodeJS.ReadableStream, options: BlobQuickQueryStreamOptions = {}) {\n super();\n this.source = source;\n this.onProgress = options.onProgress;\n this.onError = options.onError;\n this.avroReader = new AvroReader(new AvroReadableFromStream(this.source));\n this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal });\n }\n\n public _read(): void {\n if (this.avroPaused) {\n this.readInternal().catch((err) => {\n this.emit(\"error\", err);\n });\n }\n }\n\n private async readInternal() {\n this.avroPaused = false;\n let avroNext;\n do {\n avroNext = await this.avroIter.next();\n if (avroNext.done) {\n break;\n }\n const obj = avroNext.value;\n const schema = (obj as any).$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n {\n const data = (obj as any).data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n {\n const bytesScanned = (obj as any).bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n const totalBytes = (obj as any).totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n const fatal = (obj as any).fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n const name = (obj as any).name;\n if (typeof name !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n const description = (obj as any).description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n const position = (obj as any).position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position,\n name,\n isFatal: fatal,\n description,\n });\n }\n break;\n default:\n throw Error(`Unknown schema ${schema} in avro progress 
record.`);\n }\n } while (!avroNext.done && !this.avroPaused);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js new file mode 100644 index 0000000000..d2e1dc6768 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js @@ -0,0 +1,65 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +export class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; +//# sourceMappingURL=Mutex.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map new file mode 100644 index 0000000000..f437159442 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/Mutex.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"Mutex.js","sourceRoot":"","sources":["../../../../src/utils/Mutex.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,IAAK,eAGJ;AAHD,WAAK,eAAe;IAClB,yDAAM,CAAA;IACN,6DAAQ,CAAA;AACV,CAAC,EAHI,eAAe,KAAf,eAAe,QAGnB;AAID;;GAEG;AACH,MAAM,OAAO,KAAK;IAChB;;;;;OAKG;IACI,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAW;QAClC,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,EAAE;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,QAAQ,EAAE;gBAC/E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;gBACxC,OAAO,EAAE,CAAC;aACX;iBAAM;gBACL,IAAI,CAAC,aAAa,CAAC,GAAG,EAAE,GAAG,EAAE;oBAC3B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;oBACxC,OAAO,EAAE,CAAC;gBACZ,CAAC,CAAC,CAAC;aACJ;QACH,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IACI,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,GAAW;QACpC,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,EAAE;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,MAAM,EAAE;gBAC7C,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;aAC3B;YACD,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACtB,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;IACL,CAAC;IAKO,MAAM,CAAC,aAAa,CAAC,GAAW,EAAE,OAAiB;QACzD,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;YACrC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;SACjC;aAAM;YACL,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;SACnC;IACH,CAAC;IAEO,MAAM,CAAC,eAAe,CAAC,GAAW;QACxC,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YACvE,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;YAC5C,YAAY,CAAC,GAAG,EAAE;gBAChB,OAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACtB,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;;AAlBc,UAAI,GAAuC,EAAE,CAAC;AAC9C,eAAS,GAAkC,EAAE,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nenum MutexLockStatus {\n LOCKED,\n UNLOCKED,\n}\n\ntype Callback = (...args: any[]) => any;\n\n/**\n * An async mutex lock.\n */\nexport class Mutex {\n /**\n * Lock for a specific key. 
If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @param key - lock key\n */\n public static async lock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n } else {\n this.onUnlockEvent(key, () => {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n });\n }\n\n /**\n * Unlock a key.\n *\n * @param key -\n */\n public static async unlock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === MutexLockStatus.LOCKED) {\n this.emitUnlockEvent(key);\n }\n delete this.keys[key];\n resolve();\n });\n }\n\n private static keys: { [key: string]: MutexLockStatus } = {};\n private static listeners: { [key: string]: Callback[] } = {};\n\n private static onUnlockEvent(key: string, handler: Callback) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n } else {\n this.listeners[key].push(handler);\n }\n }\n\n private static emitUnlockEvent(key: string) {\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n const handler = this.listeners[key].shift();\n setImmediate(() => {\n handler!.call(this);\n });\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js new file mode 100644 index 0000000000..2d58e9d23e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +export class RetriableReadableStream extends Readable { + /** + * Creates an instance of RetriableReadableStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? 
undefined : error); + } +} +//# sourceMappingURL=RetriableReadableStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map new file mode 100644 index 0000000000..6a24ff3230 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js.map @@ -0,0 +1 @@ +{"version":3,"file":"RetriableReadableStream.js","sourceRoot":"","sources":["../../../../src/utils/RetriableReadableStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,QAAQ,EAAE,MAAM,QAAQ,CAAC;AAgClC;;;;GAIG;AACH,MAAM,OAAO,uBAAwB,SAAQ,QAAQ;IAWnD;;;;;;;;;OASG;IACH,YACE,MAA6B,EAC7B,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,UAA0C,EAAE;QAE5C,KAAK,CAAC,EAAE,aAAa,EAAE,OAAO,CAAC,aAAa,EAAE,CAAC,CAAC;QAtB1C,YAAO,GAAW,CAAC,CAAC;QAoDpB,sBAAiB,GAAG,CAAC,IAAY,EAAE,EAAE;YAC3C,IAAI,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;gBAClC,IAAI,CAAC,OAAO,CAAC,iBAAiB,GAAG,SAAS,CAAC;gBAC3C,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;gBACpB,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,MAAM,CAAC,CAAC;gBACvC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACxB,OAAO;aACR;YAED,eAAe;YACf,2EAA2E;YAC3E,KAAK;YACL,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,IAAI,IAAI,CAAC,UAAU,EAAE;gBACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;aAC5D;YACD,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;gBACpB,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;aACrB;QACH,CAAC,CAAC;QAEM,4BAAuB,GAAG,CAAC,GAAW,EAAE,EAAE;YAChD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE;gBACpC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBAClB,OAAO;aACR;YAED,eAAe;YACf,kDAAkD;YAClD,kBAAkB;YAClB,+BAA+B;YAC/B,KAAK;YACL,IAAI,CAAC,yBAAyB,EAAE,CAAC;YACjC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;gBAChC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aACjB;iBAAM,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE;gBAClC,eAAe;gBACf,gEAAgE;gBAChE,KAAK;gBACL,IAAI,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,gBAAgB,EAAE;oBACxC,IAAI,CAAC,OAAO,IAAI,CAAC,CAAC;oBAClB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;yBACrB,IAAI,CAAC,CAAC,SAAS,EAAE,EAAE;wBAClB,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;wBACxB,IAAI,CAAC,sBAAsB,EAAE,CAAC;wBAC9B,OAAO;oBACT,CAAC,CAAC;yBACD,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;wBACf,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;oBACtB,CAAC,CAAC,CAAC;iBACN;qBAAM;oBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,sHACE,IAAI,CAAC,MAAM,GAAG,CAChB,yBAAyB,IAAI,CAAC,GAAG,cAAc,IAAI,CAAC,OAAO,kBACzD,IAAI,CAAC,gBACP,EAAE,CACH,CACF,CAAC;iBACH;aACF;iBAAM;gBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,4FACE,IAAI,CAAC,GACP,sBAAsB,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CACxC,CACF,CAAC;aACH;QACH,CAAC,CAAC;QAnGA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC;QAC9B,IAAI,CAAC,gBAAgB;YACnB,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,IAAI,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3F,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;QACrC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QAEvB,IAAI,CAAC,sBAAsB,EAAE,CAAC;IAChC,CAAC;IAEM,KAAK;QACV,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;IACvB,CAAC;IAEO,sBAAsB;QAC5B,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;IACxD,CAAC;IAEO,yBAAyB;QAC/B,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC3D,IAAI,CAAC,MAAM,CAAC,cAA
c,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QAChE,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;IACpE,CAAC;IA0ED,QAAQ,CAAC,KAAmB,EAAE,QAAiC;QAC7D,iDAAiD;QACjD,IAAI,CAAC,yBAAyB,EAAE,CAAC;QAChC,IAAI,CAAC,MAAmB,CAAC,OAAO,EAAE,CAAC;QAEpC,QAAQ,CAAC,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC/C,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { TransferProgressEvent } from \"@azure/core-http\";\nimport { Readable } from \"stream\";\n\nexport type ReadableStreamGetter = (offset: number) => Promise;\n\nexport interface RetriableReadableStreamOptions {\n /**\n * Max retry count (greater than or equal to 0), undefined or invalid value means no retry\n */\n maxRetryRequests?: number;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Debug purpose only. Used to inject an unexpected end to existing internal stream,\n * to test stream retry works well or not.\n *\n * When assign it to true, for next incoming \"data\" event of internal stream,\n * RetriableReadableStream will try to emit an \"end\" event to existing internal\n * stream to force it end and start retry from the breaking point.\n * The value will then update to \"undefined\", once the injection works.\n */\n doInjectErrorOnce?: boolean;\n\n /**\n * A threshold, not a limit. Dictates the amount of data that a stream buffers before it stops asking for more data.\n */\n highWaterMark?: number;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n */\nexport class RetriableReadableStream extends Readable {\n private start: number;\n private offset: number;\n private end: number;\n private getter: ReadableStreamGetter;\n private source: NodeJS.ReadableStream;\n private retries: number = 0;\n private maxRetryRequests: number;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private options: RetriableReadableStreamOptions;\n\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param getter - A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param offset - Offset position in original data source to read\n * @param count - How much data in original data source to read\n * @param options -\n */\n public constructor(\n source: NodeJS.ReadableStream,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n super({ highWaterMark: options.highWaterMark });\n this.getter = getter;\n this.source = source;\n this.start = offset;\n this.offset = offset;\n this.end = offset + count - 1;\n this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? 
options.maxRetryRequests : 0;\n this.onProgress = options.onProgress;\n this.options = options;\n\n this.setSourceEventHandlers();\n }\n\n public _read(): void {\n this.source.resume();\n }\n\n private setSourceEventHandlers() {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private removeSourceEventHandlers() {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private sourceDataHandler = (data: Buffer) => {\n if (this.options.doInjectErrorOnce) {\n this.options.doInjectErrorOnce = undefined;\n this.source.pause();\n this.source.removeAllListeners(\"data\");\n this.source.emit(\"end\");\n return;\n }\n\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n this.offset += data.length;\n if (this.onProgress) {\n this.onProgress({ loadedBytes: this.offset - this.start });\n }\n if (!this.push(data)) {\n this.source.pause();\n }\n };\n\n private sourceErrorOrEndHandler = (err?: Error) => {\n if (err && err.name === \"AbortError\") {\n this.destroy(err);\n return;\n }\n\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n this.removeSourceEventHandlers();\n if (this.offset - 1 === this.end) {\n this.push(null);\n } else if (this.offset <= this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (this.retries < this.maxRetryRequests) {\n this.retries += 1;\n this.getter(this.offset)\n .then((newSource) => {\n this.source = newSource;\n this.setSourceEventHandlers();\n return;\n })\n .catch((error) => {\n this.destroy(error);\n });\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${\n this.offset - 1\n }, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${\n this.maxRetryRequests\n }`\n )\n );\n }\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: Received more data than original request, data needed offset is ${\n this.end\n }, received offset: ${this.offset - 1}`\n )\n );\n }\n };\n\n _destroy(error: Error | null, callback: (error?: Error) => void): void {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n (this.source as Readable).destroy();\n\n callback(error === null ? undefined : error);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js new file mode 100644 index 0000000000..131ba87276 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { DefaultHttpClient } from "@azure/core-http"; +const _defaultHttpClient = new DefaultHttpClient(); +export function getCachedDefaultHttpClient() { + return _defaultHttpClient; +} +//# sourceMappingURL=cache.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map new file mode 100644 index 0000000000..8156a9f0b8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js.map @@ -0,0 +1 @@ +{"version":3,"file":"cache.js","sourceRoot":"","sources":["../../../../src/utils/cache.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,iBAAiB,EAAE,MAAM,kBAAkB,CAAC;AAGrD,MAAM,kBAAkB,GAAG,IAAI,iBAAiB,EAAE,CAAC;AAEnD,MAAM,UAAU,0BAA0B;IACxC,OAAO,kBAAkB,CAAC;AAC5B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"@azure/core-http\";\nimport { IHttpClient } from \"../Pipeline\";\n\nconst _defaultHttpClient = new DefaultHttpClient();\n\nexport function getCachedDefaultHttpClient(): IHttpClient {\n return _defaultHttpClient;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js new file mode 100644 index 0000000000..db5dfe448a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js @@ -0,0 +1,198 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const SDK_VERSION = "12.11.0"; +export const SERVICE_VERSION = "2021-08-06"; +export const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +export const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +export const BLOCK_BLOB_MAX_BLOCKS = 50000; +export const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +export const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +export const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +/** + * The OAuth scope to use with Azure Storage. 
+ */ +export const StorageOAuthScopes = "https://storage.azure.com/.default"; +export const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +export const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +export const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +export const ETagNone = ""; +export const ETagAny = "*"; +export const SIZE_1_MB = 1 * 1024 * 1024; +export const BATCH_MAX_REQUEST = 256; +export const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +export const HTTP_LINE_ENDING = "\r\n"; +export const HTTP_VERSION_1_1 = "HTTP/1.1"; +export const EncryptionAlgorithmAES25 = "AES256"; +export const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +export const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + 
"x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +export const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +export const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +export const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map new file mode 100644 index 0000000000..9832c28a5d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"constants.js","sourceRoot":"","sources":["../../../../src/utils/constants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,WAAW,GAAW,SAAS,CAAC;AAC7C,MAAM,CAAC,MAAM,eAAe,GAAW,YAAY,CAAC;AAEpD,MAAM,CAAC,MAAM,gCAAgC,GAAW,GAAG,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,QAAQ;AACnF,MAAM,CAAC,MAAM,gCAAgC,GAAW,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,SAAS;AACrF,MAAM,CAAC,MAAM,qBAAqB,GAAW,KAAK,CAAC;AACnD,MAAM,CAAC,MAAM,+BAA+B,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,MAAM;AAC9E,MAAM,CAAC,MAAM,iCAAiC,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,MAAM;AAChF,MAAM,CAAC,MAAM,mCAAmC,GAAW,CAAC,CAAC;AAC7D;;GAEG;AACH,MAAM,CAAC,MAAM,kBAAkB,GAAsB,oCAAoC,CAAC;AAE1F,MAAM,CAAC,MAAM,YAAY,GAAG;IAC1B,UAAU,EAAE;QACV,sBAAsB,EAAE,GAAG;QAC3B,SAAS,EAAE,KAAK;QAChB,QAAQ,EAAE,UAAU;QACpB,SAAS,EAAE,WAAW;QACtB,OAAO,EAAE,SAAS;KACnB;CACF,CAAC;AAEF,MAAM,CAAC,MAAM,iBAAiB,GAAG;IAC/B,aAAa,EAAE,GAAG;IAClB,aAAa,EAAE,GAAG;IAClB,cAAc,EAAE,GAAG;IACnB,kBAAkB,EAAE,GAAG;IACvB,0BAA0B,EAAE,GAAG;CAChC,CAAC;AAEF,MAAM,CAAC,MAAM,eAAe,GAAG;IAC7B,aAAa,EAAE,eAAe;IAC9B,oBAAoB,EAAE,QAAQ;IAC9B,gBAAgB,EAAE,kBAAkB;IACpC,UAAU,EAAE,YAAY;IACxB,gBAAgB,EAAE,kBAAkB;IACpC,cAAc,EAAE,gBAAgB;IAChC,WAAW,EAAE,aAAa;IAC1B,yBAAyB,EAAE,2BAA2B;IACtD,YAAY,EAAE,cAAc;IAC5B,MAAM,EAAE,QAAQ;IAChB,IAAI,EAAE,MAAM;IACZ,QAAQ,EAAE,UAAU;IACpB,iBAAiB,EAAE,mBAAmB;IACtC,aAAa,EAAE,eAAe;IAC9B,mBAAmB,EAAE,qBAAqB;IAC1C,kBAAkB,EAAE,OAAO;IAC3B,KAAK,EAAE,OAAO;IACd,UAAU,EAAE,YAAY;IACxB,sBAAsB,EAAE,wBAAwB;IAChD,gBAAgB,EAAE,kBAAkB;IACpC,SAAS,EAAE,WAAW;IACtB,eAAe,EAAE,iBAAiB;IAClC,YAAY,EAAE,cAAc;CAC7B,CAAC;AAEF,MAAM,CAAC,MAAM,QAAQ,GAAG,EAAE,CAAC;AAC3B,MAAM,CAAC,MAAM,OAAO,GAAG,GAAG,CAAC;AAE3B,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AACzC,MAAM,CAAC,MAAM,iBAAiB,GAAG,GAAG,CAAC;AACrC,MAAM,CAAC,MAAM,0BAA0B,GAAG,CAAC,GAAG,SAAS,CAAC;AACxD,MAAM,CA
AC,MAAM,gBAAgB,GAAG,MAAM,CAAC;AACvC,MAAM,CAAC,MAAM,gBAAgB,GAAG,UAAU,CAAC;AAE3C,MAAM,CAAC,MAAM,wBAAwB,GAAG,QAAQ,CAAC;AAEjD,MAAM,CAAC,MAAM,2BAA2B,GAAG,sNAAsN,CAAC;AAElQ,MAAM,CAAC,MAAM,oCAAoC,GAAG;IAClD,6BAA6B;IAC7B,eAAe;IACf,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,YAAY;IACZ,aAAa;IACb,mBAAmB;IACnB,YAAY;IACZ,wBAAwB;IACxB,WAAW;IACX,iBAAiB;IACjB,iBAAiB;IACjB,+BAA+B;IAC/B,cAAc;IACd,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,kBAAkB;IAClB,aAAa;IACb,eAAe;IACf,MAAM;IACN,eAAe;IACf,QAAQ;IACR,MAAM;IACN,oBAAoB;IACpB,kBAAkB;IAClB,2BAA2B;IAC3B,cAAc;IACd,oBAAoB;IACpB,kBAAkB;IAClB,8BAA8B;IAC9B,qBAAqB;IACrB,kBAAkB;IAClB,mBAAmB;IACnB,YAAY;IACZ,+BAA+B;IAC/B,uBAAuB;IACvB,eAAe;IACf,mBAAmB;IACnB,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,8BAA8B;IAC9B,2BAA2B;IAC3B,mBAAmB;IACnB,qBAAqB;IACrB,yBAAyB;IACzB,yBAAyB;IACzB,iCAAiC;IACjC,+BAA+B;IAC/B,6BAA6B;IAC7B,+BAA+B;IAC/B,4BAA4B;IAC5B,4BAA4B;IAC5B,0BAA0B;IAC1B,uBAAuB;IACvB,wBAAwB;IACxB,yBAAyB;IACzB,2BAA2B;IAC3B,gBAAgB;IAChB,gCAAgC;IAChC,oBAAoB;IACpB,+BAA+B;IAC/B,uBAAuB;IACvB,4BAA4B;IAC5B,qCAAqC;IACrC,2BAA2B;IAC3B,4BAA4B;IAC5B,4BAA4B;IAC5B,4BAA4B;IAC5B,uBAAuB;IACvB,mBAAmB;IACnB,yBAAyB;IACzB,qBAAqB;IACrB,eAAe;IACf,iBAAiB;IACjB,iBAAiB;IACjB,wBAAwB;IACxB,4BAA4B;IAC5B,yBAAyB;IACzB,6BAA6B;IAC7B,eAAe;IACf,yBAAyB;IACzB,sBAAsB;IACtB,+BAA+B;IAC/B,2BAA2B;IAC3B,iCAAiC;IACjC,gBAAgB;IAChB,4BAA4B;IAC5B,cAAc;IACd,qBAAqB;CACtB,CAAC;AAEF,MAAM,CAAC,MAAM,wCAAwC,GAAG;IACtD,MAAM;IACN,YAAY;IACZ,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,IAAI;IACJ,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,IAAI;IACJ,IAAI;IACJ,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,SAAS;IACT,eAAe;IACf,WAAW;IACX,cAAc;IACd,KAAK;IACL,OAAO;IACP,KAAK;IACL,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;CACX,CAAC;AAEF,MAAM,CAAC,MAAM,sCAAsC,GAAG,qCAAqC,CAAC;AAC5F,MAAM,CAAC,MAAM,yCAAyC,GACpD,2CAA2C,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const SDK_VERSION: string = \"12.11.0\";\nexport const SERVICE_VERSION: string = \"2021-08-06\";\n\nexport const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES: number = 256 * 1024 * 1024; // 256MB\nexport const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES: number = 4000 * 1024 * 1024; // 4000MB\nexport const BLOCK_BLOB_MAX_BLOCKS: number = 50000;\nexport const DEFAULT_BLOCK_BUFFER_SIZE_BYTES: number = 8 * 1024 * 1024; // 8MB\nexport const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES: number = 4 * 1024 * 1024; // 4MB\nexport const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS: number = 5;\n/**\n * The OAuth scope to use with Azure Storage.\n */\nexport const StorageOAuthScopes: string | string[] = \"https://storage.azure.com/.default\";\n\nexport const URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\",\n },\n};\n\nexport const HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416,\n};\n\nexport const HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: 
\"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\",\n};\n\nexport const ETagNone = \"\";\nexport const ETagAny = \"*\";\n\nexport const SIZE_1_MB = 1 * 1024 * 1024;\nexport const BATCH_MAX_REQUEST = 256;\nexport const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nexport const HTTP_LINE_ENDING = \"\\r\\n\";\nexport const HTTP_VERSION_1_1 = \"HTTP/1.1\";\n\nexport const EncryptionAlgorithmAES25 = \"AES256\";\n\nexport const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`;\n\nexport const StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n \"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n \"x-ms-access-tier-inferred\",\n \"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n \"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n \"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n 
\"x-ms-source-if-tags\",\n];\n\nexport const StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\",\n];\n\nexport const BlobUsesCustomerSpecifiedEncryptionMsg = \"BlobUsesCustomerSpecifiedEncryption\";\nexport const BlobDoesNotUseCustomerSpecifiedEncryption =\n \"BlobDoesNotUseCustomerSpecifiedEncryption\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js new file mode 100644 index 0000000000..66a4527e3d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createSpanFunction } from "@azure/core-tracing"; +/** + * Creates a span using the global tracer. + * @internal + */ +export const createSpan = createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage", +}); +/** + * @internal + * + * Adapt the tracing options from OperationOptions to what they need to be for + * RequestOptionsBase (when we update to later OpenTelemetry versions this is now + * two separate fields, not just one). + */ +export function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? 
void 0 : _b.tracingContext, + }; +} +//# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map new file mode 100644 index 0000000000..82a88ca9ec --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracing.js","sourceRoot":"","sources":["../../../../src/utils/tracing.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAEzD;;;GAGG;AACH,MAAM,CAAC,MAAM,UAAU,GAAG,kBAAkB,CAAC;IAC3C,aAAa,EAAE,oBAAoB;IACnC,SAAS,EAAE,mBAAmB;CAC/B,CAAC,CAAC;AAEH;;;;;;GAMG;AACH,MAAM,UAAU,kCAAkC,CAChD,OAA0B;;IAE1B,OAAO;QACL,+HAA+H;QAC/H,WAAW,EAAE,MAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,cAAsB,0CAAE,WAAW;QAC1D,cAAc,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,cAAc,0CAAE,cAAc;KACxD,CAAC;AACJ,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { OperationOptions, RequestOptionsBase } from \"@azure/core-http\";\nimport { createSpanFunction } from \"@azure/core-tracing\";\n\n/**\n * Creates a span using the global tracer.\n * @internal\n */\nexport const createSpan = createSpanFunction({\n packagePrefix: \"Azure.Storage.Blob\",\n namespace: \"Microsoft.Storage\",\n});\n\n/**\n * @internal\n *\n * Adapt the tracing options from OperationOptions to what they need to be for\n * RequestOptionsBase (when we update to later OpenTelemetry versions this is now\n * two separate fields, not just one).\n */\nexport function convertTracingToRequestOptionsBase(\n options?: OperationOptions\n): Pick {\n return {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n spanOptions: (options?.tracingOptions as any)?.spanOptions,\n tracingContext: options?.tracingOptions?.tracingContext,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js new file mode 100644 index 0000000000..8b76ae8e90 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Convert a Browser Blob object into ArrayBuffer. + * + * @param blob - + */ +export async function blobToArrayBuffer(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsArrayBuffer(blob); + }); +} +/** + * Convert a Browser Blob object into string. 
+ * + * @param blob - + */ +export async function blobToString(blob) { + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + fileReader.onloadend = (ev) => { + resolve(ev.target.result); + }; + fileReader.onerror = reject; + fileReader.readAsText(blob); + }); +} +export function streamToBuffer() { + /* empty */ +} +export function streamToBuffer2() { + /* empty */ +} +export function readStreamToLocalFile() { + /* empty */ +} +export const fsStat = function stat() { + /* empty */ +}; +export const fsCreateReadStream = function createReadStream() { + /* empty */ +}; +//# sourceMappingURL=utils.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map new file mode 100644 index 0000000000..be89432403 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.browser.js","sourceRoot":"","sources":["../../../../src/utils/utils.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAC,IAAU;IAChD,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,OAAO,IAAI,OAAO,CAAc,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAClD,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;YACjC,OAAO,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC;QACF,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC;QAC5B,UAAU,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,YAAY,CAAC,IAAU;IAC3C,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;IACpC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC7C,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;YACjC,OAAO,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC;QAC7B,CAAC,CAAC;QACF,UAAU,CAAC,OAAO,GAAG,MAAM,CAAC;QAC5B,UAAU,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACL,CAAC;AAED,MAAM,UAAU,cAAc;IAC5B,WAAW;AACb,CAAC;AAED,MAAM,UAAU,eAAe;IAC7B,WAAW;AACb,CAAC;AAED,MAAM,UAAU,qBAAqB;IACnC,WAAW;AACb,CAAC;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,SAAS,IAAI;IACjC,WAAW;AACb,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,kBAAkB,GAAG,SAAS,gBAAgB;IACzD,WAAW;AACb,CAAC,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Convert a Browser Blob object into ArrayBuffer.\n *\n * @param blob -\n */\nexport async function blobToArrayBuffer(blob: Blob): Promise {\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n fileReader.onloadend = (ev: any) => {\n resolve(ev.target!.result);\n };\n fileReader.onerror = reject;\n fileReader.readAsArrayBuffer(blob);\n });\n}\n\n/**\n * Convert a Browser Blob object into string.\n *\n * @param blob -\n */\nexport async function blobToString(blob: Blob): Promise {\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n fileReader.onloadend = (ev: any) => {\n resolve(ev.target!.result);\n };\n fileReader.onerror = reject;\n fileReader.readAsText(blob);\n });\n}\n\nexport function streamToBuffer(): void {\n /* empty */\n}\n\nexport function streamToBuffer2(): void {\n /* empty */\n}\n\nexport function readStreamToLocalFile(): void {\n /* empty */\n}\n\nexport const fsStat = function stat(): void {\n /* empty */\n};\n\nexport const fsCreateReadStream = function createReadStream(): void {\n /* empty */\n};\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js new file mode 100644 index 0000000000..24116df48c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js @@ -0,0 +1,992 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpHeaders, isNode, URLBuilder } from "@azure/core-http"; +import { DevelopmentConnectionString, HeaderConstants, URLConstants } from "./constants"; +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. 
+ * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +export function escapeURLPath(url) { + const urlParsed = URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path || "/"; + path = escape(path); + urlParsed.setPath(path); + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +export function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. + */ +export function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + const accountName = getAccountNameFromUrl(blobEndpoint); + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +export function appendToURLPath(url, name) { + const urlParsed = URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.setPath(path); + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +export function setURLParameter(url, name, value) { + const urlParsed = URLBuilder.parse(url); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); +} +/** + * Get URL parameter by name. 
+ * + * @param url - + * @param name - + */ +export function getURLParameter(url, name) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getQueryParameterValue(name); +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +export function setURLHost(url, host) { + const urlParsed = URLBuilder.parse(url); + urlParsed.setHost(host); + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +export function getURLPath(url) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getPath(); +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +export function getURLScheme(url) { + const urlParsed = URLBuilder.parse(url); + return urlParsed.getScheme(); +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +export function getURLPathAndQuery(url) { + const urlParsed = URLBuilder.parse(url); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +export function getURLQueries(url) { + let queryString = URLBuilder.parse(url).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +export function appendToURLQuery(url, queryParts) { + const urlParsed = URLBuilder.parse(url); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +export function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +export function base64encode(content) { + return !isNode ? 
btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Base64 decode. + * + * @param encodedString - + */ +export function base64decode(encodedString) { + return !isNode ? atob(encodedString) : Buffer.from(encodedString, "base64").toString(); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +export function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +export async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +export function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +export function sanitizeURL(url) { + let safeURL = url; + if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) { + safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, "*****"); + } + return safeURL; +} +export function sanitizeHeaders(originalHeader) { + const headers = new HttpHeaders(); + for (const header of originalHeader.headersArray()) { + if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(header.name, "*****"); + } + else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(header.name, sanitizeURL(header.value)); + } + else { + headers.set(header.name, header.value); + } + } + return headers; +} +/** + * If two strings are equal when compared case insensitive. 
+ * + * @param str1 - + * @param str2 - + */ +export function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +export function getAccountNameFromUrl(url) { + const parsedUrl = URLBuilder.parse(url); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.getHost().split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.getPath().split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +export function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === undefined) { + return false; + } + const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port), use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +export function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +export function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +export function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. 
+ * + * @param textConfiguration - + */ +export function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +export function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +/** + * Attach a TokenCredential to an object. + * + * @param thing - + * @param credential - + */ +export function attachCredential(thing, credential) { + thing.credential = credential; + return thing; +} +export function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +export function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +export function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +export function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobProperties(blobPropertiesInXML) { + const blobProperties = blobPropertiesInXML; + if (blobPropertiesInXML["Creation-Time"]) { + blobProperties.createdOn = new Date(blobPropertiesInXML["Creation-Time"]); + delete blobProperties["Creation-Time"]; + } + if (blobPropertiesInXML["Last-Modified"]) { + blobProperties.lastModified = new Date(blobPropertiesInXML["Last-Modified"]); + delete blobProperties["Last-Modified"]; + } + if (blobPropertiesInXML["Etag"]) { + blobProperties.etag = blobPropertiesInXML["Etag"]; + delete blobProperties["Etag"]; + } + if (blobPropertiesInXML["Content-Length"]) { + blobProperties.contentLength = parseFloat(blobPropertiesInXML["Content-Length"]); + delete blobProperties["Content-Length"]; + } + if (blobPropertiesInXML["Content-Type"]) { + blobProperties.contentType = blobPropertiesInXML["Content-Type"]; + delete blobProperties["Content-Type"]; + } + if (blobPropertiesInXML["Content-Encoding"]) { + blobProperties.contentEncoding = blobPropertiesInXML["Content-Encoding"]; + delete blobProperties["Content-Encoding"]; + } + if (blobPropertiesInXML["Content-Language"]) { + blobProperties.contentLanguage = blobPropertiesInXML["Content-Language"]; + delete blobProperties["Content-Language"]; + } + if (blobPropertiesInXML["Content-MD5"]) { + blobProperties.contentMD5 = decodeBase64String(blobPropertiesInXML["Content-MD5"]); + delete blobProperties["Content-MD5"]; + } + if (blobPropertiesInXML["Content-Disposition"]) { + blobProperties.contentDisposition = blobPropertiesInXML["Content-Disposition"]; + delete blobProperties["Content-Disposition"]; + } + if (blobPropertiesInXML["Cache-Control"]) { + blobProperties.cacheControl = blobPropertiesInXML["Cache-Control"]; + delete blobProperties["Cache-Control"]; + } + if (blobPropertiesInXML["x-ms-blob-sequence-number"]) { + blobProperties.blobSequenceNumber = parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]); + delete blobProperties["x-ms-blob-sequence-number"]; + } + if (blobPropertiesInXML["BlobType"]) { + blobProperties.blobType = blobPropertiesInXML["BlobType"]; + delete blobProperties["BlobType"]; + } + if (blobPropertiesInXML["LeaseStatus"]) { + blobProperties.leaseStatus = blobPropertiesInXML["LeaseStatus"]; + delete blobProperties["LeaseStatus"]; + } + if (blobPropertiesInXML["LeaseState"]) { + blobProperties.leaseState = 
blobPropertiesInXML["LeaseState"]; + delete blobProperties["LeaseState"]; + } + if (blobPropertiesInXML["LeaseDuration"]) { + blobProperties.leaseDuration = blobPropertiesInXML["LeaseDuration"]; + delete blobProperties["LeaseDuration"]; + } + if (blobPropertiesInXML["CopyId"]) { + blobProperties.copyId = blobPropertiesInXML["CopyId"]; + delete blobProperties["CopyId"]; + } + if (blobPropertiesInXML["CopyStatus"]) { + blobProperties.copyStatus = blobPropertiesInXML["CopyStatus"]; + delete blobProperties["CopyStatus"]; + } + if (blobPropertiesInXML["CopySource"]) { + blobProperties.copySource = blobPropertiesInXML["CopySource"]; + delete blobProperties["CopySource"]; + } + if (blobPropertiesInXML["CopyProgress"]) { + blobProperties.copyProgress = blobPropertiesInXML["CopyProgress"]; + delete blobProperties["CopyProgress"]; + } + if (blobPropertiesInXML["CopyCompletionTime"]) { + blobProperties.copyCompletedOn = new Date(blobPropertiesInXML["CopyCompletionTime"]); + delete blobProperties["CopyCompletionTime"]; + } + if (blobPropertiesInXML["CopyStatusDescription"]) { + blobProperties.copyStatusDescription = blobPropertiesInXML["CopyStatusDescription"]; + delete blobProperties["CopyStatusDescription"]; + } + if (blobPropertiesInXML["ServerEncrypted"]) { + blobProperties.serverEncrypted = ParseBoolean(blobPropertiesInXML["ServerEncrypted"]); + delete blobProperties["ServerEncrypted"]; + } + if (blobPropertiesInXML["IncrementalCopy"]) { + blobProperties.incrementalCopy = ParseBoolean(blobPropertiesInXML["IncrementalCopy"]); + delete blobProperties["IncrementalCopy"]; + } + if (blobPropertiesInXML["DestinationSnapshot"]) { + blobProperties.destinationSnapshot = blobPropertiesInXML["DestinationSnapshot"]; + delete blobProperties["DestinationSnapshot"]; + } + if (blobPropertiesInXML["DeletedTime"]) { + blobProperties.deletedOn = new Date(blobPropertiesInXML["DeletedTime"]); + delete blobProperties["DeletedTime"]; + } + if (blobPropertiesInXML["RemainingRetentionDays"]) { + blobProperties.remainingRetentionDays = parseFloat(blobPropertiesInXML["RemainingRetentionDays"]); + delete blobProperties["RemainingRetentionDays"]; + } + if (blobPropertiesInXML["AccessTier"]) { + blobProperties.accessTier = blobPropertiesInXML["AccessTier"]; + delete blobProperties["AccessTier"]; + } + if (blobPropertiesInXML["AccessTierInferred"]) { + blobProperties.accessTierInferred = ParseBoolean(blobPropertiesInXML["AccessTierInferred"]); + delete blobProperties["AccessTierInferred"]; + } + if (blobPropertiesInXML["ArchiveStatus"]) { + blobProperties.archiveStatus = blobPropertiesInXML["ArchiveStatus"]; + delete blobProperties["ArchiveStatus"]; + } + if (blobPropertiesInXML["CustomerProvidedKeySha256"]) { + blobProperties.customerProvidedKeySha256 = blobPropertiesInXML["CustomerProvidedKeySha256"]; + delete blobProperties["CustomerProvidedKeySha256"]; + } + if (blobPropertiesInXML["EncryptionScope"]) { + blobProperties.encryptionScope = blobPropertiesInXML["EncryptionScope"]; + delete blobProperties["EncryptionScope"]; + } + if (blobPropertiesInXML["AccessTierChangeTime"]) { + blobProperties.accessTierChangedOn = new Date(blobPropertiesInXML["AccessTierChangeTime"]); + delete blobProperties["AccessTierChangeTime"]; + } + if (blobPropertiesInXML["TagCount"]) { + blobProperties.tagCount = parseFloat(blobPropertiesInXML["TagCount"]); + delete blobProperties["TagCount"]; + } + if (blobPropertiesInXML["Expiry-Time"]) { + blobProperties.expiresOn = new Date(blobPropertiesInXML["Expiry-Time"]); + delete 
blobProperties["Expiry-Time"]; + } + if (blobPropertiesInXML["Sealed"]) { + blobProperties.isSealed = ParseBoolean(blobPropertiesInXML["Sealed"]); + delete blobProperties["Sealed"]; + } + if (blobPropertiesInXML["RehydratePriority"]) { + blobProperties.rehydratePriority = blobPropertiesInXML["RehydratePriority"]; + delete blobProperties["RehydratePriority"]; + } + if (blobPropertiesInXML["LastAccessTime"]) { + blobProperties.lastAccessedOn = new Date(blobPropertiesInXML["LastAccessTime"]); + delete blobProperties["LastAccessTime"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyUntilDate"]) { + blobProperties.immutabilityPolicyExpiresOn = new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]); + delete blobProperties["ImmutabilityPolicyUntilDate"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyMode"]) { + blobProperties.immutabilityPolicyMode = blobPropertiesInXML["ImmutabilityPolicyMode"]; + delete blobProperties["ImmutabilityPolicyMode"]; + } + if (blobPropertiesInXML["LegalHold"]) { + blobProperties.legalHold = ParseBoolean(blobPropertiesInXML["LegalHold"]); + delete blobProperties["LegalHold"]; + } + return blobProperties; +} +function ParseBlobItem(blobInXML) { + const blobItem = blobInXML; + blobItem.properties = ParseBlobProperties(blobInXML["Properties"]); + delete blobItem["Properties"]; + blobItem.name = ParseBlobName(blobInXML["Name"]); + delete blobItem["Name"]; + blobItem.deleted = ParseBoolean(blobInXML["Deleted"]); + delete blobItem["Deleted"]; + if (blobInXML["Snapshot"]) { + blobItem.snapshot = blobInXML["Snapshot"]; + delete blobItem["Snapshot"]; + } + if (blobInXML["VersionId"]) { + blobItem.versionId = blobInXML["VersionId"]; + delete blobItem["VersionId"]; + } + if (blobInXML["IsCurrentVersion"]) { + blobItem.isCurrentVersion = ParseBoolean(blobInXML["IsCurrentVersion"]); + delete blobItem["IsCurrentVersion"]; + } + if (blobInXML["Metadata"]) { + blobItem.metadata = blobInXML["Metadata"]; + delete blobItem["Metadata"]; + } + if (blobInXML["Tags"]) { + blobItem.blobTags = ParseBlobTags(blobInXML["Tags"]); + delete blobItem["Tags"]; + } + if (blobInXML["OrMetadata"]) { + blobItem.objectReplicationMetadata = blobInXML["OrMetadata"]; + delete blobItem["OrMetadata"]; + } + if (blobInXML["HasVersionsOnly"]) { + blobItem.hasVersionsOnly = ParseBoolean(blobInXML["HasVersionsOnly"]); + delete blobItem["HasVersionsOnly"]; + } + return blobItem; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +export function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +export function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if 
(blobPrefixesInXML instanceof Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} +export function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} +//# sourceMappingURL=utils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map new file mode 100644 index 0000000000..7bad806510 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"utils.common.js","sourceRoot":"","sources":["../../../../src/utils/utils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAGlC,OAAO,EAAE,WAAW,EAAE,MAAM,EAAE,UAAU,EAAmB,MAAM,kBAAkB,CAAC;AA8BpF,OAAO,EAAE,2BAA2B,EAAE,eAAe,EAAE,YAAY,EAAE,MAAM,aAAa,CAAC;AAiBzF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmDG;AACH,MAAM,UAAU,aAAa,CAAC,GAAW;IACvC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IAC/B,IAAI,GAAG,IAAI,IAAI,GAAG,CAAC;IAEnB,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC;IACpB,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAExB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAWD,SAAS,4BAA4B,CAAC,gBAAwB;IAC5D,gCAAgC;IAChC,sKAAsK;IACtK,IAAI,QAAQ,GAAG,EAAE,CAAC;IAClB,IAAI,gBAAgB,CAAC,MAAM,CAAC,6BAA6B,CAAC,KAAK,CAAC,CAAC,EAAE;QACjE,4FAA4F;QAC5F,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACrD,KAAK,MAAM,OAAO,IAAI,gBAAgB,EAAE;YACtC,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,iCAAiC,CAAE,CAAC,CAAC,CAAC,CAAC;aACxE;SACF;KACF;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,MAAM,UAAU,oBAAoB,CAClC,gBAAwB,EACxB,QAM2B;IAE3B,MAAM,QAAQ,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAC7C,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YACvC,OAAO,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAE,CAAC,CAAC,CAAC,CAAC;SACrD;KACF;IACD,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,4BAA4B,CAAC,gBAAwB;IACnE,IAAI,QAAQ,GAAG,EAAE,CAAC;IAElB,IAAI,gBAAgB,CAAC,UAAU,CAAC,4BAA4B,CAAC,EAAE;QAC7D,gCAAgC;QAChC,QAAQ,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QAC1D,gBAAgB,GAAG,2BAA2B,CAAC;KAChD;IAED,yDAAyD;IACzD,IAAI,YAAY,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;IAC1E,uCAAuC;IACvC,kGAAkG;IAClG,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC;IAErF,IACE,gBAAgB,CAAC,MAAM,CAAC,2BAA2B,CAAC,KAAK,CAAC,CAAC;QAC3D,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAC7C;QACA,4BAA4B;QAE5B,IAAI,wBAAwB,GAAG,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,EAAE,CAAC;QACrB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QACrD,IAAI,cAAc,GAAG,EAAE,CAAC;QAExB,2BAA2B;QAC3B,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;QACpE,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,oBAAoB,CAAC,gBAAgB,EAAE,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEzF,IAAI,CAAC,YAAY,EAAE;YACjB,+DAA+D;YAC/D,6FAA6F;YAE7F,wBAAwB,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,0BAA0B,CAAC,CAAC;YAC9F,MAAM,QAAQ,GAAG,wBAAyB,CAAC,WAAW,EAAE,CAAC;YACzD,IAAI,QAAQ,KAAK,OAAO,IAAI,QAAQ,KAAK,MAAM,EAAE;gBAC/C,MAAM,IAAI,KAAK,CACb,iGAAiG,CAClG,CAAC;aACH;YAED,cAAc,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;YAC1E,IAAI,CAAC,cAAc,EAAE;gBACnB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;aAC7E;YACD,YAAY,GAAG,GAAG,wBAAwB,MAAM,WAAW,SAAS,cAAc,EAAE,CAAC;SACtF;QAED,IAAI,CAAC,WAAW,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;SAC1E;aAAM,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;SACzE;QAED,OAAO;YACL,IAAI,EAAE,mBAAmB;YACzB,GAAG,EAAE,YAAY;YACjB,WAAW;YACX,UAAU;YACV,QAAQ;SACT,CAAC;KACH;SAAM;QACL,wBAAwB;QAExB,MAAM,UAAU,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,uBAAuB,CAAC,CAAC;QACnF,MAAM,WAAW,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAAC;QACxD,IAAI,CAAC,YAAY,EAAE;YACjB,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;SAC/E;aAAM,IAAI,CAAC,UAAU,EAAE;YACtB,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;SACxF;QAED,OAAO,EAAE,IAAI,EAAE,eAAe,EAAE,GAAG,EAAE,YAAY,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC;KAC9E;AACH,CAAC;AAED;;;;GAIG;AACH,SAAS,MAAM,CAAC,IAAY;IAC1B,OAAO,kBAAkB,CAAC,IAAI,CAAC;SAC5B,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,uBAAuB;SAC5C,OAAO,CAAC,I
AAI,EAAE,KAAK,CAAC,CAAC,iBAAiB;SACtC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;SACrB,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC,qBAAqB;AAChD,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY;IACvD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IAC/B,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IACjF,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IAExB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY,EAAE,KAAc;IACvE,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,SAAS,CAAC,iBAAiB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;IACzC,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,GAAW,EAAE,IAAY;IACvD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;AAChD,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW,EAAE,IAAY;IAClD,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;IACxB,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,GAAW;IACpC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,OAAO,EAAE,CAAC;AAC7B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,GAAW;IACtC,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC;AAC/B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,kBAAkB,CAAC,GAAW;IAC5C,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACxC,MAAM,UAAU,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IACvC,IAAI,CAAC,UAAU,EAAE;QACf,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAC;KACzD;IAED,IAAI,WAAW,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC;IAC7C,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,IAAI,WAAW,KAAK,EAAE,EAAE;QACtB,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,WAAW,EAAE,CAAC,CAAC,qCAAqC;KACnH;IAED,OAAO,GAAG,UAAU,GAAG,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,GAAW;IACvC,IAAI,WAAW,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;IACnD,IAAI,CAAC,WAAW,EAAE;QAChB,OAAO,EAAE,CAAC;KACX;IAED,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,WAAW,CAAC;IAEhF,IAAI,eAAe,GAAa,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACvD,eAAe,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC,KAAa,EAAE,EAAE;QACzD,MAAM,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACxC,MAAM,gBAAgB,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;QAChD,OAAO,CACL,YAAY,GAAG,CAAC,IAAI,YAAY,KAAK,gBAAgB,IAAI,gBAAgB,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,CAC7F,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,MAAM,OAAO,GAA8B,EAAE,CAAC;IAC9C,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;QAC5C,MAAM,YAAY,GAAG,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC/C,MAAM,GAAG,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;QACpC,MAAM,KAAK,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;QACtC,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KACtB;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,gBAAgB,CAAC,GAAW,EAAE,UAAkB;IAC9D,MAAM,SAAS,GAAG,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IAExC,IAAI,KAAK,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC;IACjC,IAAI,KAAK,EAAE;QACT,KAAK,IAAI,GAAG,GAAG,UAAU,CAAC;KAC3B;SAAM;QACL,KAAK,GAAG,UAAU,CAAC;KACpB;IAED,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC1B,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,UAAU,oBAAoB,CAAC,IAAU,EAAE,mBAA4B,IAAI;IAC/E,iEAAiE;IACjE,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;IAEtC,OAAO,gBAAgB;QACrB,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,MAAM,GAAG,GAAG;QAC/D,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,
UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AAC3D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,OAAe;IAC1C,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC3E,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,YAAY,CAAC,aAAqB;IAChD,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;AACzF,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,eAAe,CAAC,aAAqB,EAAE,UAAkB;IACvE,mEAAmE;IACnE,MAAM,qBAAqB,GAAG,EAAE,CAAC;IAEjC,4EAA4E;IAC5E,MAAM,mBAAmB,GAAG,CAAC,CAAC;IAE9B,MAAM,6BAA6B,GAAG,qBAAqB,GAAG,mBAAmB,CAAC;IAElF,IAAI,aAAa,CAAC,MAAM,GAAG,6BAA6B,EAAE;QACxD,aAAa,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,6BAA6B,CAAC,CAAC;KACvE;IACD,MAAM,GAAG,GACP,aAAa;QACb,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,EAAE,qBAAqB,GAAG,aAAa,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IACrF,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,KAAK,CACzB,QAAgB,EAChB,OAAyB,EACzB,UAAkB;IAElB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,2CAA2C;QAC3C,IAAI,OAAY,CAAC;QAEjB,MAAM,YAAY,GAAG,GAAG,EAAE;YACxB,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,YAAY,CAAC,OAAO,CAAC,CAAC;aACvB;YACD,MAAM,CAAC,UAAU,CAAC,CAAC;QACrB,CAAC,CAAC;QAEF,MAAM,cAAc,GAAG,GAAG,EAAE;YAC1B,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,OAAO,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;aACpD;YACD,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC;QAEF,OAAO,GAAG,UAAU,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;QAE/C,IAAI,OAAO,KAAK,SAAS,EAAE;YACzB,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;SACjD;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,QAAQ,CACtB,aAAqB,EACrB,YAAoB,EACpB,YAAoB,GAAG;IAEvB,+EAA+E;IAC/E,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE;QAC7B,OAAO,aAAa,CAAC,QAAQ,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;KACxD;IAED,SAAS,GAAG,SAAS,IAAI,GAAG,CAAC;IAC7B,IAAI,aAAa,CAAC,MAAM,GAAG,YAAY,EAAE;QACvC,OAAO,aAAa,CAAC;KACtB;SAAM;QACL,YAAY,GAAG,YAAY,GAAG,aAAa,CAAC,MAAM,CAAC;QACnD,IAAI,YAAY,GAAG,SAAS,CAAC,MAAM,EAAE;YACnC,SAAS,IAAI,SAAS,CAAC,MAAM,CAAC,YAAY,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;SAChE;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,GAAG,aAAa,CAAC;KACzD;AACH,CAAC;AAED,MAAM,UAAU,WAAW,CAAC,GAAW;IACrC,IAAI,OAAO,GAAW,GAAG,CAAC;IAC1B,IAAI,eAAe,CAAC,OAAO,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;QAC/D,OAAO,GAAG,eAAe,CAAC,OAAO,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;KAChF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AAED,MAAM,UAAU,eAAe,CAAC,cAA2B;IACzD,MAAM,OAAO,GAAgB,IAAI,WAAW,EAAE,CAAC;IAC/C,KAAK,MAAM,MAAM,IAAI,cAAc,CAAC,YAAY,EAAE,EAAE;QAClD,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,eAAe,CAAC,aAAa,CAAC,WAAW,EAAE,EAAE;YAC7E,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;SACnC;aAAM,IAAI,MAAM,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,eAAe,CAAC,gBAAgB,EAAE;YACzE,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;SACrD;aAAM;YACL,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,KAAK,CAAC,CAAC;SACxC;KACF;IAED,OAAO,OAAO,CAAC;AACjB,CAAC;AACD;;;;;GAKG;AACH,MAAM,UAAU,MAAM,CAAC,IAAY,EAAE,IAAY;IAC/C,OAAO,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,CAAC,iBAAiB,EAAE,CAAC;AAC/D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,qBAAqB,CAAC,GAAW;IAC/C,MAAM,SAAS,GAAe,UAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACpD,IAAI,WAAW,CAAC;IAChB,IAAI;QACF,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;YACjD,yEAAyE;YACzE,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;SAClD;aAAM,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;YACvC,iFAAiF;YACjF,2GAA2G;YAC3G,mCAAmC;YACnC,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;SAClD;aAAM;YACL,qEAAqE;YACrE,WAAW,GAAG,EAAE,CAAC;SAClB;QACD,OAAO,WAAW,CAAC;KAC
pB;IAAC,OAAO,KAAU,EAAE;QACnB,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;KAC7E;AACH,CAAC;AAED,MAAM,UAAU,iBAAiB,CAAC,SAAqB;IACrD,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,EAAE;QACrC,OAAO,KAAK,CAAC;KACd;IAED,MAAM,IAAI,GACR,SAAS,CAAC,OAAO,EAAG,GAAG,CAAC,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC;IAE9F,sFAAsF;IACtF,gEAAgE;IAChE,wEAAwE;IACxE,wFAAwF;IACxF,OAAO,4HAA4H,CAAC,IAAI,CACtI,IAAI,CACL,CAAC;AACJ,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,gBAAgB,CAAC,IAAW;IAC1C,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,QAAQ,GAAG,EAAE,CAAC;IACpB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACtB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;YACxB,QAAQ,CAAC,IAAI,CAAC,GAAG,kBAAkB,CAAC,GAAG,CAAC,IAAI,kBAAkB,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;SAC1E;KACF;IAED,OAAO,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,IAAW;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,GAAG,GAAa;QACpB,UAAU,EAAE,EAAE;KACf,CAAC;IAEF,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;QACtB,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;YACxB,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC;gBAClB,GAAG;gBACH,KAAK;aACN,CAAC,CAAC;SACJ;KACF;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,MAAM,CAAC,IAAe;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;QACtB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,GAAG,GAAS,EAAE,CAAC;IACrB,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE;QACrC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;KAClC;IACD,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,oBAAoB,CAClC,iBAIiC;IAEjC,IAAI,iBAAiB,KAAK,SAAS,EAAE;QACnC,OAAO,SAAS,CAAC;KAClB;IAED,QAAQ,iBAAiB,CAAC,IAAI,EAAE;QAC9B,KAAK,KAAK;YACR,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,WAAW;oBACjB,0BAA0B,EAAE;wBAC1B,eAAe,EAAE,iBAAiB,CAAC,eAAe,IAAI,GAAG;wBACzD,UAAU,EAAE,iBAAiB,CAAC,UAAU,IAAI,EAAE;wBAC9C,eAAe,EAAE,iBAAiB,CAAC,eAAe;wBAClD,UAAU,EAAE,iBAAiB,CAAC,eAAe,IAAI,EAAE;wBACnD,cAAc,EAAE,iBAAiB,CAAC,UAAU,IAAI,KAAK;qBACtD;iBACF;aACF,CAAC;QACJ,KAAK,MAAM;YACT,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,MAAM;oBACZ,qBAAqB,EAAE;wBACrB,eAAe,EAAE,iBAAiB,CAAC,eAAe;qBACnD;iBACF;aACF,CAAC;QACJ,KAAK,OAAO;YACV,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,OAAO;oBACb,kBAAkB,EAAE;wBAClB,MAAM,EAAE,iBAAiB,CAAC,MAAM;qBACjC;iBACF;aACF,CAAC;QACJ,KAAK,SAAS;YACZ,OAAO;gBACL,MAAM,EAAE;oBACN,IAAI,EAAE,SAAS;iBAChB;aACF,CAAC;QAEJ;YACE,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;KACtD;AACH,CAAC;AAED,MAAM,UAAU,4BAA4B,CAC1C,uBAAgD;IAEhD,IAAI,CAAC,uBAAuB,EAAE;QAC5B,OAAO,SAAS,CAAC;KAClB;IAED,IAAI,WAAW,IAAI,uBAAuB,EAAE;QAC1C,+FAA+F;QAC/F,sFAAsF;QACtF,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,YAAY,GAA8B,EAAE,CAAC;IACnD,KAAK,MAAM,GAAG,IAAI,uBAAuB,EAAE;QACzC,MAAM,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,MAAM,YAAY,GAAG,KAAK,CAAC;QAC3B,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;YACnC,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;SAChD;QACD,MAAM,IAAI,GAA0B;YAClC,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC;YACd,iBAAiB,EAAE,uBAAuB,CAAC,GAAG,CAA4B;SAC3E,CAAC;QACF,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,MAAM,CAAC,QAAQ,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACnF,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE;YACpB,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAC5C;aAAM;YACL,YAAY,CAAC,IAAI,CAAC;gBAChB,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;gBAChB,KAAK,EAAE,CAAC,IAAI,CAAC;aACd,CAAC,CAAC;SACJ;KACF;IACD,OAAO,YAAY,CAAC;AACtB,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,gBAAgB,CAAI,KAAQ,EAAE,UAA2B;IACtE,KAAa,CAAC,UAAU,GAAG,UAAU,CAAC;IACvC,OAAO,KAAK,CA
AC;AACf,CAAC;AAED,MAAM,UAAU,yBAAyB,CACvC,iBAAqC;IAErC,OAAO,iBAAiB,CAAC,CAAC,CAAC,iBAAiB,CAAC,MAAM,GAAG,GAAG,GAAG,iBAAiB,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;AAClG,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,IAAc;IAC7C,IAAI,IAAI,CAAC,OAAO,EAAE;QAChB,OAAO,kBAAkB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;KAC1C;SAAM;QACL,OAAO,IAAI,CAAC,OAAQ,CAAC;KACtB;AACH,CAAC;AAED,MAAM,UAAU,qCAAqC,CACnD,gBAA8C;IAE9C,uCACK,gBAAgB,KACnB,OAAO,EAAE;YACP,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;gBACpE,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;gBACF,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC;SACH,IACD;AACJ,CAAC;AAED,MAAM,UAAU,0CAA0C,CACxD,gBAAmD;;IAEnD,uCACK,gBAAgB,KACnB,OAAO,EAAE;YACP,YAAY,EAAE,MAAA,gBAAgB,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,EAAE,EAAE;gBAC9E,MAAM,UAAU,GAAoB;oBAClC,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;iBAChD,CAAC;gBACF,OAAO,UAAU,CAAC;YACpB,CAAC,CAAC;YACF,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,EAAE,EAAE;gBACpE,MAAM,QAAQ,mCACT,eAAe,KAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;gBACF,OAAO,QAAQ,CAAC;YAClB,CAAC,CAAC;SACH,IACD;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,KAAa;IACvC,IAAI,MAAM,EAAE;QACV,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;KACrC;SAAM;QACL,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAC/B,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;SACnC;QACD,OAAO,GAAG,CAAC;KACZ;AACH,CAAC;AAED,SAAS,YAAY,CAAC,OAAY;IAChC,IAAI,OAAO,KAAK,SAAS;QAAE,OAAO,SAAS,CAAC;IAC5C,IAAI,OAAO,KAAK,MAAM;QAAE,OAAO,IAAI,CAAC;IACpC,IAAI,OAAO,KAAK,OAAO;QAAE,OAAO,KAAK,CAAC;IACtC,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB;IACvC,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;QACxE,OAAO;YACL,OAAO,EAAE,YAAY,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC;YACpD,OAAO,EAAE,aAAa,CAAC,GAAG,CAAW;SACtC,CAAC;KACH;SAAM;QACL,OAAO;YACL,OAAO,EAAE,KAAK;YACd,OAAO,EAAE,aAAuB;SACjC,CAAC;KACH;AACH,CAAC;AAED,SAAS,mBAAmB,CAAC,mBAAwB;IACnD,MAAM,cAAc,GAAG,mBAAmB,CAAC;IAC3C,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;QACpF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,YAAY,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;QACvF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,MAAM,CAAC,EAAE;QAC/B,cAAc,CAAC,IAAI,GAAG,mBAAmB,CAAC,MAAM,CAAW,CAAC;QAC5D,OAAO,cAAc,CAAC,MAAM,CAAC,CAAC;KAC/B;IAED,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;QAC3F,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;KACzC;IAED,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;QACvC,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;QAC3E,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;KACvC;IAED,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;QAC3C,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;QACnF,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;KAC3C;IAED,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;QAC3C,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;QACnF,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;KAC3C;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,UAAU,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;QAC7F,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;QAC9C,cAAc,CAAC,kBAAkB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;QACzF,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;KAC9C;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,eAAe,CAAW,CAAC;QAC7E,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,2BAA2B,
CAAC,EAAE;QACpD,cAAc,CAAC,kBAAkB,GAAG,UAAU,CAC5C,mBAAmB,CAAC,2BAA2B,CAAW,CAC3D,CAAC;QACF,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;KACpD;IAED,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;QACnC,cAAc,CAAC,QAAQ,GAAG,mBAAmB,CAAC,UAAU,CAAa,CAAC;QACtE,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;KACnC;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,aAAa,CAAoB,CAAC;QACnF,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;QAChF,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAsB,CAAC;QACzF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;QACjC,cAAc,CAAC,MAAM,GAAG,mBAAmB,CAAC,QAAQ,CAAW,CAAC;QAChE,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;KACjC;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;QAChF,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAW,CAAC;QACxE,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;QACvC,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;QAC5E,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;KACvC;IAED,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,eAAe,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,oBAAoB,CAAW,CAAC,CAAC;QAC/F,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;KAC7C;IAED,IAAI,mBAAmB,CAAC,uBAAuB,CAAC,EAAE;QAChD,cAAc,CAAC,qBAAqB,GAAG,mBAAmB,CAAC,uBAAuB,CAAW,CAAC;QAC9F,OAAO,cAAc,CAAC,uBAAuB,CAAC,CAAC;KAChD;IAED,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;QACtF,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;KAC1C;IAED,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;QACtF,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;KAC1C;IAED,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;QAC9C,cAAc,CAAC,mBAAmB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;QAC1F,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;KAC9C;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;QAClF,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;QACjD,cAAc,CAAC,sBAAsB,GAAG,UAAU,CAChD,mBAAmB,CAAC,wBAAwB,CAAW,CACxD,CAAC;QACF,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;KACjD;IAED,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;QACrC,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAe,CAAC;QAC5E,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;KACrC;IAED,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,kBAAkB,GAAG,YAAY,CAAC,mBAAmB,CAAC,oBAAoB,CAAC,CAAC,CAAC;QAC5F,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;KAC7C;IAED,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAkB,CAAC;QACrF,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;KACxC;IAED,IAAI,mBAAmB,CAAC,2BAA2B,CAAC,EAAE;QACpD,cAAc,CAAC,yBAAyB,GAAG,mBAAmB,CAC5D,2BAA2B,CAClB,CAAC;QACZ,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;KACpD;IAED,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,iBAAiB,CAAW,CAAC;QAClF,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;KAC1C;IAED,IAAI,mBAAmB,CAAC,sBAAsB,CAAC,EAAE;QAC/C,cAAc,CAAC,mBAAmB,GAAG,IAAI,IAAI,CAC3C,mBAAmB,CAAC,sBAAsB,CAAW,CACtD,CAAC;QACF,OAAO,cAAc,CAAC,sBAAsB,CAAC,CAAC;KAC/C;IAED,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;QACnC,cAAc,CAAC,QAAQ,GAAG,UAAU,CAAC,mBAAmB,CAAC,UAAU,CAAW,CAAC,CAAC;QAChF,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;KACnC;IAED,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;QAClF,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;KACtC;IAED,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;QACjC,cAAc,CAAC,QAAQ,GAAG,YAAY,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC;QACtE,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;KACjC;IAED,IAAI,
mBAAmB,CAAC,mBAAmB,CAAC,EAAE;QAC5C,cAAc,CAAC,iBAAiB,GAAG,mBAAmB,CACpD,mBAAmB,CACC,CAAC;QACvB,OAAO,cAAc,CAAC,mBAAmB,CAAC,CAAC;KAC5C;IAED,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,cAAc,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;QAC1F,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;KACzC;IAED,IAAI,mBAAmB,CAAC,6BAA6B,CAAC,EAAE;QACtD,cAAc,CAAC,2BAA2B,GAAG,IAAI,IAAI,CACnD,mBAAmB,CAAC,6BAA6B,CAAW,CAC7D,CAAC;QACF,OAAO,cAAc,CAAC,6BAA6B,CAAC,CAAC;KACtD;IAED,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;QACjD,cAAc,CAAC,sBAAsB,GAAG,mBAAmB,CACzD,wBAAwB,CACK,CAAC;QAChC,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;KACjD;IAED,IAAI,mBAAmB,CAAC,WAAW,CAAC,EAAE;QACpC,cAAc,CAAC,SAAS,GAAG,YAAY,CAAC,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC;QAC1E,OAAO,cAAc,CAAC,WAAW,CAAC,CAAC;KACpC;IAED,OAAO,cAAc,CAAC;AACxB,CAAC;AAED,SAAS,aAAa,CAAC,SAAc;IACnC,MAAM,QAAQ,GAAG,SAAS,CAAC;IAC3B,QAAQ,CAAC,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC;IACnE,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;IAE9B,QAAQ,CAAC,IAAI,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;IACjD,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;IACxB,QAAQ,CAAC,OAAO,GAAG,YAAY,CAAC,SAAS,CAAC,SAAS,CAAC,CAAE,CAAC;IACvD,OAAO,QAAQ,CAAC,SAAS,CAAC,CAAC;IAE3B,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;QACzB,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAW,CAAC;QACpD,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;KAC7B;IAED,IAAI,SAAS,CAAC,WAAW,CAAC,EAAE;QAC1B,QAAQ,CAAC,SAAS,GAAG,SAAS,CAAC,WAAW,CAAW,CAAC;QACtD,OAAO,QAAQ,CAAC,WAAW,CAAC,CAAC;KAC9B;IAED,IAAI,SAAS,CAAC,kBAAkB,CAAC,EAAE;QACjC,QAAQ,CAAC,gBAAgB,GAAG,YAAY,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC,CAAC;QACxE,OAAO,QAAQ,CAAC,kBAAkB,CAAC,CAAC;KACrC;IAED,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;QACzB,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAC,CAAC;QAC1C,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;KAC7B;IAED,IAAI,SAAS,CAAC,MAAM,CAAC,EAAE;QACrB,QAAQ,CAAC,QAAQ,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;QACrD,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;KACzB;IAED,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;QAC3B,QAAQ,CAAC,yBAAyB,GAAG,SAAS,CAAC,YAAY,CAAC,CAAC;QAC7D,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;KAC/B;IAED,IAAI,SAAS,CAAC,iBAAiB,CAAC,EAAE;QAChC,QAAQ,CAAC,eAAe,GAAG,YAAY,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC,CAAC;QACtE,OAAO,QAAQ,CAAC,iBAAiB,CAAC,CAAC;KACpC;IACD,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,eAAe,CAAC,eAAoB;IAC3C,OAAO;QACL,IAAI,EAAE,aAAa,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;KAC7C,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,YAAiB;IACrC,OAAO;QACL,GAAG,EAAE,YAAY,CAAC,KAAK,CAAC;QACxB,KAAK,EAAE,YAAY,CAAC,OAAO,CAAC;KAC7B,CAAC;AACJ,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB;IACvC,IACE,aAAa,KAAK,SAAS;QAC3B,aAAa,CAAC,QAAQ,CAAC,KAAK,SAAS;QACrC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS,EAC5C;QACA,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,UAAU,GAAG,EAAE,CAAC;IACtB,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,YAAY,KAAK,EAAE;QACnD,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,YAAiB,EAAE,EAAE;YAC3D,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC,CAAC;QAC9C,CAAC,CAAC,CAAC;KACJ;SAAM;QACL,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;KAC/D;IAED,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,CAAC;AACpC,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,cAAqB;IACpD,MAAM,SAAS,GAAG,EAAE,CAAC;IAErB,IAAI,cAAc,YAAY,KAAK,EAAE;QACnC,cAAc,CAAC,OAAO,CAAC,CAAC,SAAc,EAAE,EAAE;YACxC,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC,CAAC;QAC3C,CAAC,CAAC,CAAC;KACJ;SAAM;QACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC,CAAC;KAC/C;IAED,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,MAAM,UAAU,mBAAmB,CAAC,iBAAwB;IAC1D,MAAM,YAAY,GAAG,EAAE,CAAC;IAExB,IAAI,iBAAiB,YAAY,KAAK,EAAE;QACtC,iBAAiB,CAAC,OAAO,CAAC,CAAC,eAAoB,EAAE,EAAE;YACjD,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,CAAC,CAAC;QACtD,CAAC,CAAC,CAAC;KACJ;SAAM;QACL,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAC,CAAC,CAAC;KACvD;IAED,OAAO,Y
AAY,CAAC;AACtB,CAAC;AAED,MAAM,SAAS,CAAC,CAAC,yBAAyB,CACxC,oBAA4D;IAE5D,IAAI,SAAS,GAAgB,EAAE,CAAC;IAChC,IAAI,UAAU,GAAiB,EAAE,CAAC;IAElC,IAAI,oBAAoB,CAAC,SAAS;QAAE,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;IAC/E,IAAI,oBAAoB,CAAC,UAAU;QAAE,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;IAElF,IAAI,cAAc,GAAG,CAAC,CAAC;IACvB,IAAI,eAAe,GAAG,CAAC,CAAC;IAExB,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,IAAI,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE;QAC/E,IAAI,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK,EAAE;YACvE,MAAM;gBACJ,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;gBACtC,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;gBAClC,OAAO,EAAE,KAAK;aACf,CAAC;YACF,EAAE,cAAc,CAAC;SAClB;aAAM;YACL,MAAM;gBACJ,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;gBACxC,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;gBACpC,OAAO,EAAE,IAAI;aACd,CAAC;YACF,EAAE,eAAe,CAAC;SACnB;KACF;IAED,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,cAAc,EAAE;QAC1D,MAAM;YACJ,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;YACtC,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;YAClC,OAAO,EAAE,KAAK;SACf,CAAC;KACH;IAED,OAAO,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,eAAe,EAAE;QAC7D,MAAM;YACJ,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;YACxC,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;YACpC,OAAO,EAAE,IAAI;SACd,CAAC;KACH;AACH,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpHeaders, isNode, URLBuilder, TokenCredential } from \"@azure/core-http\";\n\nimport {\n BlobQueryArrowConfiguration,\n BlobQueryCsvTextConfiguration,\n BlobQueryJsonTextConfiguration,\n BlobQueryParquetConfiguration,\n} from \"../Clients\";\nimport {\n QuerySerialization,\n BlobTags,\n BlobName,\n ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse,\n BlobItemInternal,\n BlobPrefix,\n BlobType,\n LeaseStatusType,\n LeaseStateType,\n LeaseDurationType,\n CopyStatusType,\n AccessTier,\n ArchiveStatus,\n RehydratePriority,\n BlobImmutabilityPolicyMode,\n BlobTag,\n PageRange,\n ClearRange,\n BlobPropertiesInternal,\n} from \"../generated/src/models\";\nimport { DevelopmentConnectionString, HeaderConstants, URLConstants } from \"./constants\";\nimport {\n Tags,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n HttpAuthorization,\n} from \"../models\";\nimport {\n ListBlobsFlatSegmentResponseModel,\n BlobItemInternal as BlobItemInternalModel,\n ListBlobsHierarchySegmentResponseModel,\n BlobPrefix as BlobPrefixModel,\n PageBlobGetPageRangesDiffResponseModel,\n PageRangeInfo,\n} from \"../generatedModels\";\n\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. 
A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
Because what customers passes in is a plain name instead of a URL.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata\n *\n * @param url -\n */\nexport function escapeURLPath(url: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path || \"/\";\n\n path = escape(path);\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\nexport interface ConnectionString {\n kind: \"AccountConnString\" | \"SASConnString\";\n url: string;\n accountName: string;\n accountKey?: any;\n accountSas?: string;\n proxyUri?: string; // Development Connection String may contain proxyUri\n}\n\nfunction getProxyUriFromDevConnString(connectionString: string): string {\n // Development Connection String\n // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key\n let proxyUri = \"\";\n if (connectionString.search(\"DevelopmentStorageProxyUri=\") !== -1) {\n // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri\n const matchCredentials = connectionString.split(\";\");\n for (const element of matchCredentials) {\n if (element.trim().startsWith(\"DevelopmentStorageProxyUri=\")) {\n proxyUri = element.trim().match(\"DevelopmentStorageProxyUri=(.*)\")![1];\n }\n }\n }\n return proxyUri;\n}\n\nexport function getValueInConnString(\n connectionString: string,\n argument:\n | \"BlobEndpoint\"\n | \"AccountName\"\n | \"AccountKey\"\n | \"DefaultEndpointsProtocol\"\n | \"EndpointSuffix\"\n | \"SharedAccessSignature\"\n): string {\n const elements = connectionString.split(\";\");\n for (const element of elements) {\n if (element.trim().startsWith(argument)) {\n return element.trim().match(argument + \"=(.*)\")![1];\n }\n }\n return \"\";\n}\n\n/**\n * Extracts the parts of an Azure Storage account connection string.\n *\n * @param connectionString - Connection string.\n * @returns String key value pairs of the storage account's url and credentials.\n */\nexport function extractConnectionStringParts(connectionString: string): ConnectionString {\n let proxyUri = \"\";\n\n if (connectionString.startsWith(\"UseDevelopmentStorage=true\")) {\n // Development connection string\n proxyUri = getProxyUriFromDevConnString(connectionString);\n connectionString = DevelopmentConnectionString;\n }\n\n // Matching BlobEndpoint in the Account connection string\n let blobEndpoint = getValueInConnString(connectionString, \"BlobEndpoint\");\n // Slicing off '/' at the end if exists\n // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)\n blobEndpoint = blobEndpoint.endsWith(\"/\") ? 
blobEndpoint.slice(0, -1) : blobEndpoint;\n\n if (\n connectionString.search(\"DefaultEndpointsProtocol=\") !== -1 &&\n connectionString.search(\"AccountKey=\") !== -1\n ) {\n // Account connection string\n\n let defaultEndpointsProtocol = \"\";\n let accountName = \"\";\n let accountKey = Buffer.from(\"accountKey\", \"base64\");\n let endpointSuffix = \"\";\n\n // Get account name and key\n accountName = getValueInConnString(connectionString, \"AccountName\");\n accountKey = Buffer.from(getValueInConnString(connectionString, \"AccountKey\"), \"base64\");\n\n if (!blobEndpoint) {\n // BlobEndpoint is not present in the Account connection string\n // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`\n\n defaultEndpointsProtocol = getValueInConnString(connectionString, \"DefaultEndpointsProtocol\");\n const protocol = defaultEndpointsProtocol!.toLowerCase();\n if (protocol !== \"https\" && protocol !== \"http\") {\n throw new Error(\n \"Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'\"\n );\n }\n\n endpointSuffix = getValueInConnString(connectionString, \"EndpointSuffix\");\n if (!endpointSuffix) {\n throw new Error(\"Invalid EndpointSuffix in the provided Connection String\");\n }\n blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n }\n\n if (!accountName) {\n throw new Error(\"Invalid AccountName in the provided Connection String\");\n } else if (accountKey.length === 0) {\n throw new Error(\"Invalid AccountKey in the provided Connection String\");\n }\n\n return {\n kind: \"AccountConnString\",\n url: blobEndpoint,\n accountName,\n accountKey,\n proxyUri,\n };\n } else {\n // SAS connection string\n\n const accountSas = getValueInConnString(connectionString, \"SharedAccessSignature\");\n const accountName = getAccountNameFromUrl(blobEndpoint);\n if (!blobEndpoint) {\n throw new Error(\"Invalid BlobEndpoint in the provided SAS Connection String\");\n } else if (!accountSas) {\n throw new Error(\"Invalid SharedAccessSignature in the provided SAS Connection String\");\n }\n\n return { kind: \"SASConnString\", url: blobEndpoint, accountName, accountSas };\n }\n}\n\n/**\n * Internal escape method implemented Strategy Two mentioned in escapeURL() description.\n *\n * @param text -\n */\nfunction escape(text: string): string {\n return encodeURIComponent(text)\n .replace(/%2F/g, \"/\") // Don't escape for \"/\"\n .replace(/'/g, \"%27\") // Escape for \"'\"\n .replace(/\\+/g, \"%20\")\n .replace(/%25/g, \"%\"); // Revert encoded \"%\"\n}\n\n/**\n * Append a string to URL path. Will remove duplicated \"/\" in front of the string\n * when URL path ends with a \"/\".\n *\n * @param url - Source URL string\n * @param name - String to be appended to URL\n * @returns An updated URL string\n */\nexport function appendToURLPath(url: string, name: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path ? (path.endsWith(\"/\") ? `${path}${name}` : `${path}/${name}`) : name;\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\n/**\n * Set URL parameter name and value. If name exists in URL parameters, old value\n * will be replaced by name key. 
If not provide value, the parameter will be deleted.\n *\n * @param url - Source URL string\n * @param name - Parameter name\n * @param value - Parameter value\n * @returns An updated URL string\n */\nexport function setURLParameter(url: string, name: string, value?: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setQueryParameter(name, value);\n return urlParsed.toString();\n}\n\n/**\n * Get URL parameter by name.\n *\n * @param url -\n * @param name -\n */\nexport function getURLParameter(url: string, name: string): string | string[] | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getQueryParameterValue(name);\n}\n\n/**\n * Set URL host.\n *\n * @param url - Source URL string\n * @param host - New host string\n * @returns An updated URL string\n */\nexport function setURLHost(url: string, host: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setHost(host);\n return urlParsed.toString();\n}\n\n/**\n * Get URL path from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPath(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getPath();\n}\n\n/**\n * Get URL scheme from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLScheme(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getScheme();\n}\n\n/**\n * Get URL path and query from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPathAndQuery(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n const pathString = urlParsed.getPath();\n if (!pathString) {\n throw new RangeError(\"Invalid url without valid path.\");\n }\n\n let queryString = urlParsed.getQuery() || \"\";\n queryString = queryString.trim();\n if (queryString !== \"\") {\n queryString = queryString.startsWith(\"?\") ? queryString : `?${queryString}`; // Ensure query string start with '?'\n }\n\n return `${pathString}${queryString}`;\n}\n\n/**\n * Get URL query key value pairs from an URL string.\n *\n * @param url -\n */\nexport function getURLQueries(url: string): { [key: string]: string } {\n let queryString = URLBuilder.parse(url).getQuery();\n if (!queryString) {\n return {};\n }\n\n queryString = queryString.trim();\n queryString = queryString.startsWith(\"?\") ? 
queryString.substr(1) : queryString;\n\n let querySubStrings: string[] = queryString.split(\"&\");\n querySubStrings = querySubStrings.filter((value: string) => {\n const indexOfEqual = value.indexOf(\"=\");\n const lastIndexOfEqual = value.lastIndexOf(\"=\");\n return (\n indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1\n );\n });\n\n const queries: { [key: string]: string } = {};\n for (const querySubString of querySubStrings) {\n const splitResults = querySubString.split(\"=\");\n const key: string = splitResults[0];\n const value: string = splitResults[1];\n queries[key] = value;\n }\n\n return queries;\n}\n\n/**\n * Append a string to URL query.\n *\n * @param url - Source URL string.\n * @param queryParts - String to be appended to the URL query.\n * @returns An updated URL string.\n */\nexport function appendToURLQuery(url: string, queryParts: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let query = urlParsed.getQuery();\n if (query) {\n query += \"&\" + queryParts;\n } else {\n query = queryParts;\n }\n\n urlParsed.setQuery(query);\n return urlParsed.toString();\n}\n\n/**\n * Rounds a date off to seconds.\n *\n * @param date -\n * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;\n * If false, YYYY-MM-DDThh:mm:ssZ will be returned.\n * @returns Date string in ISO8061 format, with or without 7 milliseconds component\n */\nexport function truncatedISO8061Date(date: Date, withMilliseconds: boolean = true): string {\n // Date.toISOString() will return like \"2018-10-29T06:34:36.139Z\"\n const dateString = date.toISOString();\n\n return withMilliseconds\n ? dateString.substring(0, dateString.length - 1) + \"0000\" + \"Z\"\n : dateString.substring(0, dateString.length - 5) + \"Z\";\n}\n\n/**\n * Base64 encode.\n *\n * @param content -\n */\nexport function base64encode(content: string): string {\n return !isNode ? btoa(content) : Buffer.from(content).toString(\"base64\");\n}\n\n/**\n * Base64 decode.\n *\n * @param encodedString -\n */\nexport function base64decode(encodedString: string): string {\n return !isNode ? 
atob(encodedString) : Buffer.from(encodedString, \"base64\").toString();\n}\n\n/**\n * Generate a 64 bytes base64 block ID string.\n *\n * @param blockIndex -\n */\nexport function generateBlockID(blockIDPrefix: string, blockIndex: number): string {\n // To generate a 64 bytes base64 string, source string should be 48\n const maxSourceStringLength = 48;\n\n // A blob can have a maximum of 100,000 uncommitted blocks at any given time\n const maxBlockIndexLength = 6;\n\n const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;\n\n if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {\n blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);\n }\n const res =\n blockIDPrefix +\n padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, \"0\");\n return base64encode(res);\n}\n\n/**\n * Delay specified time interval.\n *\n * @param timeInMs -\n * @param aborter -\n * @param abortError -\n */\nexport async function delay(\n timeInMs: number,\n aborter?: AbortSignalLike,\n abortError?: Error\n): Promise {\n return new Promise((resolve, reject) => {\n /* eslint-disable-next-line prefer-const */\n let timeout: any;\n\n const abortHandler = () => {\n if (timeout !== undefined) {\n clearTimeout(timeout);\n }\n reject(abortError);\n };\n\n const resolveHandler = () => {\n if (aborter !== undefined) {\n aborter.removeEventListener(\"abort\", abortHandler);\n }\n resolve();\n };\n\n timeout = setTimeout(resolveHandler, timeInMs);\n\n if (aborter !== undefined) {\n aborter.addEventListener(\"abort\", abortHandler);\n }\n });\n}\n\n/**\n * String.prototype.padStart()\n *\n * @param currentString -\n * @param targetLength -\n * @param padString -\n */\nexport function padStart(\n currentString: string,\n targetLength: number,\n padString: string = \" \"\n): string {\n // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes\n if (String.prototype.padStart) {\n return currentString.padStart(targetLength, padString);\n }\n\n padString = padString || \" \";\n if (currentString.length > targetLength) {\n return currentString;\n } else {\n targetLength = targetLength - currentString.length;\n if (targetLength > padString.length) {\n padString += padString.repeat(targetLength / padString.length);\n }\n return padString.slice(0, targetLength) + currentString;\n }\n}\n\nexport function sanitizeURL(url: string): string {\n let safeURL: string = url;\n if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) {\n safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, \"*****\");\n }\n\n return safeURL;\n}\n\nexport function sanitizeHeaders(originalHeader: HttpHeaders): HttpHeaders {\n const headers: HttpHeaders = new HttpHeaders();\n for (const header of originalHeader.headersArray()) {\n if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) {\n headers.set(header.name, \"*****\");\n } else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {\n headers.set(header.name, sanitizeURL(header.value));\n } else {\n headers.set(header.name, header.value);\n }\n }\n\n return headers;\n}\n/**\n * If two strings are equal when compared case insensitive.\n *\n * @param str1 -\n * @param str2 -\n */\nexport function iEqual(str1: string, str2: string): boolean {\n return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();\n}\n\n/**\n * Extracts account name from the url\n * @param url - url to extract the account name from\n * @returns with the account name\n */\nexport 
function getAccountNameFromUrl(url: string): string {\n const parsedUrl: URLBuilder = URLBuilder.parse(url);\n let accountName;\n try {\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n accountName = parsedUrl.getHost()!.split(\".\")[0];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/\n // .getPath() -> /devstoreaccount1/\n accountName = parsedUrl.getPath()!.split(\"/\")[1];\n } else {\n // Custom domain case: \"https://customdomain.com/containername/blob\".\n accountName = \"\";\n }\n return accountName;\n } catch (error: any) {\n throw new Error(\"Unable to extract accountName with provided information.\");\n }\n}\n\nexport function isIpEndpointStyle(parsedUrl: URLBuilder): boolean {\n if (parsedUrl.getHost() === undefined) {\n return false;\n }\n\n const host =\n parsedUrl.getHost()! + (parsedUrl.getPort() === undefined ? \"\" : \":\" + parsedUrl.getPort());\n\n // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.\n // Case 2: localhost(:port), use broad regex to match port part.\n // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.\n // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.\n return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])){3}(:[0-9]+)?$/.test(\n host\n );\n}\n\n/**\n * Convert Tags to encoded string.\n *\n * @param tags -\n */\nexport function toBlobTagsString(tags?: Tags): string | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const tagPairs = [];\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);\n }\n }\n\n return tagPairs.join(\"&\");\n}\n\n/**\n * Convert Tags type to BlobTags.\n *\n * @param tags -\n */\nexport function toBlobTags(tags?: Tags): BlobTags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: BlobTags = {\n blobTagSet: [],\n };\n\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n res.blobTagSet.push({\n key,\n value,\n });\n }\n }\n return res;\n}\n\n/**\n * Covert BlobTags to Tags type.\n *\n * @param tags -\n */\nexport function toTags(tags?: BlobTags): Tags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: Tags = {};\n for (const blobTag of tags.blobTagSet) {\n res[blobTag.key] = blobTag.value;\n }\n return res;\n}\n\n/**\n * Convert BlobQueryTextConfiguration to QuerySerialization type.\n *\n * @param textConfiguration -\n */\nexport function toQuerySerialization(\n textConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration\n | BlobQueryParquetConfiguration\n): QuerySerialization | undefined {\n if (textConfiguration === undefined) {\n return undefined;\n }\n\n switch (textConfiguration.kind) {\n case \"csv\":\n return {\n format: {\n type: \"delimited\",\n delimitedTextConfiguration: {\n columnSeparator: textConfiguration.columnSeparator || \",\",\n fieldQuote: textConfiguration.fieldQuote || \"\",\n recordSeparator: textConfiguration.recordSeparator,\n escapeChar: 
textConfiguration.escapeCharacter || \"\",\n headersPresent: textConfiguration.hasHeaders || false,\n },\n },\n };\n case \"json\":\n return {\n format: {\n type: \"json\",\n jsonTextConfiguration: {\n recordSeparator: textConfiguration.recordSeparator,\n },\n },\n };\n case \"arrow\":\n return {\n format: {\n type: \"arrow\",\n arrowConfiguration: {\n schema: textConfiguration.schema,\n },\n },\n };\n case \"parquet\":\n return {\n format: {\n type: \"parquet\",\n },\n };\n\n default:\n throw Error(\"Invalid BlobQueryTextConfiguration.\");\n }\n}\n\nexport function parseObjectReplicationRecord(\n objectReplicationRecord?: Record\n): ObjectReplicationPolicy[] | undefined {\n if (!objectReplicationRecord) {\n return undefined;\n }\n\n if (\"policy-id\" in objectReplicationRecord) {\n // If the dictionary contains a key with policy id, we are not required to do any parsing since\n // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.\n return undefined;\n }\n\n const orProperties: ObjectReplicationPolicy[] = [];\n for (const key in objectReplicationRecord) {\n const ids = key.split(\"_\");\n const policyPrefix = \"or-\";\n if (ids[0].startsWith(policyPrefix)) {\n ids[0] = ids[0].substring(policyPrefix.length);\n }\n const rule: ObjectReplicationRule = {\n ruleId: ids[1],\n replicationStatus: objectReplicationRecord[key] as ObjectReplicationStatus,\n };\n const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]);\n if (policyIndex > -1) {\n orProperties[policyIndex].rules.push(rule);\n } else {\n orProperties.push({\n policyId: ids[0],\n rules: [rule],\n });\n }\n }\n return orProperties;\n}\n\n/**\n * Attach a TokenCredential to an object.\n *\n * @param thing -\n * @param credential -\n */\nexport function attachCredential(thing: T, credential: TokenCredential): T {\n (thing as any).credential = credential;\n return thing;\n}\n\nexport function httpAuthorizationToString(\n httpAuthorization?: HttpAuthorization\n): string | undefined {\n return httpAuthorization ? 
httpAuthorization.scheme + \" \" + httpAuthorization.value : undefined;\n}\n\nexport function BlobNameToString(name: BlobName): string {\n if (name.encoded) {\n return decodeURIComponent(name.content!);\n } else {\n return name.content!;\n }\n}\n\nexport function ConvertInternalResponseOfListBlobFlat(\n internalResponse: ListBlobsFlatSegmentResponse\n): ListBlobsFlatSegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nexport function ConvertInternalResponseOfListBlobHierarchy(\n internalResponse: ListBlobsHierarchySegmentResponse\n): ListBlobsHierarchySegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefixModel = {\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nfunction decodeBase64String(value: string): Uint8Array {\n if (isNode) {\n return Buffer.from(value, \"base64\");\n } else {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n }\n}\n\nfunction ParseBoolean(content: any) {\n if (content === undefined) return undefined;\n if (content === \"true\") return true;\n if (content === \"false\") return false;\n return undefined;\n}\n\nfunction ParseBlobName(blobNameInXML: any): BlobName {\n if (blobNameInXML[\"$\"] !== undefined && blobNameInXML[\"#\"] !== undefined) {\n return {\n encoded: ParseBoolean(blobNameInXML[\"$\"][\"Encoded\"]),\n content: blobNameInXML[\"#\"] as string,\n };\n } else {\n return {\n encoded: false,\n content: blobNameInXML as string,\n };\n }\n}\n\nfunction ParseBlobProperties(blobPropertiesInXML: any): BlobPropertiesInternal {\n const blobProperties = blobPropertiesInXML;\n if (blobPropertiesInXML[\"Creation-Time\"]) {\n blobProperties.createdOn = new Date(blobPropertiesInXML[\"Creation-Time\"] as string);\n delete blobProperties[\"Creation-Time\"];\n }\n\n if (blobPropertiesInXML[\"Last-Modified\"]) {\n blobProperties.lastModified = new Date(blobPropertiesInXML[\"Last-Modified\"] as string);\n delete blobProperties[\"Last-Modified\"];\n }\n\n if (blobPropertiesInXML[\"Etag\"]) {\n blobProperties.etag = blobPropertiesInXML[\"Etag\"] as string;\n delete blobProperties[\"Etag\"];\n }\n\n if (blobPropertiesInXML[\"Content-Length\"]) {\n blobProperties.contentLength = parseFloat(blobPropertiesInXML[\"Content-Length\"] as string);\n delete blobProperties[\"Content-Length\"];\n }\n\n if (blobPropertiesInXML[\"Content-Type\"]) {\n blobProperties.contentType = blobPropertiesInXML[\"Content-Type\"] as string;\n delete blobProperties[\"Content-Type\"];\n }\n\n if (blobPropertiesInXML[\"Content-Encoding\"]) {\n blobProperties.contentEncoding = blobPropertiesInXML[\"Content-Encoding\"] as string;\n delete blobProperties[\"Content-Encoding\"];\n }\n\n if (blobPropertiesInXML[\"Content-Language\"]) {\n blobProperties.contentLanguage = blobPropertiesInXML[\"Content-Language\"] as string;\n delete 
blobProperties[\"Content-Language\"];\n }\n\n if (blobPropertiesInXML[\"Content-MD5\"]) {\n blobProperties.contentMD5 = decodeBase64String(blobPropertiesInXML[\"Content-MD5\"] as string);\n delete blobProperties[\"Content-MD5\"];\n }\n\n if (blobPropertiesInXML[\"Content-Disposition\"]) {\n blobProperties.contentDisposition = blobPropertiesInXML[\"Content-Disposition\"] as string;\n delete blobProperties[\"Content-Disposition\"];\n }\n\n if (blobPropertiesInXML[\"Cache-Control\"]) {\n blobProperties.cacheControl = blobPropertiesInXML[\"Cache-Control\"] as string;\n delete blobProperties[\"Cache-Control\"];\n }\n\n if (blobPropertiesInXML[\"x-ms-blob-sequence-number\"]) {\n blobProperties.blobSequenceNumber = parseFloat(\n blobPropertiesInXML[\"x-ms-blob-sequence-number\"] as string\n );\n delete blobProperties[\"x-ms-blob-sequence-number\"];\n }\n\n if (blobPropertiesInXML[\"BlobType\"]) {\n blobProperties.blobType = blobPropertiesInXML[\"BlobType\"] as BlobType;\n delete blobProperties[\"BlobType\"];\n }\n\n if (blobPropertiesInXML[\"LeaseStatus\"]) {\n blobProperties.leaseStatus = blobPropertiesInXML[\"LeaseStatus\"] as LeaseStatusType;\n delete blobProperties[\"LeaseStatus\"];\n }\n\n if (blobPropertiesInXML[\"LeaseState\"]) {\n blobProperties.leaseState = blobPropertiesInXML[\"LeaseState\"] as LeaseStateType;\n delete blobProperties[\"LeaseState\"];\n }\n\n if (blobPropertiesInXML[\"LeaseDuration\"]) {\n blobProperties.leaseDuration = blobPropertiesInXML[\"LeaseDuration\"] as LeaseDurationType;\n delete blobProperties[\"LeaseDuration\"];\n }\n\n if (blobPropertiesInXML[\"CopyId\"]) {\n blobProperties.copyId = blobPropertiesInXML[\"CopyId\"] as string;\n delete blobProperties[\"CopyId\"];\n }\n\n if (blobPropertiesInXML[\"CopyStatus\"]) {\n blobProperties.copyStatus = blobPropertiesInXML[\"CopyStatus\"] as CopyStatusType;\n delete blobProperties[\"CopyStatus\"];\n }\n\n if (blobPropertiesInXML[\"CopySource\"]) {\n blobProperties.copySource = blobPropertiesInXML[\"CopySource\"] as string;\n delete blobProperties[\"CopySource\"];\n }\n\n if (blobPropertiesInXML[\"CopyProgress\"]) {\n blobProperties.copyProgress = blobPropertiesInXML[\"CopyProgress\"] as string;\n delete blobProperties[\"CopyProgress\"];\n }\n\n if (blobPropertiesInXML[\"CopyCompletionTime\"]) {\n blobProperties.copyCompletedOn = new Date(blobPropertiesInXML[\"CopyCompletionTime\"] as string);\n delete blobProperties[\"CopyCompletionTime\"];\n }\n\n if (blobPropertiesInXML[\"CopyStatusDescription\"]) {\n blobProperties.copyStatusDescription = blobPropertiesInXML[\"CopyStatusDescription\"] as string;\n delete blobProperties[\"CopyStatusDescription\"];\n }\n\n if (blobPropertiesInXML[\"ServerEncrypted\"]) {\n blobProperties.serverEncrypted = ParseBoolean(blobPropertiesInXML[\"ServerEncrypted\"]);\n delete blobProperties[\"ServerEncrypted\"];\n }\n\n if (blobPropertiesInXML[\"IncrementalCopy\"]) {\n blobProperties.incrementalCopy = ParseBoolean(blobPropertiesInXML[\"IncrementalCopy\"]);\n delete blobProperties[\"IncrementalCopy\"];\n }\n\n if (blobPropertiesInXML[\"DestinationSnapshot\"]) {\n blobProperties.destinationSnapshot = blobPropertiesInXML[\"DestinationSnapshot\"] as string;\n delete blobProperties[\"DestinationSnapshot\"];\n }\n\n if (blobPropertiesInXML[\"DeletedTime\"]) {\n blobProperties.deletedOn = new Date(blobPropertiesInXML[\"DeletedTime\"] as string);\n delete blobProperties[\"DeletedTime\"];\n }\n\n if (blobPropertiesInXML[\"RemainingRetentionDays\"]) {\n blobProperties.remainingRetentionDays = 
parseFloat(\n blobPropertiesInXML[\"RemainingRetentionDays\"] as string\n );\n delete blobProperties[\"RemainingRetentionDays\"];\n }\n\n if (blobPropertiesInXML[\"AccessTier\"]) {\n blobProperties.accessTier = blobPropertiesInXML[\"AccessTier\"] as AccessTier;\n delete blobProperties[\"AccessTier\"];\n }\n\n if (blobPropertiesInXML[\"AccessTierInferred\"]) {\n blobProperties.accessTierInferred = ParseBoolean(blobPropertiesInXML[\"AccessTierInferred\"]);\n delete blobProperties[\"AccessTierInferred\"];\n }\n\n if (blobPropertiesInXML[\"ArchiveStatus\"]) {\n blobProperties.archiveStatus = blobPropertiesInXML[\"ArchiveStatus\"] as ArchiveStatus;\n delete blobProperties[\"ArchiveStatus\"];\n }\n\n if (blobPropertiesInXML[\"CustomerProvidedKeySha256\"]) {\n blobProperties.customerProvidedKeySha256 = blobPropertiesInXML[\n \"CustomerProvidedKeySha256\"\n ] as string;\n delete blobProperties[\"CustomerProvidedKeySha256\"];\n }\n\n if (blobPropertiesInXML[\"EncryptionScope\"]) {\n blobProperties.encryptionScope = blobPropertiesInXML[\"EncryptionScope\"] as string;\n delete blobProperties[\"EncryptionScope\"];\n }\n\n if (blobPropertiesInXML[\"AccessTierChangeTime\"]) {\n blobProperties.accessTierChangedOn = new Date(\n blobPropertiesInXML[\"AccessTierChangeTime\"] as string\n );\n delete blobProperties[\"AccessTierChangeTime\"];\n }\n\n if (blobPropertiesInXML[\"TagCount\"]) {\n blobProperties.tagCount = parseFloat(blobPropertiesInXML[\"TagCount\"] as string);\n delete blobProperties[\"TagCount\"];\n }\n\n if (blobPropertiesInXML[\"Expiry-Time\"]) {\n blobProperties.expiresOn = new Date(blobPropertiesInXML[\"Expiry-Time\"] as string);\n delete blobProperties[\"Expiry-Time\"];\n }\n\n if (blobPropertiesInXML[\"Sealed\"]) {\n blobProperties.isSealed = ParseBoolean(blobPropertiesInXML[\"Sealed\"]);\n delete blobProperties[\"Sealed\"];\n }\n\n if (blobPropertiesInXML[\"RehydratePriority\"]) {\n blobProperties.rehydratePriority = blobPropertiesInXML[\n \"RehydratePriority\"\n ] as RehydratePriority;\n delete blobProperties[\"RehydratePriority\"];\n }\n\n if (blobPropertiesInXML[\"LastAccessTime\"]) {\n blobProperties.lastAccessedOn = new Date(blobPropertiesInXML[\"LastAccessTime\"] as string);\n delete blobProperties[\"LastAccessTime\"];\n }\n\n if (blobPropertiesInXML[\"ImmutabilityPolicyUntilDate\"]) {\n blobProperties.immutabilityPolicyExpiresOn = new Date(\n blobPropertiesInXML[\"ImmutabilityPolicyUntilDate\"] as string\n );\n delete blobProperties[\"ImmutabilityPolicyUntilDate\"];\n }\n\n if (blobPropertiesInXML[\"ImmutabilityPolicyMode\"]) {\n blobProperties.immutabilityPolicyMode = blobPropertiesInXML[\n \"ImmutabilityPolicyMode\"\n ] as BlobImmutabilityPolicyMode;\n delete blobProperties[\"ImmutabilityPolicyMode\"];\n }\n\n if (blobPropertiesInXML[\"LegalHold\"]) {\n blobProperties.legalHold = ParseBoolean(blobPropertiesInXML[\"LegalHold\"]);\n delete blobProperties[\"LegalHold\"];\n }\n\n return blobProperties;\n}\n\nfunction ParseBlobItem(blobInXML: any): BlobItemInternal {\n const blobItem = blobInXML;\n blobItem.properties = ParseBlobProperties(blobInXML[\"Properties\"]);\n delete blobItem[\"Properties\"];\n\n blobItem.name = ParseBlobName(blobInXML[\"Name\"]);\n delete blobItem[\"Name\"];\n blobItem.deleted = ParseBoolean(blobInXML[\"Deleted\"])!;\n delete blobItem[\"Deleted\"];\n\n if (blobInXML[\"Snapshot\"]) {\n blobItem.snapshot = blobInXML[\"Snapshot\"] as string;\n delete blobItem[\"Snapshot\"];\n }\n\n if (blobInXML[\"VersionId\"]) {\n blobItem.versionId = blobInXML[\"VersionId\"] 
as string;\n delete blobItem[\"VersionId\"];\n }\n\n if (blobInXML[\"IsCurrentVersion\"]) {\n blobItem.isCurrentVersion = ParseBoolean(blobInXML[\"IsCurrentVersion\"]);\n delete blobItem[\"IsCurrentVersion\"];\n }\n\n if (blobInXML[\"Metadata\"]) {\n blobItem.metadata = blobInXML[\"Metadata\"];\n delete blobItem[\"Metadata\"];\n }\n\n if (blobInXML[\"Tags\"]) {\n blobItem.blobTags = ParseBlobTags(blobInXML[\"Tags\"]);\n delete blobItem[\"Tags\"];\n }\n\n if (blobInXML[\"OrMetadata\"]) {\n blobItem.objectReplicationMetadata = blobInXML[\"OrMetadata\"];\n delete blobItem[\"OrMetadata\"];\n }\n\n if (blobInXML[\"HasVersionsOnly\"]) {\n blobItem.hasVersionsOnly = ParseBoolean(blobInXML[\"HasVersionsOnly\"]);\n delete blobItem[\"HasVersionsOnly\"];\n }\n return blobItem;\n}\n\nfunction ParseBlobPrefix(blobPrefixInXML: any): BlobPrefix {\n return {\n name: ParseBlobName(blobPrefixInXML[\"Name\"]),\n };\n}\n\nfunction ParseBlobTag(blobTagInXML: any): BlobTag {\n return {\n key: blobTagInXML[\"Key\"],\n value: blobTagInXML[\"Value\"],\n };\n}\n\nfunction ParseBlobTags(blobTagsInXML: any): BlobTags | undefined {\n if (\n blobTagsInXML === undefined ||\n blobTagsInXML[\"TagSet\"] === undefined ||\n blobTagsInXML[\"TagSet\"][\"Tag\"] === undefined\n ) {\n return undefined;\n }\n\n const blobTagSet = [];\n if (blobTagsInXML[\"TagSet\"][\"Tag\"] instanceof Array) {\n blobTagsInXML[\"TagSet\"][\"Tag\"].forEach((blobTagInXML: any) => {\n blobTagSet.push(ParseBlobTag(blobTagInXML));\n });\n } else {\n blobTagSet.push(ParseBlobTag(blobTagsInXML[\"TagSet\"][\"Tag\"]));\n }\n\n return { blobTagSet: blobTagSet };\n}\n\nexport function ProcessBlobItems(blobArrayInXML: any[]): BlobItemInternal[] {\n const blobItems = [];\n\n if (blobArrayInXML instanceof Array) {\n blobArrayInXML.forEach((blobInXML: any) => {\n blobItems.push(ParseBlobItem(blobInXML));\n });\n } else {\n blobItems.push(ParseBlobItem(blobArrayInXML));\n }\n\n return blobItems;\n}\n\nexport function ProcessBlobPrefixes(blobPrefixesInXML: any[]): BlobPrefix[] {\n const blobPrefixes = [];\n\n if (blobPrefixesInXML instanceof Array) {\n blobPrefixesInXML.forEach((blobPrefixInXML: any) => {\n blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML));\n });\n } else {\n blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML));\n }\n\n return blobPrefixes;\n}\n\nexport function* ExtractPageRangeInfoItems(\n getPageRangesSegment: PageBlobGetPageRangesDiffResponseModel\n): IterableIterator {\n let pageRange: PageRange[] = [];\n let clearRange: ClearRange[] = [];\n\n if (getPageRangesSegment.pageRange) pageRange = getPageRangesSegment.pageRange;\n if (getPageRangesSegment.clearRange) clearRange = getPageRangesSegment.clearRange;\n\n let pageRangeIndex = 0;\n let clearRangeIndex = 0;\n\n while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) {\n if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n ++pageRangeIndex;\n } else {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: clearRange[clearRangeIndex].end,\n isClear: true,\n };\n ++clearRangeIndex;\n }\n }\n\n for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n }\n\n for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: 
clearRange[clearRangeIndex].end,\n isClear: true,\n };\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js new file mode 100644 index 0000000000..788256cbf6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js @@ -0,0 +1,125 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as fs from "fs"; +import * as util from "util"; +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +export async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + stream.on("readable", () => { + if (pos >= count) { + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", reject); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +export async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * Reads a readable stream into a buffer. + * + * @param stream - A Node.js Readable stream + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + */ +export async function streamToBuffer3(readableStream, encoding) { + return new Promise((resolve, reject) => { + const chunks = []; + readableStream.on("data", (data) => { + chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding)); + }); + readableStream.on("end", () => { + resolve(Buffer.concat(chunks)); + }); + readableStream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. 
+ * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +export async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +export const fsStat = util.promisify(fs.stat); +export const fsCreateReadStream = fs.createReadStream; +//# sourceMappingURL=utils.node.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map new file mode 100644 index 0000000000..7326ab5d6d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.node.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.node.js","sourceRoot":"","sources":["../../../../src/utils/utils.node.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,cAAc,CAClC,MAA6B,EAC7B,MAAc,EACd,MAAc,EACd,GAAW,EACX,QAAyB;IAEzB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,qBAAqB;IAClC,MAAM,KAAK,GAAG,GAAG,GAAG,MAAM,CAAC,CAAC,wCAAwC;IAEpE,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;YACzB,IAAI,GAAG,IAAI,KAAK,EAAE;gBAChB,OAAO,EAAE,CAAC;gBACV,OAAO;aACR;YAED,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;aACR;YACD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;aACtC;YAED,qCAAqC;YACrC,MAAM,WAAW,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC;YAE5E,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,WAAW,CAAC,CAAC;YACnF,GAAG,IAAI,WAAW,CAAC;QACrB,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YACpB,IAAI,GAAG,GAAG,KAAK,EAAE;gBACf,MAAM,CACJ,IAAI,KAAK,CACP,+DAA+D,GAAG,gBAAgB,KAAK,EAAE,CAC1F,CACF,CAAC;aACH;YACD,OAAO,EAAE,CAAC;QACZ,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC7B,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;GAQG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,MAA6B,EAC7B,MAAc,EACd,QAAyB;IAEzB,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,qBAAqB;IAClC,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC;IAEjC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC7C,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;YACzB,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;aACR;YACD,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;aACtC;YAED,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,UAAU,EAAE;gBACnC,MAAM,CAAC,IAAI,KAAK,CAAC,4CAA4C,UAAU,EAAE,CAAC,CAAC,CAAC;gBAC5E,OAAO;aACR;YAED,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;YAC5C,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC;QACtB,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YACpB,OAAO,CAAC,GAAG,CAAC,CAAC;QACf,CAAC,CAAC,CAAC;QAEH,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IAC7B,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;GAMG;AACH,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,cAAqC,EACrC,QAAyB;IAEzB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACrC,MAAM,MAAM,GAAa,EAAE,CAAC;QAC5B,cAAc,CAAC
,EAAE,CAAC,MAAM,EAAE,CAAC,IAAqB,EAAE,EAAE;YAClD,MAAM,CAAC,IAAI,CAAC,IAAI,YAAY,MAAM,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;QAC3E,CAAC,CAAC,CAAC;QACH,cAAc,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;YAC5B,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;QACjC,CAAC,CAAC,CAAC;QACH,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACrC,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;GAOG;AACH,MAAM,CAAC,KAAK,UAAU,qBAAqB,CACzC,EAAyB,EACzB,IAAY;IAEZ,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QAC3C,MAAM,EAAE,GAAG,EAAE,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAEtC,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,EAAE,EAAE;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;QACd,CAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAExB,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACd,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,MAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC;AAE9C,MAAM,CAAC,MAAM,kBAAkB,GAAG,EAAE,CAAC,gBAAgB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as fs from \"fs\";\nimport * as util from \"util\";\n\n/**\n * Reads a readable stream into buffer. Fill the buffer from offset to end.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param offset - From which position in the buffer to be filled, inclusive\n * @param end - To which position in the buffer to be filled, exclusive\n * @param encoding - Encoding of the Readable stream\n */\nexport async function streamToBuffer(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n offset: number,\n end: number,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const count = end - offset; // Total amount of data needed in stream\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n if (pos >= count) {\n resolve();\n return;\n }\n\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n // How much data needed in this chunk\n const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n\n stream.on(\"end\", () => {\n if (pos < count) {\n reject(\n new Error(\n `Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`\n )\n );\n }\n resolve();\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n * @throws `RangeError` If buffer size is not big enough.\n */\nexport async function streamToBuffer2(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const bufferSize = buffer.length;\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n if (pos + chunk.length > bufferSize) {\n reject(new Error(`Stream exceeds buffer size. 
Buffer size: ${bufferSize}`));\n return;\n }\n\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n\n stream.on(\"end\", () => {\n resolve(pos);\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into a buffer.\n *\n * @param stream - A Node.js Readable stream\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n */\nexport async function streamToBuffer3(\n readableStream: NodeJS.ReadableStream,\n encoding?: BufferEncoding\n): Promise {\n return new Promise((resolve, reject) => {\n const chunks: Buffer[] = [];\n readableStream.on(\"data\", (data: Buffer | string) => {\n chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding));\n });\n readableStream.on(\"end\", () => {\n resolve(Buffer.concat(chunks));\n });\n readableStream.on(\"error\", reject);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed.\n *\n * @param rs - The read stream.\n * @param file - Destination file path.\n */\nexport async function readStreamToLocalFile(\n rs: NodeJS.ReadableStream,\n file: string\n): Promise {\n return new Promise((resolve, reject) => {\n const ws = fs.createWriteStream(file);\n\n rs.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"close\", resolve);\n\n rs.pipe(ws);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nexport const fsStat = util.promisify(fs.stat);\n\nexport const fsCreateReadStream = fs.createReadStream;\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js new file mode 100644 index 0000000000..c7ae8635f2 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js @@ -0,0 +1,8 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+// This file is used as a shim of "BufferScheduler" for some browser bundlers +// when trying to bundle "BufferScheduler" +// "BufferScheduler" class is only available in Node.js runtime +export class BufferScheduler { +} +//# sourceMappingURL=BufferScheduler.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map new file mode 100644 index 0000000000..7bf77330d6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BufferScheduler.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/BufferScheduler.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,6EAA6E;AAC7E,0CAA0C;AAC1C,+DAA+D;AAC/D,MAAM,OAAO,eAAe;CAAG","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// This file is used as a shim of \"BufferScheduler\" for some browser bundlers\n// when trying to bundle \"BufferScheduler\"\n// \"BufferScheduler\" class is only available in Node.js runtime\nexport class BufferScheduler {}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js new file mode 100644 index 0000000000..0d92285c6c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js @@ -0,0 +1,252 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { EventEmitter } from "events"; +import { PooledBuffer } from "./PooledBuffer"; +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +export class BufferScheduler { + /** + * Creates an instance of BufferScheduler. 
+ * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. + */ + this.emitter = new EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? 
Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. 
+ * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} +//# sourceMappingURL=BufferScheduler.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map new file mode 100644 index 0000000000..a6b2161509 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BufferScheduler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BufferScheduler.js","sourceRoot":"","sources":["../../../../storage-common/src/BufferScheduler.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAAE,MAAM,QAAQ,CAAC;AAEtC,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAW9C;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,OAAO,eAAe;IAuF1B;;;;;;;;;;;OAWG;IACH,YACE,QAAkB,EAClB,UAAkB,EAClB,UAAkB,EAClB,eAAgC,EAChC,WAAmB,EACnB,QAAyB;QAlF3B;;WAEG;QACc,YAAO,GAAiB,IAAI,YAAY,EAAE,CAAC;QAO5D;;WAEG;QACK,WAAM,GAAW,CAAC,CAAC;QAE3B;;WAEG;QACK,gBAAW,GAAY,KAAK,CAAC;QAErC;;WAEG;QACK,YAAO,GAAY,KAAK,CAAC;QAEjC;;WAEG;QACK,8BAAyB,GAAW,CAAC,CAAC;QAO9C;;WAEG;QACK,eAAU,GAAW,CAAC,CAAC;QAE/B;;;;;;WAMG;QACK,wBAAmB,GAAa,EAAE,CAAC;QAE3C;;WAEG;QACK,qBAAgB,GAAW,CAAC,CAAC;QAErC;;WAEG;QACK,aAAQ,GAAmB,EAAE,CAAC;QAEtC;;WAEG;QACK,aAAQ,GAAmB,EAAE,CAAC;QAsBpC,IAAI,UAAU,IAAI,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,EAAE,CAAC,CAAC;SACpF;QAED,IAAI,UAAU,IAAI,CAAC,EAAE;YACnB,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,EAAE,CAAC,CAAC;SACpF;QAED,IAAI,WAAW,IAAI,CAAC,EAAE;YACpB,MAAM,IAAI,UAAU,CAAC,iDAAiD,WAAW,EAAE,CAAC,CAAC;SACtF;QAED,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;QAC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;QACvC,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;IAC3B,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,EAAE;QACb,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC3C,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;gBAChC,IAAI,GAAG,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;gBAC1E,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;gBAEhC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;oBACvB,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;iBACvB;YACH,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;gBAChC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAClC,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;gBAC3B,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;gBACxB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YAChC,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,EAAE,EAAE;gBAC/B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;gBACpB,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;gBACtB,MAAM,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,EAAE,GAAG,EAAE;gBAC/B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,IAAI,CAAC,uBAAuB,EAAE,CAAC;oBAC/B,OAAO;iBACR;gBAED,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,yBAAyB,KAAK,CAAC,EAAE;oBAC5D,IAAI,IAAI,CAAC,gBAAgB,GAAG,CAAC,IAAI,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,UAAU,EAAE;wBACxE,MAAM,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;wBACzD,IAAI,CAAC,eAAe,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,iBAAiB,EAAE,EAAE,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC;6BAC7E,IAAI,CAAC,OAAO,CAAC;6BACb,KAAK,CAAC,MAAM,CAAC,CAAC;qBAClB;yBAAM,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnD,OAAO;qBACR;yBAAM;wBACL,OAAO,EAAE,CAAC;qBACX;iBACF;YACH,CAAC,CAAC,CAAC;QACL,CAAC,CAAC,CAAC;IACL,CAAC;IAED;;;;OAIG;IA
CK,oBAAoB,CAAC,IAAY;QACvC,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACpC,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC;IACvC,CAAC;IAED;;;;OAIG;IACK,kCAAkC,CAAC,MAAqB;QAC9D,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;SAC7F;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;SAC9D;QAED,IAAI,CAAC,gBAAgB,IAAI,MAAM,CAAC,IAAI,CAAC;QACrC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED;;;;;;;;OAQG;IACK,WAAW;QACjB,OAAO,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;YAC/C,IAAI,MAAoB,CAAC;YAEzB,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;gBAC5B,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAG,CAAC;gBAChC,IAAI,CAAC,kCAAkC,CAAC,MAAM,CAAC,CAAC;aACjD;iBAAM;gBACL,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE;oBACrC,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;oBACnD,IAAI,CAAC,UAAU,EAAE,CAAC;iBACnB;qBAAM;oBACL,gDAAgD;oBAChD,OAAO,KAAK,CAAC;iBACd;aACF;YAED,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,uBAAuB,EAAE,CAAC;SAChC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED;;;OAGG;IACK,KAAK,CAAC,uBAAuB;QACnC,IAAI,MAAgC,CAAC;QACrC,GAAG;YACD,IAAI,IAAI,CAAC,yBAAyB,IAAI,IAAI,CAAC,WAAW,EAAE;gBACtD,OAAO;aACR;YAED,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;YAC/B,IAAI,MAAM,EAAE;gBACV,IAAI,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC;aACrC;SACF,QAAQ,MAAM,EAAE;IACnB,CAAC;IAED;;;;OAIG;IACK,KAAK,CAAC,sBAAsB,CAAC,MAAoB;QACvD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC;QAEjC,IAAI,CAAC,yBAAyB,EAAE,CAAC;QACjC,IAAI,CAAC,MAAM,IAAI,YAAY,CAAC;QAE5B,IAAI;YACF,MAAM,IAAI,CAAC,eAAe,CACxB,GAAG,EAAE,CAAC,MAAM,CAAC,iBAAiB,EAAE,EAChC,YAAY,EACZ,IAAI,CAAC,MAAM,GAAG,YAAY,CAC3B,CAAC;SACH;QAAC,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO;SACR;QAED,IAAI,CAAC,yBAAyB,EAAE,CAAC;QACjC,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACzB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;IAChC,CAAC;IAED;;;;OAIG;IACK,WAAW,CAAC,MAAoB;QACtC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3B,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;YAC5D,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;SACxB;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EventEmitter } from \"events\";\nimport { Readable } from \"stream\";\nimport { PooledBuffer } from \"./PooledBuffer\";\n\n/**\n * OutgoingHandler is an async function triggered by BufferScheduler.\n */\nexport declare type OutgoingHandler = (\n body: () => NodeJS.ReadableStream,\n length: number,\n offset?: number\n) => Promise;\n\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. 
concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n */\nexport class BufferScheduler {\n /**\n * Size of buffers in incoming and outgoing queues. This class will try to align\n * data read from Readable stream into buffer chunks with bufferSize defined.\n */\n private readonly bufferSize: number;\n\n /**\n * How many buffers can be created or maintained.\n */\n private readonly maxBuffers: number;\n\n /**\n * A Node.js Readable stream.\n */\n private readonly readable: Readable;\n\n /**\n * OutgoingHandler is an async function triggered by BufferScheduler when there\n * are available buffers in outgoing array.\n */\n private readonly outgoingHandler: OutgoingHandler;\n\n /**\n * An internal event emitter.\n */\n private readonly emitter: EventEmitter = new EventEmitter();\n\n /**\n * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers)\n */\n private readonly concurrency: number;\n\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n */\n private offset: number = 0;\n\n /**\n * An internal marker to track whether stream is end.\n */\n private isStreamEnd: boolean = false;\n\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n */\n private isError: boolean = false;\n\n /**\n * How many handlers are executing.\n */\n private executingOutgoingHandlers: number = 0;\n\n /**\n * Encoding of the input Readable stream which has string data type instead of Buffer.\n */\n private encoding?: BufferEncoding;\n\n /**\n * How many buffers have been allocated.\n */\n private numBuffers: number = 0;\n\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n */\n private unresolvedDataArray: Buffer[] = [];\n\n /**\n * How much data consisted in unresolvedDataArray.\n */\n private unresolvedLength: number = 0;\n\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n */\n private incoming: PooledBuffer[] = [];\n\n /**\n * The array (queue) includes all the buffers filled from stream data.\n */\n private outgoing: PooledBuffer[] = [];\n\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param readable - A Node.js Readable stream\n * @param bufferSize - Buffer size of every maintained buffer\n * @param maxBuffers - How many buffers can be allocated\n * @param outgoingHandler - An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param concurrency - Concurrency of executing outgoingHandlers (>0)\n * @param encoding - [Optional] Encoding of Readable stream when it's a string stream\n */\n constructor(\n readable: Readable,\n bufferSize: number,\n maxBuffers: number,\n outgoingHandler: OutgoingHandler,\n concurrency: number,\n encoding?: BufferEncoding\n ) {\n if (bufferSize <= 0) {\n throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);\n }\n\n if (maxBuffers <= 0) {\n throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);\n }\n\n if (concurrency <= 0) {\n throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);\n }\n\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n */\n public async do(): Promise {\n return new Promise((resolve, reject) => {\n this.readable.on(\"data\", (data) => {\n data = typeof data === \"string\" ? Buffer.from(data, this.encoding) : data;\n this.appendUnresolvedData(data);\n\n if (!this.resolveData()) {\n this.readable.pause();\n }\n });\n\n this.readable.on(\"error\", (err) => {\n this.emitter.emit(\"error\", err);\n });\n\n this.readable.on(\"end\", () => {\n this.isStreamEnd = true;\n this.emitter.emit(\"checkEnd\");\n });\n\n this.emitter.on(\"error\", (err) => {\n this.isError = true;\n this.readable.pause();\n reject(err);\n });\n\n this.emitter.on(\"checkEnd\", () => {\n if (this.outgoing.length > 0) {\n this.triggerOutgoingHandlers();\n return;\n }\n\n if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {\n if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {\n const buffer = this.shiftBufferFromUnresolvedDataArray();\n this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)\n .then(resolve)\n .catch(reject);\n } else if (this.unresolvedLength >= this.bufferSize) {\n return;\n } else {\n resolve();\n }\n }\n });\n });\n }\n\n /**\n * Insert a new data into unresolved array.\n *\n * @param data -\n */\n private appendUnresolvedData(data: Buffer) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n }\n\n /**\n * Try to shift a buffer with size in blockSize. 
The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n */\n private shiftBufferFromUnresolvedDataArray(buffer?: PooledBuffer): PooledBuffer {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n } else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n\n this.unresolvedLength -= buffer.size;\n return buffer;\n }\n\n /**\n * Resolve data in unresolvedDataArray. For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @returns Return false when buffers in incoming are not enough, else true.\n */\n private resolveData(): boolean {\n while (this.unresolvedLength >= this.bufferSize) {\n let buffer: PooledBuffer;\n\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift()!;\n this.shiftBufferFromUnresolvedDataArray(buffer);\n } else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n } else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n }\n\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n */\n private async triggerOutgoingHandlers() {\n let buffer: PooledBuffer | undefined;\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return;\n }\n\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n }\n\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @param buffer -\n */\n private async triggerOutgoingHandler(buffer: PooledBuffer): Promise {\n const bufferLength = buffer.size;\n\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n\n try {\n await this.outgoingHandler(\n () => buffer.getReadableStream(),\n bufferLength,\n this.offset - bufferLength\n );\n } catch (err: any) {\n this.emitter.emit(\"error\", err);\n return;\n }\n\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n }\n\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @param buffer -\n */\n private reuseBuffer(buffer: PooledBuffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js new file mode 100644 index 0000000000..f035c027d0 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { Readable } from "stream"; +/** + * This class generates a readable stream from the data in an array of buffers. + */ +export class BuffersStream extends Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} +//# sourceMappingURL=BuffersStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map new file mode 100644 index 0000000000..b5b81872e8 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/BuffersStream.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BuffersStream.js","sourceRoot":"","sources":["../../../../storage-common/src/BuffersStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,QAAQ,EAAmB,MAAM,QAAQ,CAAC;AAOnD;;GAEG;AACH,MAAM,OAAO,aAAc,SAAQ,QAAQ;IAgBzC;;;;;;OAMG;IACH,YACU,OAAiB,EACjB,UAAkB,EAC1B,OAA8B;QAE9B,KAAK,CAAC,OAAO,CAAC,CAAC;QAJP,YAAO,GAAP,OAAO,CAAU;QACjB,eAAU,GAAV,UAAU,CAAQ;QAI1B,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;QACnC,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;QAE3B,4DAA4D;QAC5D,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,aAAa,IAAI,GAAG,CAAC,UAAU,CAAC;SACjC;QACD,IAAI,aAAa,GAAG,IAAI,CAAC,UAAU,EAAE;YACnC,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;SACpF;IACH,CAAC;IAED;;;;OAIG;IACI,KAAK,CAAC,IAAa;QACxB,IAAI,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC,UAAU,EAAE;YAC7C,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SACjB;QAED,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,GAAG,IAAI,CAAC,qBAAqB,CAAC;SACnC;QAED,MAAM,UAAU,GAAa,EAAE,CAAC;QAChC,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,UAAU,EAAE;YAC3D,2DAA2D;YAC3D,MAAM,yBAAyB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC;YAC3E,MAAM,6BAA6B,GACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC;YAC7E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,6BAA6B,EAAE,yBAAyB,CAAC,CAAC;YACrF,IAAI,SAAS,GAAG,IAAI,GAAG,CAAC,EAAE;gBACxB,uBAAuB;gBACvB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,IAAI,GAAG,CAAC,CAAC;gBACtD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,CAAC,iBAAiB,IAAI,IAAI,GAAG,CAAC,CAAC;gBACnC,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;gBACrC,CAAC,GAAG,IAAI,CAAC;gBACT,MAAM;aACP;iBAAM;gBACL,wBAAwB;gBACxB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,SAAS,CAAC;gBACvD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,SAAS,KAAK,6BAA6B,EAAE;oBAC/C,4DAA4D;oBAC5D,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;oBACnC,IAAI,CAAC,WAAW,EAAE,CAAC;iBACpB;qBAAM;oBACL,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;iBACtC;gBACD,IAAI,CAAC,iBAAiB,IAAI,SAAS,CAAC;gBACpC,CAAC,IAAI,SAAS,CAAC;aAChB;SACF;QAED,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;SACtC;aAAM,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;SAC1B;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable, ReadableOptions } from \"stream\";\n\n/**\n * Options to configure the BuffersStream.\n */\nexport interface BuffersStreamOptions extends ReadableOptions {}\n\n/**\n * This class generates a readable stream from the data in an array of buffers.\n */\nexport class BuffersStream extends Readable {\n /**\n * The offset of data to be read in the current buffer.\n */\n private byteOffsetInCurrentBuffer: number;\n\n /**\n * The index of buffer to be read in the array of buffers.\n */\n private bufferIndex: number;\n\n /**\n * The total length of data already read.\n */\n private pushedBytesLength: number;\n\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param buffers - Array of buffers containing the data\n * @param byteLength - The total length of data contained in the buffers\n */\n constructor(\n private buffers: Buffer[],\n private byteLength: number,\n options?: BuffersStreamOptions\n ) {\n super(options);\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex = 0;\n 
this.pushedBytesLength = 0;\n\n // check byteLength is no larger than buffers[] total length\n let buffersLength = 0;\n for (const buf of this.buffers) {\n buffersLength += buf.byteLength;\n }\n if (buffersLength < this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n }\n\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param size - Optional. The size of data to be read\n */\n public _read(size?: number) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n\n if (!size) {\n size = this.readableHighWaterMark;\n }\n\n const outBuffers: Buffer[] = [];\n let i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n const remainingCapacityInThisBuffer =\n this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n const end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n } else {\n // chunkSize = remaining\n const end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n } else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n } else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js new file mode 100644 index 0000000000..b15390541c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { BuffersStream } from "./BuffersStream"; +/** + * maxBufferLength is max size of each buffer in the pooled buffers. + */ +// Can't use import as Typescript doesn't recognize "buffer". +const maxBufferLength = require("buffer").constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +export class PooledBuffer { + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. 
+ */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } +} +//# sourceMappingURL=PooledBuffer.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map new file mode 100644 index 0000000000..e2e64153fb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js.map @@ -0,0 +1 @@ 
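
PooledBuffer.fill, shown above, advances two cursors, one across the incoming buffers and one across the preallocated internal buffers, and leans on Buffer.copy to report how many bytes moved at each step. The sketch below isolates that copy loop under simplified assumptions (fillTargets and its variable names are illustrative; the splitting of capacity by buffer.constants.MAX_LENGTH and the trimming of consumed source buffers are left out).

// Copy bytes from an array of source Buffers into preallocated target Buffers.
function fillTargets(sources: Buffer[], targets: Buffer[], totalLength: number): number {
  let si = 0, ti = 0;     // current source / target buffer index
  let sOff = 0, tOff = 0; // read / write offsets inside those buffers
  let copied = 0;
  while (copied < totalLength && si < sources.length && ti < targets.length) {
    // Buffer.copy returns the number of bytes actually copied.
    const n = sources[si].copy(targets[ti], tOff, sOff);
    copied += n;
    sOff += n;
    tOff += n;
    if (sOff === sources[si].length) { si++; sOff = 0; } // source exhausted
    if (tOff === targets[ti].length) { ti++; tOff = 0; } // target full
  }
  return copied;
}

// Usage: two 4-byte sources filled into one 8-byte target.
const target = Buffer.alloc(8);
const written = fillTargets([Buffer.from("abcd"), Buffer.from("efgh")], [target], 8);
console.log(written, target.toString()); // 8 'abcdefgh'
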
+{"version":3,"file":"PooledBuffer.js","sourceRoot":"","sources":["../../../../storage-common/src/PooledBuffer.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD;;GAEG;AACH,6DAA6D;AAC7D,MAAM,eAAe,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC;AAE/D;;;;;;;GAOG;AACH,MAAM,OAAO,YAAY;IA4CvB,YAAY,QAAgB,EAAE,OAAkB,EAAE,WAAoB;QA3CtE;;;WAGG;QACK,YAAO,GAAa,EAAE,CAAC;QAwC7B,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;QACzB,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;QAEf,WAAW;QACX,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC;QACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;YAClC,IAAI,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC,CAAC,eAAe,CAAC;YAC7E,IAAI,GAAG,KAAK,CAAC,EAAE;gBACb,GAAG,GAAG,eAAe,CAAC;aACvB;YACD,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;SAC5C;QAED,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,WAAY,CAAC,CAAC;SAClC;IACH,CAAC;IA5CD;;OAEG;IACH,IAAW,IAAI;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IAyCD;;;;;;;;OAQG;IACI,IAAI,CAAC,OAAiB,EAAE,WAAmB;QAChD,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;QAElD,IAAI,CAAC,GAAG,CAAC,EACP,CAAC,GAAG,CAAC,EACL,YAAY,GAAG,CAAC,EAChB,YAAY,GAAG,CAAC,EAChB,cAAc,GAAG,CAAC,CAAC;QACrB,OAAO,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE;YAClC,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YAC/B,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,YAAY,CAAC,CAAC;YAElE,cAAc,IAAI,SAAS,CAAC;YAC5B,YAAY,IAAI,SAAS,CAAC;YAC1B,YAAY,IAAI,SAAS,CAAC;YAC1B,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;gBAClC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;aAClB;YACD,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;gBAClC,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;aAClB;SACF;QAED,mCAAmC;QACnC,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACrB,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;YACtB,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;SAC7C;IACH,CAAC;IAED;;;OAGG;IACI,iBAAiB;QACtB,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;IACpD,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BuffersStream } from \"./BuffersStream\";\nimport { Readable } from \"stream\";\n\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize \"buffer\".\nconst maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nexport class PooledBuffer {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n private buffers: Buffer[] = [];\n\n /**\n * The total size of internal buffers.\n */\n private readonly capacity: number;\n\n /**\n * The total size of data contained in internal buffers.\n */\n private _size: number;\n\n /**\n * The size of the data contained in the pooled buffers.\n */\n public get size(): number {\n return this._size;\n }\n\n /**\n * Creates an instance of PooledBuffer with given 
capacity.\n * Internal buffers are allocated but contains no data.\n * Users may call the {@link PooledBuffer.fill} method to fill this\n * pooled buffer with data.\n *\n * @param capacity - Total capacity of the internal buffers\n */\n constructor(capacity: number);\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated and filled with data in the input buffers serially\n * with respect to the total length.\n *\n * @param capacity - Total capacity of the internal buffers\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n */\n constructor(capacity: number, buffers: Buffer[], totalLength: number);\n constructor(capacity: number, buffers?: Buffer[], totalLength?: number) {\n this.capacity = capacity;\n this._size = 0;\n\n // allocate\n const bufferNum = Math.ceil(capacity / maxBufferLength);\n for (let i = 0; i < bufferNum; i++) {\n let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n\n if (buffers) {\n this.fill(buffers, totalLength!);\n }\n }\n\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n public fill(buffers: Buffer[], totalLength: number) {\n this._size = Math.min(this.capacity, totalLength);\n\n let i = 0,\n j = 0,\n targetOffset = 0,\n sourceOffset = 0,\n totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n const source = buffers[i];\n const target = this.buffers[j];\n const copiedNum = source.copy(target, targetOffset, sourceOffset);\n\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n }\n\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n public getReadableStream(): Readable {\n return new BuffersStream(this.buffers, this.size);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js new file mode 100644 index 0000000000..3eca4771ac --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export * from "./BufferScheduler.browser"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map new file mode 100644 index 0000000000..fb2fafa797 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../../storage-common/src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,2BAA2B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler.browser\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js new file mode 100644 index 0000000000..993a263998 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export * from "./BufferScheduler"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map new file mode 100644 index 0000000000..9538bda24d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../storage-common/src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,cAAc,mBAAmB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport * from \"./BufferScheduler\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js new file mode 100644 index 0000000000..e7e5f2f0bb --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js @@ -0,0 +1,7 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export const AVRO_SYNC_MARKER_SIZE = 16; +export const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +export const AVRO_CODEC_KEY = "avro.codec"; +export const AVRO_SCHEMA_KEY = "avro.schema"; +//# sourceMappingURL=AvroConstants.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map new file mode 100644 index 0000000000..663b8f2292 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroConstants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroConstants.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroConstants.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,MAAM,CAAC,MAAM,qBAAqB,GAAW,EAAE,CAAC;AAChD,MAAM,CAAC,MAAM,eAAe,GAAe,IAAI,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AAC5E,MAAM,CAAC,MAAM,cAAc,GAAW,YAAY,CAAC;AACnD,MAAM,CAAC,MAAM,eAAe,GAAW,aAAa,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const AVRO_SYNC_MARKER_SIZE: number = 16;\nexport const AVRO_INIT_BYTES: Uint8Array = new Uint8Array([79, 98, 106, 1]);\nexport const AVRO_CODEC_KEY: string = \"avro.codec\";\nexport const AVRO_SCHEMA_KEY: string = \"avro.schema\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js new file mode 100644 index 0000000000..da66dc53cc --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js @@ -0,0 +1,311 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +export class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new 
Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} +//# sourceMappingURL=AvroParser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map new file mode 100644 index 0000000000..d4a55bc2c1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroParser.js.map @@ -0,0 +1 @@ 
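
AvroParser.readZigZagLong in the file above decodes Avro's variable-length zig-zag integers: each byte contributes its low 7 bits, the high bit marks continuation, and the accumulated value is un-zig-zagged so small negative numbers stay short on the wire. The self-contained sketch below shows that decoding against an in-memory byte array (decodeZigZagVarint is an illustrative name); it covers only the 32-bit-safe path and skips the float-arithmetic fallback the library applies beyond 28 bits of payload.

// Decode one zig-zag varint starting at `offset`; return the value and the next offset.
function decodeZigZagVarint(bytes: Uint8Array, offset = 0): { value: number; next: number } {
  let encoded = 0;
  let shift = 0;
  let byte: number;
  do {
    byte = bytes[offset++];
    encoded |= (byte & 0x7f) << shift; // low 7 bits carry payload, least-significant group first
    shift += 7;
  } while (byte & 0x80);               // high bit set means another byte follows
  // Undo the zig-zag mapping 0, -1, 1, -2, ... -> 0, 1, 2, 3, ...
  return { value: (encoded >> 1) ^ -(encoded & 1), next: offset };
}

// Usage: Avro encodes -2 as 0x03 and 64 as 0x80 0x01.
console.log(decodeZigZagVarint(new Uint8Array([0x03])).value);       // -2
console.log(decodeZigZagVarint(new Uint8Array([0x80, 0x01])).value); // 64
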
+{"version":3,"file":"AvroParser.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroParser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAqBlC,MAAM,OAAO,UAAU;IACrB;;;;;;OAMG;IACI,MAAM,CAAC,KAAK,CAAC,cAAc,CAChC,MAAoB,EACpB,MAAc,EACd,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAC9E,IAAI,KAAK,CAAC,MAAM,KAAK,MAAM,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;SACpC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;;;;OAKG;IACK,MAAM,CAAC,KAAK,CAAC,QAAQ,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAChE,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC;IAChB,CAAC;IAED,6DAA6D;IAC7D,8EAA8E;IAC9E,oFAAoF;IAC5E,MAAM,CAAC,KAAK,CAAC,cAAc,CACjC,MAAoB,EACpB,UAAiC,EAAE;QAEnC,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,IAAI,iBAAiB,GAAG,CAAC,CAAC;QAC1B,IAAI,IAAI,EAAE,YAAY,EAAE,mBAAmB,CAAC;QAE5C,GAAG;YACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAClD,YAAY,GAAG,IAAI,GAAG,IAAI,CAAC;YAC3B,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,iBAAiB,CAAC;YACpD,iBAAiB,IAAI,CAAC,CAAC;SACxB,QAAQ,YAAY,IAAI,iBAAiB,GAAG,EAAE,EAAE,CAAC,mDAAmD;QAErG,IAAI,YAAY,EAAE;YAChB,6BAA6B;YAC7B,0CAA0C;YAC1C,aAAa,GAAG,aAAa,CAAC;YAC9B,mBAAmB,GAAG,SAAS,CAAC,CAAC,WAAW;YAC5C,GAAG;gBACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAClD,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,GAAG,mBAAmB,CAAC;gBACrD,mBAAmB,IAAI,GAAG,CAAC,CAAC,SAAS;aACtC,QAAQ,IAAI,GAAG,IAAI,EAAE;YAEtB,MAAM,GAAG,GAAG,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC;YAC3E,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,EAAE;gBAClE,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;aACtC;YACD,OAAO,GAAG,CAAC;SACZ;QAED,OAAO,CAAC,aAAa,IAAI,CAAC,CAAC,GAAG,CAAC,CAAC,aAAa,GAAG,CAAC,CAAC,CAAC;IACrD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,QAAQ,CAC1B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAO,CACzB,MAAoB,EACpB,UAAiC,EAAE;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACpD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,QAAQ;QAC1B,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,WAAW,CAC7B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,CAAC,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,KAAK,CAAC,EAAE;YACX,OAAO,IAAI,CAAC;SACb;aAAM,IAAI,CAAC,KAAK,CAAC,EAAE;YAClB,OAAO,KAAK,CAAC;SACd;aAAM;YACL,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;SAC5C;IACH,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAClE,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,sBAAsB;IACzD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;QAClE,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAC,sBAAsB;IACzD,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,IAAI,IAAI,GAAG,CAAC,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;IACjE,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC1D,MAAM,WAAW,GAAG,IAAI,WAA
W,EAAE,CAAC;QACtC,OAAO,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACnC,CAAC;IAEO,MAAM,CAAC,KAAK,CAAC,WAAW,CAC9B,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACzD,0GAA0G;QAC1G,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACpD,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;IACxB,CAAC;IAEM,MAAM,CAAC,KAAK,CAAC,OAAO,CACzB,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,OAA8B,EAAE,EACN,EAAE;YAC5B,OAAO,UAAU,CAAC,WAAW,CAAC,CAAC,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;QACzD,CAAC,CAAC;QAEF,MAAM,KAAK,GAAsB,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;QAE7F,MAAM,IAAI,GAAsB,EAAE,CAAC;QACnC,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;SAC7B;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAEO,MAAM,CAAC,KAAK,CAAC,SAAS,CAC5B,MAAoB,EACpB,cAAgF,EAChF,UAAiC,EAAE;QAEnC,MAAM,KAAK,GAAQ,EAAE,CAAC;QACtB,KACE,IAAI,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,KAAK,KAAK,CAAC,EACX,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EAClD;YACA,IAAI,KAAK,GAAG,CAAC,EAAE;gBACb,qBAAqB;gBACrB,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAC3C,KAAK,GAAG,CAAC,KAAK,CAAC;aAChB;YAED,OAAO,KAAK,EAAE,EAAE;gBACd,MAAM,IAAI,GAAM,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBACtD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAClB;SACF;QACD,OAAO,KAAK,CAAC;IACf,CAAC;CACF;AAOD,IAAK,WAOJ;AAPD,WAAK,WAAW;IACd,gCAAiB,CAAA;IACjB,4BAAa,CAAA;IACb,8BAAe,CAAA;IACf,0BAAW,CAAA;IACX,8BAAe,CAAA;IACf,8BAAe,CAAA;AACjB,CAAC,EAPI,WAAW,KAAX,WAAW,QAOf;AAYD,IAAK,aASJ;AATD,WAAK,aAAa;IAChB,8BAAa,CAAA;IACb,oCAAmB,CAAA;IACnB,4BAAW,CAAA;IACX,8BAAa,CAAA;IACb,gCAAe,CAAA;IACf,kCAAiB,CAAA;IACjB,gCAAe,CAAA;IACf,kCAAiB,CAAA;AACnB,CAAC,EATI,aAAa,KAAb,aAAa,QASjB;AAED,MAAM,OAAgB,QAAQ;IAS5B;;OAEG;IACI,MAAM,CAAC,UAAU,CAAC,MAAuB;QAC9C,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;YAC9B,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;SAC1C;aAAM,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;YAChC,OAAO,QAAQ,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;SACzC;aAAM;YACL,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAsB,CAAC,CAAC;SAC1D;IACH,CAAC;IAEO,MAAM,CAAC,gBAAgB,CAAC,MAAc;QAC5C,QAAQ,MAAM,EAAE;YACd,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,OAAO,CAAC;YAC3B,KAAK,aAAa,CAAC,GAAG,CAAC;YACvB,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM,CAAC;YAC1B,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,IAAI,iBAAiB,CAAC,MAAuB,CAAC,CAAC;YACxD;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,MAAM,EAAE,CAAC,CAAC;SACrD;IACH,CAAC;IAEO,MAAM,CAAC,eAAe,CAAC,MAAa;QAC1C,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;IAC5D,CAAC;IAEO,MAAM,CAAC,gBAAgB,CAAC,MAAoB;QAClD,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC;QACzB,kDAAkD;QAClD,IAAI;YACF,OAAO,QAAQ,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;SACxC;QAAC,OAAO,GAAQ,EAAE;YACjB,+BAA+B;SAChC;QAED,QAAQ,IAAI,EAAE;YACZ,KAAK,WAAW,CAAC,MAAM;gBACrB,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,EAAE,CAAC,CAAC;iBAC1E;gBACD,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;oBAChB,MAAM,IAAI,KAAK,CAAC,sDAAsD,MAAM,EAAE,CAAC,CAAC;iBACjF;gBAED,gDAAgD;gBAChD,MAAM,MAAM,GAA6B,EAAE,CAAC;gBAC5C,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,EAAE,CAAC,CAAC;iBACnF;gBACD,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE;oBACjC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;iBACtD;gBACD,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;YACjD,KAAK,WAAW,CAAC,IAAI;gBACnB,IAAI,MAAM,CAAC,OAAO,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,EAAE,CAAC,CAAC;iBAC1E;gBACD,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;oBACnB,MAAM,IAAI,KAAK,CAAC,yDAAyD,MAAM,EAAE,CAA
C,CAAC;iBACpF;gBACD,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YAC1C,KAAK,WAAW,CAAC,GAAG;gBAClB,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;oBAClB,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,EAAE,CAAC,CAAC;iBACnF;gBACD,OAAO,IAAI,WAAW,CAAC,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;YAC7D,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC,eAAe;YACvC,KAAK,WAAW,CAAC,KAAK,CAAC,CAAC,eAAe;YACvC;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,IAAI,OAAO,MAAM,EAAE,CAAC,CAAC;SAChE;IACH,CAAC;CACF;AAED,MAAM,iBAAkB,SAAQ,QAAQ;IAGtC,YAAY,SAAwB;QAClC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;IAC9B,CAAC;IAEM,IAAI,CACT,MAAoB,EACpB,UAAiC,EAAE;QAEnC,QAAQ,IAAI,CAAC,UAAU,EAAE;YACvB,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;YAC/B,KAAK,aAAa,CAAC,OAAO;gBACxB,OAAO,UAAU,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YACjD,KAAK,aAAa,CAAC,GAAG;gBACpB,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC7C,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC9C,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD;gBACE,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;SAC7C;IACH,CAAC;CACF;AAED,MAAM,YAAa,SAAQ,QAAQ;IAGjC,YAAY,OAAiB;QAC3B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;IAC1B,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACzE,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;IAC9B,CAAC;CACF;AAED,MAAM,aAAc,SAAQ,QAAQ;IAGlC,YAAY,KAAiB;QAC3B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;IACtB,CAAC;IAEM,KAAK,CAAC,IAAI,CACf,MAAoB,EACpB,UAAiC,EAAE;QAEnC,MAAM,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACtD,CAAC;CACF;AAED,MAAM,WAAY,SAAQ,QAAQ;IAGhC,YAAY,QAAkB;QAC5B,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAEM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACnE,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA4B,EACJ,EAAE;YAC1B,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;QACtC,CAAC,CAAC;QACF,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;CACF;AAED,MAAM,cAAe,SAAQ,QAAQ;IAInC,YAAY,MAAgC,EAAE,IAAY;QACxD,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;QACtB,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;IACpB,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE;QACzE,MAAM,MAAM,GAAkC,EAAE,CAAC;QACjD,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;QAC/B,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;YAC9B,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;gBAC3D,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;aAC7D;SACF;QACD,OAAO,MAAM,CAAC;IAChB,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of the Object usage and non-interfaces\n/* eslint-disable @typescript-eslint/ban-types, @azure/azure-sdk/ts-use-interface-parameters */\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { KeyValuePair } from \"./utils/utils.common\";\n\n/**\n * Options to configure the AvroParser read methods.\n * See {@link AvroParser.readFixedBytes}, {@link AvroParser.readMap} and etc.\n */\ninterface AvroParserReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to 
signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroParser {\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @param stream -\n * @param length -\n * @param options -\n */\n public static async readFixedBytes(\n stream: AvroReadable,\n length: number,\n options: AvroParserReadOptions = {}\n ): Promise {\n const bytes = await stream.read(length, { abortSignal: options.abortSignal });\n if (bytes.length !== length) {\n throw new Error(\"Hit stream end.\");\n }\n return bytes;\n }\n\n /**\n * Reads a single byte from the stream.\n *\n * @param stream -\n * @param options -\n */\n private static async readByte(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const buf = await AvroParser.readFixedBytes(stream, 1, options);\n return buf[0];\n }\n\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n private static async readZigZagLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n let zigZagEncoded = 0;\n let significanceInBit = 0;\n let byte, haveMoreByte, significanceInFloat;\n\n do {\n byte = await AvroParser.readByte(stream, options);\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers\n\n if (haveMoreByte) {\n // Switch to float arithmetic\n // eslint-disable-next-line no-self-assign\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 268435456; // 2 ** 28.\n do {\n byte = await AvroParser.readByte(stream, options);\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n } while (byte & 0x80);\n\n const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return res;\n }\n\n return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1);\n }\n\n public static async readLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readInt(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readNull(): Promise {\n return null;\n }\n\n public static async readBoolean(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const b = await AvroParser.readByte(stream, options);\n if (b === 1) {\n return true;\n } else if (b === 0) {\n return false;\n } else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n\n public static async readFloat(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 4, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat32(0, true); // littleEndian = true\n }\n\n public static async readDouble(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 8, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat64(0, true); // littleEndian = true\n }\n\n public static async readBytes(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const size = await AvroParser.readLong(stream, options);\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n\n return stream.read(size, { abortSignal: options.abortSignal });\n }\n\n public static async readString(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readBytes(stream, options);\n const utf8decoder = new TextDecoder();\n return utf8decoder.decode(u8arr);\n }\n\n private static async readMapPair(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const key = await AvroParser.readString(stream, options);\n // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter.\n const value = await readItemMethod(stream, options);\n return { key, value };\n }\n\n public static async readMap(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const readPairMethod = (\n s: AvroReadable,\n opts: AvroParserReadOptions = {}\n ): Promise> => {\n return AvroParser.readMapPair(s, readItemMethod, opts);\n };\n\n const pairs: KeyValuePair[] = await AvroParser.readArray(stream, readPairMethod, options);\n\n const dict: Record = {};\n for (const pair of pairs) {\n dict[pair.key] = pair.value;\n }\n return dict;\n }\n\n private static async readArray(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise {\n const items: T[] = [];\n for (\n let count = await AvroParser.readLong(stream, options);\n count !== 0;\n count = await AvroParser.readLong(stream, options)\n ) {\n if (count < 0) {\n 
// Ignore block sizes\n await AvroParser.readLong(stream, options);\n count = -count;\n }\n\n while (count--) {\n const item: T = await readItemMethod(stream, options);\n items.push(item);\n }\n }\n return items;\n }\n}\n\ninterface RecordField {\n name: string;\n type: string | ObjectSchema | (string | ObjectSchema)[]; // Unions may not immediately contain other unions.\n}\n\nenum AvroComplex {\n RECORD = \"record\",\n ENUM = \"enum\",\n ARRAY = \"array\",\n MAP = \"map\",\n UNION = \"union\",\n FIXED = \"fixed\",\n}\n\ninterface ObjectSchema {\n type: Exclude;\n name?: string;\n aliases?: string;\n fields?: RecordField[];\n symbols?: string[];\n values?: string;\n size?: number;\n}\n\nenum AvroPrimitive {\n NULL = \"null\",\n BOOLEAN = \"boolean\",\n INT = \"int\",\n LONG = \"long\",\n FLOAT = \"float\",\n DOUBLE = \"double\",\n BYTES = \"bytes\",\n STRING = \"string\",\n}\n\nexport abstract class AvroType {\n /**\n * Reads an object from the stream.\n */\n public abstract read(\n stream: AvroReadable,\n options?: AvroParserReadOptions\n ): Promise; // eslint-disable-line @typescript-eslint/ban-types\n\n /**\n * Determines the AvroType from the Avro Schema.\n */\n public static fromSchema(schema: string | Object): AvroType {\n if (typeof schema === \"string\") {\n return AvroType.fromStringSchema(schema);\n } else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n } else {\n return AvroType.fromObjectSchema(schema as ObjectSchema);\n }\n }\n\n private static fromStringSchema(schema: string): AvroType {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema as AvroPrimitive);\n default:\n throw new Error(`Unexpected Avro type ${schema}`);\n }\n }\n\n private static fromArraySchema(schema: any[]): AvroType {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n }\n\n private static fromObjectSchema(schema: ObjectSchema): AvroType {\n const type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n } catch (err: any) {\n // eslint-disable-line no-empty\n }\n\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.name) {\n throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`);\n }\n\n // eslint-disable-next-line no-case-declarations\n const fields: Record = {};\n if (!schema.fields) {\n throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`);\n }\n for (const field of schema.fields) {\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.symbols) {\n throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`);\n }\n return new AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(`Unexpected Avro type ${type} in 
${schema}`);\n }\n }\n}\n\nclass AvroPrimitiveType extends AvroType {\n private _primitive: AvroPrimitive;\n\n constructor(primitive: AvroPrimitive) {\n super();\n this._primitive = primitive;\n }\n\n public read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n switch (this._primitive) {\n case AvroPrimitive.NULL:\n return AvroParser.readNull();\n case AvroPrimitive.BOOLEAN:\n return AvroParser.readBoolean(stream, options);\n case AvroPrimitive.INT:\n return AvroParser.readInt(stream, options);\n case AvroPrimitive.LONG:\n return AvroParser.readLong(stream, options);\n case AvroPrimitive.FLOAT:\n return AvroParser.readFloat(stream, options);\n case AvroPrimitive.DOUBLE:\n return AvroParser.readDouble(stream, options);\n case AvroPrimitive.BYTES:\n return AvroParser.readBytes(stream, options);\n case AvroPrimitive.STRING:\n return AvroParser.readString(stream, options);\n default:\n throw new Error(\"Unknown Avro Primitive\");\n }\n }\n}\n\nclass AvroEnumType extends AvroType {\n private readonly _symbols: string[];\n\n constructor(symbols: string[]) {\n super();\n this._symbols = symbols;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const value = await AvroParser.readInt(stream, options);\n return this._symbols[value];\n }\n}\n\nclass AvroUnionType extends AvroType {\n private readonly _types: AvroType[];\n\n constructor(types: AvroType[]) {\n super();\n this._types = types;\n }\n\n public async read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise { // eslint-disable-line @typescript-eslint/ban-types\n const typeIndex = await AvroParser.readInt(stream, options);\n return this._types[typeIndex].read(stream, options);\n }\n}\n\nclass AvroMapType extends AvroType {\n private readonly _itemType: AvroType;\n\n constructor(itemType: AvroType) {\n super();\n this._itemType = itemType;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const readItemMethod = (\n s: AvroReadable,\n opts?: AvroParserReadOptions\n ): Promise => { \n return this._itemType.read(s, opts);\n };\n return AvroParser.readMap(stream, readItemMethod, options);\n }\n}\n\nclass AvroRecordType extends AvroType {\n private readonly _name: string;\n private readonly _fields: Record;\n\n constructor(fields: Record, name: string) {\n super();\n this._fields = fields;\n this._name = name;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const record: Record = {};\n record[\"$schema\"] = this._name;\n for (const key in this._fields) {\n if (Object.prototype.hasOwnProperty.call(this._fields, key)) {\n record[key] = await this._fields[key].read(stream, options);\n }\n }\n return record;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js new file mode 100644 index 0000000000..86ac741f3e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export class AvroReadable { +} +//# sourceMappingURL=AvroReadable.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map new file mode 100644 index 0000000000..2a421a4612 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadable.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadable.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadable.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAelC,MAAM,OAAgB,YAAY;CAGjC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReadable.read} operation.\n */\nexport interface AvroReadableReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport abstract class AvroReadable {\n public abstract get position(): number;\n public abstract read(size: number, options?: AvroReadableReadOptions): Promise;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js new file mode 100644 index 0000000000..5d58a863a1 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { AvroReadable } from "./AvroReadable"; +import { AbortError } from "@azure/abort-controller"; +const ABORT_ERROR = new AbortError("Reading from the avro blob was aborted."); +export class AvroReadableFromBlob extends AvroReadable { + constructor(blob) { + super(); + this._blob = blob; + this._position = 0; + } + get position() { + return this._position; + } + async read(size, options = {}) { + size = Math.min(size, this._blob.size - this._position); + if (size <= 0) { + return new Uint8Array(); + } + const fileReader = new FileReader(); + return new Promise((resolve, reject) => { + function cleanUp() { + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + } + function abortHandler() { + fileReader.abort(); + cleanUp(); + reject(ABORT_ERROR); + } + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + fileReader.onloadend = (ev) => { + cleanUp(); + resolve(new Uint8Array(ev.target.result)); + }; + fileReader.onerror = () => { + cleanUp(); + reject(); + }; + fileReader.readAsArrayBuffer(this._blob.slice(this._position, (this._position += size))); + }); + } +} +//# sourceMappingURL=AvroReadableFromBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map new file mode 100644 index 0000000000..240fbb16a4 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromBlob.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadableFromBlob.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadableFromBlob.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAA2B,MAAM,gBAAgB,CAAC;AACvE,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,MAAM,WAAW,GAAG,IAAI,UAAU,CAAC,yCAAyC,CAAC,CAAC;AAE9E,MAAM,OAAO,oBAAqB,SAAQ,YAAY;IAIpD,YAAY,IAAU;QACpB,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;QAClB,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;IACrB,CAAC;IAED,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE;QACnE,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC;QACxD,IAAI,IAAI,IAAI,CAAC,EAAE;YACb,OAAO,IAAI,UAAU,EAAE,CAAC;SACzB;QAED,MAAM,UAAU,GAAG,IAAI,UAAU,EAAE,CAAC;QACpC,OAAO,IAAI,OAAO,CAAa,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAEjD,SAAS,OAAO;gBACd,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;iBACjE;YACH,CAAC;YAED,SAAS,YAAY;gBACnB,UAAU,CAAC,KAAK,EAAE,CAAC;gBACnB,OAAO,EAAE,CAAC;gBACV,MAAM,CAAC,WAAW,CAAC,CAAC;YACtB,CAAC;YAED,IAAI,OAAO,CAAC,WAAW,EAAE;gBACvB,OAAO,CAAC,WAAW,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;aAC7D;YAED,UAAU,CAAC,SAAS,GAAG,CAAC,EAAO,EAAE,EAAE;gBACjC,OAAO,EAAE,CAAC;gBACV,OAAO,CAAC,IAAI,UAAU,CAAC,EAAE,CAAC,MAAO,CAAC,MAAM,CAAC,CAAC,CAAC;YAC7C,CAAC,CAAC;YAEF,UAAU,CAAC,OAAO,GAAG,GAAG,EAAE;gBACxB,OAAO,EAAE,CAAC;gBACV,MAAM,EAAE,CAAC;YACX,CAAC,CAAC;YAEF,UAAU,CAAC,iBAAiB,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC;QAC3F,CAAC,CAAC,CAAC;IACL,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro blob was 
aborted.\");\n\nexport class AvroReadableFromBlob extends AvroReadable {\n private _position: number;\n private _blob: Blob;\n\n constructor(blob: Blob) {\n super();\n this._blob = blob;\n this._position = 0;\n }\n\n public get position(): number {\n return this._position;\n }\n\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n size = Math.min(size, this._blob.size - this._position);\n if (size <= 0) {\n return new Uint8Array();\n }\n\n const fileReader = new FileReader();\n return new Promise((resolve, reject) => {\n\n function cleanUp(): void {\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n }\n\n function abortHandler(): void {\n fileReader.abort();\n cleanUp();\n reject(ABORT_ERROR);\n }\n\n if (options.abortSignal) {\n options.abortSignal.addEventListener(\"abort\", abortHandler);\n }\n\n fileReader.onloadend = (ev: any) => {\n cleanUp();\n resolve(new Uint8Array(ev.target!.result));\n };\n\n fileReader.onerror = () => {\n cleanUp();\n reject();\n };\n\n fileReader.readAsArrayBuffer(this._blob.slice(this._position, (this._position += size)));\n });\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js new file mode 100644 index 0000000000..197fbd3fef --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js @@ -0,0 +1,84 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AvroReadable } from "./AvroReadable"; +import { AbortError } from "@azure/abort-controller"; +const ABORT_ERROR = new AbortError("Reading from the avro stream was aborted."); +export class AvroReadableFromStream extends AvroReadable { + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. 
+ resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } +} +//# sourceMappingURL=AvroReadableFromStream.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map new file mode 100644 index 0000000000..9cba217057 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReadableFromStream.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReadableFromStream.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,YAAY,EAA2B,MAAM,gBAAgB,CAAC;AACvE,OAAO,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAErD,MAAM,WAAW,GAAG,IAAI,UAAU,CAAC,2CAA2C,CAAC,CAAC;AAEhF,MAAM,OAAO,sBAAuB,SAAQ,YAAY;IAWtD,YAAY,QAA+B;QACzC,KAAK,EAAE,CAAC;QACR,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;QAC1B,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;IACrB,CAAC;IAXO,YAAY,CAAC,IAAqB;QACxC,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;YAC5B,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;SAC1B;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAOD,IAAW,QAAQ;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACM,KAAK,CAAC,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE;;QACnE,IAAI,MAAA,OAAO,CAAC,WAAW,0CAAE,OAAO,EAAE;YAChC,MAAM,WAAW,CAAC;SACnB;QAED,IAAI,IAAI,GAAG,CAAC,EAAE;YACZ,MAAM,IAAI,KAAK,CAAC,sCAAsC,IAAI,EAAE,CAAC,CAAC;SAC/D;QAED,IAAI,IAAI,KAAK,CAAC,EAAE;YACd,OAAO,IAAI,UAAU,EAAE,CAAC;SACzB;QAED,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE;YAC5B,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;SAC/C;QACD,uCAAuC;QACvC,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QACxC,IAAI,KAAK,EAAE;YACT,IAAI,CAAC,SAAS,IAAI,KAAK,CAAC,MAAM,CAAC;YAC/B,gEAAgE;YAChE,OAAO,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;SACjC;aAAM;YACL,oDAAoD;YACpD,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;gBACrC,4DAA4D;gBAC5D,MAAM,OAAO,GAAe,GAAG,EAAE;oBAC/B,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;oBAC5D,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBACvD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;oBACrD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBAEvD,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;qBACjE;gBACH,CAAC,CAAC;gBAEF,MAAM,gBAAgB,GAAe,GAAG,EAAE;oBACxC,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChD,IAAI,aAAa,EAAE;wBACjB,IAAI,CAAC,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;wBACvC,OAAO,EAAE,CAAC;wBACV,wEAAwE;wBACxE,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC,CAAC;qBAC3C;gBACH,CAAC,CAAC;gBAEF,MAAM,cAAc,GAAe,GAAG,EAAE;oBACtC,OAAO,EAAE,CAAC;oBACV,MAAM,EAAE,CAAC;gBACX,CAAC,CAAC;gBAEF,MAAM,YAAY,GAAe,GAAG,EAAE;oBACpC,OAAO,EAAE,CAAC;oBACV,MAAM,CAAC,WAAW,CAAC,CAAC;gBACtB,CAAC,CAAC;gBAEF,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAChD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;gBAC3C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,C
AAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;iBAC9D;gBACD,2DAA2D;YAC7D,CAAC,CAAC,CAAC;SACJ;IACH,CAAC;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro stream was aborted.\");\n\nexport class AvroReadableFromStream extends AvroReadable {\n private _position: number;\n private _readable: NodeJS.ReadableStream;\n\n private toUint8Array(data: string | Buffer): Uint8Array {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n }\n\n constructor(readable: NodeJS.ReadableStream) {\n super();\n this._readable = readable;\n this._position = 0;\n }\n public get position(): number {\n return this._position;\n }\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n if (options.abortSignal?.aborted) {\n throw ABORT_ERROR;\n }\n\n if (size < 0) {\n throw new Error(`size parameter should be positive: ${size}`);\n }\n\n if (size === 0) {\n return new Uint8Array();\n }\n\n if (!this._readable.readable) {\n throw new Error(\"Stream no longer readable.\");\n }\n // See if there is already enough data.\n const chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return this.toUint8Array(chunk);\n } else {\n // register callback to wait for enough data to read\n return new Promise((resolve, reject) => {\n /* eslint-disable @typescript-eslint/no-use-before-define */\n const cleanUp: () => void = () => {\n this._readable.removeListener(\"readable\", readableCallback);\n this._readable.removeListener(\"error\", rejectCallback);\n this._readable.removeListener(\"end\", rejectCallback);\n this._readable.removeListener(\"close\", rejectCallback);\n\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n };\n\n const readableCallback: () => void = () => {\n const callbackChunk = this._readable.read(size);\n if (callbackChunk) {\n this._position += callbackChunk.length;\n cleanUp();\n // callbackChunk.length maybe less than desired size if the stream ends.\n resolve(this.toUint8Array(callbackChunk));\n }\n };\n\n const rejectCallback: () => void = () => {\n cleanUp();\n reject();\n };\n\n const abortHandler: () => void = () => {\n cleanUp();\n reject(ABORT_ERROR);\n };\n\n this._readable.on(\"readable\", readableCallback);\n this._readable.once(\"error\", rejectCallback);\n this._readable.once(\"end\", rejectCallback);\n this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal!.addEventListener(\"abort\", abortHandler);\n }\n /* eslint-enable @typescript-eslint/no-use-before-define */\n });\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js new file mode 100644 index 0000000000..8421375460 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js @@ -0,0 +1,107 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
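As a hedged usage sketch only (these are internal modules, so the deep import paths below are illustrative rather than a supported API): the stream-backed readable above pairs with the AvroReader added next, whose parseObjects() async generator yields one decoded record at a time.

// Hypothetical Node.js sketch: parse every record from an Avro file on disk.
import { createReadStream } from "fs";
import { AvroReader } from "@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader";
import { AvroReadableFromStream } from "@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream";

export async function dumpAvroRecords(path) {
  const readable = new AvroReadableFromStream(createReadStream(path));
  const reader = new AvroReader(readable);
  // parseObjects() is an async generator, so for-await consumes record after
  // record until hasNext() reports the final block has been drained.
  for await (const record of reader.parseObjects()) {
    console.log(record);
  }
}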
+import { __asyncGenerator, __await } from "tslib"; +// TODO: Do a review of non-interfaces +/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */ +import "@azure/core-paging"; +import { AVRO_CODEC_KEY, AVRO_INIT_BYTES, AVRO_SCHEMA_KEY, AVRO_SYNC_MARKER_SIZE, } from "./AvroConstants"; +import { AvroParser, AvroType } from "./AvroParser"; +import { arraysEqual } from "./utils/utils.common"; +export class AvroReader { + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects(options = {}) { + return __asyncGenerator(this, arguments, function* parseObjects_1() { + if (!this._initialized) { + yield __await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield __await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield __await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield __await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We 
hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield __await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield __await(result); + } + }); + } +} +//# sourceMappingURL=AvroReader.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map new file mode 100644 index 0000000000..e35d5bd985 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"AvroReader.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/AvroReader.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;;AAElC,sCAAsC;AACtC,iEAAiE;AAEjE,OAAO,oBAAoB,CAAC;AAC5B,OAAO,EACL,cAAc,EACd,eAAe,EACf,eAAe,EACf,qBAAqB,GACtB,MAAM,iBAAiB,CAAC;AACzB,OAAO,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,cAAc,CAAC;AAGpD,OAAO,EAAE,WAAW,EAAE,MAAM,sBAAsB,CAAC;AAanD,MAAM,OAAO,UAAU;IAuCrB,YACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC;QAEhC,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;QAC9B,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;QAChD,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;QAC1B,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;QAC5C,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;IACrD,CAAC;IAhCD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAGD,IAAW,WAAW;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IA2BO,KAAK,CAAC,UAAU,CAAC,UAA4B,EAAE;QACrD,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;YACzC,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;SAChD;QAED,sEAAsE;QACtE,sCAAsC;QACtC,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,iBAAiB;QACjB,MAAM,KAAK,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;QAC9C,IAAI,CAAC,CAAC,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE;YAChE,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;SAC7C;QAED,6DAA6D;QAC7D,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QAEH,mBAAmB;QACnB,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QAE7C,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;YAC3B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;SAC1E;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;SACjC,CAAC,CAAC;QACH,oBAAoB;QACpB,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;QAElF,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YAC9C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;gBAC1C,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;aAChC;SACF;IACH,CAAC;IAEM,OAAO;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;IAChE,CAAC;IAEa,YAAY,CACxB,UAA4B,EAAE;;YAE9B,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;gBACtB,cAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;aAChC;YAED,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;gBACrB,MAAM,MAAM,GAAG,cAAM,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;iBACjC,CAAC,CAAA,CA
AC;gBAEH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;gBAErB,IAAI,IAAI,CAAC,sBAAsB,KAAK,CAAC,EAAE;oBACrC,MAAM,MAAM,GAAG,cAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;qBACjC,CAAC,CAAA,CAAC;oBAEH,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;oBACzE,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;wBAC3C,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;qBACrD;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;yBACjC,CAAC,CAAA,CAAC;qBACJ;oBAAC,OAAO,GAAQ,EAAE;wBACjB,gCAAgC;wBAChC,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;qBACjC;oBAED,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;wBACpC,oBAAoB;wBACpB,cAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;qBACnF;iBACF;gBACD,oBAAM,MAAM,CAAA,CAAC;aACd;QACH,CAAC;KAAA;CACF","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of non-interfaces\n/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */\n\nimport \"@azure/core-paging\";\nimport {\n AVRO_CODEC_KEY,\n AVRO_INIT_BYTES,\n AVRO_SCHEMA_KEY,\n AVRO_SYNC_MARKER_SIZE,\n} from \"./AvroConstants\";\nimport { AvroParser, AvroType } from \"./AvroParser\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { arraysEqual } from \"./utils/utils.common\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}): Promise {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal,\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if 
defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal,\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec === undefined || codec === null || codec === \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset === 0) {\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n }\n\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock === 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n } catch (err: any) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js new file mode 100644 index 0000000000..475b1d6158 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
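Every read path above threads an optional abortSignal down to the underlying FileReader or stream listeners, rejecting with an AbortError when the signal fires. A hedged sketch of how a caller might use that hook with @azure/abort-controller (the timeout helper and the deep import path are assumptions for illustration, not part of the package):

// Hypothetical helper: give a single read() call a deadline by aborting it.
import { AbortController } from "@azure/abort-controller";
import { AvroReadableFromStream } from "@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream";

export async function readWithTimeout(nodeReadable, size, timeoutMs) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);
  try {
    // Rejects with an AbortError if the timer fires before `size` bytes arrive.
    return await new AvroReadableFromStream(nodeReadable).read(size, {
      abortSignal: controller.signal,
    });
  } finally {
    clearTimeout(timer);
  }
}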
+export { AvroReader } from "./AvroReader"; +export { AvroReadable } from "./AvroReadable"; +export { AvroReadableFromBlob } from "./AvroReadableFromBlob"; +//# sourceMappingURL=index.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map new file mode 100644 index 0000000000..ef6000cc08 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.browser.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/index.browser.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9C,OAAO,EAAE,oBAAoB,EAAE,MAAM,wBAAwB,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AvroReader } from \"./AvroReader\";\nexport { AvroReadable } from \"./AvroReadable\";\nexport { AvroReadableFromBlob } from \"./AvroReadableFromBlob\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js new file mode 100644 index 0000000000..15ab942a03 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js @@ -0,0 +1,6 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { AvroReader } from "./AvroReader"; +export { AvroReadable } from "./AvroReadable"; +export { AvroReadableFromStream } from "./AvroReadableFromStream"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map new file mode 100644 index 0000000000..c46e8668db --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../storage-internal-avro/src/index.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAElC,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAC;AAC9C,OAAO,EAAE,sBAAsB,EAAE,MAAM,0BAA0B,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport { AvroReader } from \"./AvroReader\";\nexport { AvroReadable } from \"./AvroReadable\";\nexport { AvroReadableFromStream } from \"./AvroReadableFromStream\";\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js new file mode 100644 index 0000000000..135cf5b22e --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
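The two index files above split the entry points by environment: index.js re-exports the stream-backed readable for Node.js, while index.browser.js re-exports the Blob-backed one. A hedged browser-side sketch follows (the deep import path is illustrative only, since this surface is internal to @azure/storage-blob):

// Hypothetical browser sketch: collect all records from a fetched Avro blob.
import {
  AvroReader,
  AvroReadableFromBlob,
} from "@azure/storage-blob/dist-esm/storage-internal-avro/src/index.browser.js";

export async function parseAvroBlob(blob) {
  const reader = new AvroReader(new AvroReadableFromBlob(blob));
  const records = [];
  for await (const record of reader.parseObjects()) {
    records.push(record);
  }
  return records;
}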
+export function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} +//# sourceMappingURL=utils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map new file mode 100644 index 0000000000..2605459340 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/utils/utils.common.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.common.js","sourceRoot":"","sources":["../../../../../storage-internal-avro/src/utils/utils.common.ts"],"names":[],"mappings":"AAAA,uCAAuC;AACvC,kCAAkC;AAOlC,MAAM,UAAU,WAAW,CAAC,CAAa,EAAE,CAAa;IACtD,IAAI,CAAC,KAAK,CAAC;QAAE,OAAO,IAAI,CAAC;IACzB,kCAAkC;IAClC,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI;QAAE,OAAO,KAAK,CAAC;IACzC,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;QAAE,OAAO,KAAK,CAAC;IAExC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACjC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;YAAE,OAAO,KAAK,CAAC;KACjC;IACD,OAAO,IAAI,CAAC;AACd,CAAC","sourcesContent":["// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport interface KeyValuePair {\n key: string;\n value: T;\n}\n\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a === b) return true;\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) return false;\n if (a.length !== b.length) return false;\n\n for (let i = 0; i < a.length; ++i) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist/index.js b/node_modules/@azure/storage-blob/dist/index.js new file mode 100644 index 0000000000..774ddbe84c --- /dev/null +++ b/node_modules/@azure/storage-blob/dist/index.js @@ -0,0 +1,25339 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var coreHttp = require('@azure/core-http'); +var tslib = require('tslib'); +var coreTracing = require('@azure/core-tracing'); +var logger$1 = require('@azure/logger'); +var abortController = require('@azure/abort-controller'); +var os = require('os'); +var crypto = require('crypto'); +var stream = require('stream'); +require('@azure/core-paging'); +var coreLro = require('@azure/core-lro'); +var events = require('events'); +var fs = require('fs'); +var util = require('util'); + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var fs__namespace = /*#__PURE__*/_interopNamespace(fs); +var util__namespace = /*#__PURE__*/_interopNamespace(util); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", + type: { + name: "Composite", + className: "BlobServiceProperties", + modelProperties: { + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", + type: { + name: "Composite", + className: "Logging" + } + }, + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", + type: { + name: "Composite", + className: "Metrics" + } + }, + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule" + } + } + } + }, + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", + type: { + name: "String" + } + }, + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + }, + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite" + } + } + } + } +}; +const Logging = { + serializedName: "Logging", + type: { + name: "Composite", + className: "Logging", + modelProperties: { + version: { + serializedName: "Version", + required: true, + xmlName: "Version", + type: { + name: "String" + } + }, + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", + type: { + name: "Boolean" + } + }, + read: { + serializedName: "Read", + required: true, + xmlName: "Read", + type: { + name: "Boolean" + } + }, + write: { + serializedName: "Write", + required: true, + xmlName: "Write", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +const RetentionPolicy = { + serializedName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + days: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number" + } + } + } + } +}; +const Metrics = { + serializedName: "Metrics", + type: { + name: "Composite", + className: "Metrics", + modelProperties: { + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", + type: { + name: "Boolean" + } + }, + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", + type: { + name: "Composite", + className: "RetentionPolicy" + } + } + } + } +}; +const CorsRule = { + serializedName: "CorsRule", + type: { + name: "Composite", + className: "CorsRule", + modelProperties: { + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", + type: { + name: "String" + } + }, + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", + type: { + name: "String" + } + }, + 
allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", + type: { + name: "String" + } + }, + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", + type: { + name: "String" + } + }, + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", + type: { + name: "Number" + } + } + } + } +}; +const StaticWebsite = { + serializedName: "StaticWebsite", + type: { + name: "Composite", + className: "StaticWebsite", + modelProperties: { + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", + type: { + name: "Boolean" + } + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", + type: { + name: "String" + } + }, + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", + type: { + name: "String" + } + }, + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", + type: { + name: "String" + } + } + } + } +}; +const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", + type: { + name: "String" + } + }, + code: { + serializedName: "Code", + xmlName: "Code", + type: { + name: "String" + } + } + } + } +}; +const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", + type: { + name: "Composite", + className: "BlobServiceStatistics", + modelProperties: { + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication" + } + } + } + } +}; +const GeoReplication = { + serializedName: "GeoReplication", + type: { + name: "Composite", + className: "GeoReplication", + modelProperties: { + status: { + serializedName: "Status", + required: true, + xmlName: "Status", + type: { + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"] + } + }, + lastSyncOn: { + serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListContainersSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", + type: { + name: "Composite", + className: 
"ContainerItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + version: { + serializedName: "Version", + xmlName: "Version", + type: { + name: "String" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "ContainerProperties" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + } + } + } +}; +const ContainerProperties = { + serializedName: "ContainerProperties", + type: { + name: "Composite", + className: "ContainerProperties", + modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: "HasLegalHold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", + type: { + name: "String" + } + }, + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", + type: { + name: "Boolean" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", + type: { + name: "Boolean" + } + } + } + } +}; +const KeyInfo = { + serializedName: "KeyInfo", + type: { + name: "Composite", + className: "KeyInfo", + modelProperties: { + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String" + } + } + } + } +}; +const UserDelegationKey = { + serializedName: "UserDelegationKey", + type: { + name: "Composite", + className: "UserDelegationKey", + modelProperties: { + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", + type: { + name: "String" + } + }, + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", + type: { + name: "String" + } + }, + 
signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", + type: { + name: "String" + } + }, + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", + type: { + name: "String" + } + }, + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", + type: { + name: "String" + } + }, + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "FilterBlobSegment", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + where: { + serializedName: "Where", + required: true, + xmlName: "Where", + type: { + name: "String" + } + }, + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + type: { + name: "String" + } + }, + tags: { + serializedName: "Tags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + } + } + } +}; +const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags", + modelProperties: { + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag" + } + } + } + } + } + } +}; +const BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", + type: { + name: "Composite", + className: "BlobTag", + modelProperties: { + key: { + serializedName: "Key", + required: true, + xmlName: "Key", + type: { + name: "String" + } + }, + value: { + serializedName: "Value", + required: true, + xmlName: "Value", + type: { + name: "String" + } + } + } + } +}; +const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", + type: { + name: "String" + } + }, + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy" + } + } + } + } +}; +const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", + type: { + name: "String" + } + }, + expiresOn: { + serializedName: "Expiry", + xmlName: 
"Expiry", + type: { + name: "String" + } + }, + permissions: { + serializedName: "Permission", + xmlName: "Permission", + type: { + name: "String" + } + } + } + } +}; +const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsFlatSegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobFlatListSegment", + modelProperties: { + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", + type: { + name: "Composite", + className: "BlobItemInternal", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + }, + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", + type: { + name: "Boolean" + } + }, + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", + type: { + name: "String" + } + }, + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", + type: { + name: "Boolean" + } + }, + properties: { + serializedName: "Properties", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal" + } + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", + type: { + name: "Composite", + className: "BlobTags" + } + }, + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", + type: { + name: "Boolean" + } + } + } + } +}; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: 
"String" + } + } + } + } +}; +const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", + type: { + name: "Composite", + className: "BlobPropertiesInternal", + modelProperties: { + createdOn: { + serializedName: "Creation-Time", + xmlName: "Creation-Time", + type: { + name: "DateTimeRfc1123" + } + }, + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", + type: { + name: "String" + } + }, + contentLength: { + serializedName: "Content-Length", + xmlName: "Content-Length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "Content-Type", + xmlName: "Content-Type", + type: { + name: "String" + } + }, + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + }, + contentDisposition: { + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "BlobType", + xmlName: "BlobType", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", + type: { + name: "String" + } + }, + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", + type: { + name: "String" + } + }, + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", + type: { + name: "Boolean" + } + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", + type: { + name: "String" + } + }, + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", + type: { + name: "DateTimeRfc1123" + } + }, + 
remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", + type: { + name: "Number" + } + }, + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + }, + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", + type: { + name: "Enum", + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool" + ] + } + }, + customerProvidedKeySha256: { + serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", + type: { + name: "DateTimeRfc1123" + } + }, + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", + type: { + name: "Boolean" + } + } + } + } +}; +const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, + type: { + name: "String" + } + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String" + } + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String" + } + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number" + } + }, + delimiter: { + serializedName: "Delimiter", + xmlName: "Delimiter", + type: { + name: "String" + } + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment" + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const BlobHierarchyListSegment = { + 
serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix" + } + } + } + }, + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal" + } + } + } + } + } + } +}; +const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "Composite", + className: "BlobName" + } + } + } + } +}; +const BlockLookupList = { + serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + }, + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", + type: { + name: "Sequence", + element: { + type: { + name: "String" + } + } + } + } + } + } +}; +const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + }, + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block" + } + } + } + } + } + } +}; +const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", + type: { + name: "String" + } + }, + size: { + serializedName: "Size", + required: true, + xmlName: "Size", + type: { + name: "Number" + } + } + } + } +}; +const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange" + } + } + } + }, + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange" + } + } + } + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String" + } + } + } + } +}; +const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", 
+ type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", + type: { + name: "Number" + } + }, + end: { + serializedName: "End", + required: true, + xmlName: "End", + type: { + name: "Number" + } + } + } + } +}; +const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", + type: { + name: "String" + } + }, + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", + type: { + name: "String" + } + }, + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + }, + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", + type: { + name: "Composite", + className: "QuerySerialization" + } + } + } + } +}; +const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", + type: { + name: "Composite", + className: "QueryFormat" + } + } + } + } +}; +const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "Enum", + allowedValues: ["delimited", "json", "arrow", "parquet"] + } + }, + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration" + } + }, + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration" + } + }, + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration" + } + }, + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", + xmlName: "ParquetTextConfiguration", + type: { + name: "any" + } + } + } + } +}; +const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", + type: { + name: "Composite", + className: "DelimitedTextConfiguration", + modelProperties: { + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", + type: { + name: "String" + } + }, + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", + type: { + name: "String" + } + }, + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + }, + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", + type: { + name: "String" + } + }, + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", + type: { + name: "Boolean" + } + } + } + } +}; +const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: 
"JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String" + } + } + } + } +}; +const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", + type: { + name: "Composite", + className: "ArrowConfiguration", + modelProperties: { + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField" + } + } + } + } + } + } +}; +const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", + type: { + name: "Composite", + className: "ArrowField", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", + type: { + name: "String" + } + }, + name: { + serializedName: "Name", + xmlName: "Name", + type: { + name: "String" + } + }, + precision: { + serializedName: "Precision", + xmlName: "Precision", + type: { + name: "Number" + } + }, + scale: { + serializedName: "Scale", + xmlName: "Scale", + type: { + name: "Number" + } + } + } + } +}; +const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + 
xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetStatisticsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ServiceListContainersSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetUserDelegationKeyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + 
version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + }, + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ServiceGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerCreateHeaders = { + 
serializedName: "Container_createHeaders", + type: { + name: "Composite", + className: "ContainerCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", + type: { + name: "Composite", + className: "ContainerCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesHeaders", + modelProperties: { + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean" + } + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean" + } + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + }, + denyEncryptionScopeOverride: { + serializedName: 
"x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", + type: { + name: "Composite", + className: "ContainerDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", + type: { + name: "Composite", + className: "ContainerDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyHeaders", + modelProperties: { + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { 
+ name: "Enum", + allowedValues: ["container", "blob"] + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSetAccessPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", + type: { + name: "Composite", + className: "ContainerRestoreHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRestoreExceptionHeaders", + 
modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", + type: { + name: "Composite", + className: "ContainerRenameHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenameExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + } + } + } +}; +const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ContainerSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + 
serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerBreakLeaseHeaders = { + serializedName: 
"Container_breakLeaseHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const 
ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobFlatSegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentHeaders", + modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", + type: { + name: "Composite", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage" + ] + } + } + } + } +}; +const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "ContainerGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", + type: { + name: "Composite", + className: "BlobDownloadHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { 
name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + 
serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", + type: { + name: "Composite", + className: "BlobDownloadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String" + } + }, + objectReplicationRules: { + serializedName: "x-ms-or", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: 
"String" } } + }, + headerCollectionPrefix: "x-ms-or-" + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", + type: { + name: "Boolean" + } + }, + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", + type: { + name: "String" + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"] + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + 
type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String" + } + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + type: { + name: "Boolean" + } + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String" + } + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean" + } + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number" + } + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetPropertiesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", + type: { + name: "Composite", + className: "BlobDeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: 
"x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", + type: { + name: "Composite", + className: "BlobUndeleteExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetExpiryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + 
requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetHttpHeadersExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } + } + } +}; +const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", + type: { + name: "Composite", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + 
name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } + } + } +}; +const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetLegalHoldExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetMetadataExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: 
{ + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: 
"String" + } + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const BlobBreakLeaseExceptionHeaders = { + serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCreateSnapshotExceptionHeaders = { + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + 
modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + 
xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS" + ] + } + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + 
"BlockBlobStorage" + ] + } + } + } + } +}; +const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + metadata: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + } + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String" + } + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String" + } + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String" + } + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] + } + }, + copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123" + } + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String" + } + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"] + } + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken" + ] + } + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", 
"unlocked"] + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } + } + } +}; +const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { 
+ name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + 
xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: 
"x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + 
className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String" + } + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"] + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobCreateExceptionHeaders = 
{ + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: 
"date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String" + } + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean" + } + } + } + } +}; +const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: 
"x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobUploadExceptionHeaders = { + serializedName: "BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: 
"x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + 
type: { + name: "DateTimeRfc1123" + } + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray" + } + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean" + } + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123" + } + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String" + } + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String" + } + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; +const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; + +var Mappers = /*#__PURE__*/Object.freeze({ + __proto__: null, + BlobServiceProperties: BlobServiceProperties, + Logging: Logging, + RetentionPolicy: 
RetentionPolicy, + Metrics: Metrics, + CorsRule: CorsRule, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + BlobServiceStatistics: BlobServiceStatistics, + GeoReplication: GeoReplication, + ListContainersSegmentResponse: ListContainersSegmentResponse, + ContainerItem: ContainerItem, + ContainerProperties: ContainerProperties, + KeyInfo: KeyInfo, + UserDelegationKey: UserDelegationKey, + FilterBlobSegment: FilterBlobSegment, + FilterBlobItem: FilterBlobItem, + BlobTags: BlobTags, + BlobTag: BlobTag, + SignedIdentifier: SignedIdentifier, + AccessPolicy: AccessPolicy, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + BlobFlatListSegment: BlobFlatListSegment, + BlobItemInternal: BlobItemInternal, + BlobName: BlobName, + BlobPropertiesInternal: BlobPropertiesInternal, + ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + BlobHierarchyListSegment: BlobHierarchyListSegment, + BlobPrefix: BlobPrefix, + BlockLookupList: BlockLookupList, + BlockList: BlockList, + Block: Block, + PageList: PageList, + PageRange: PageRange, + ClearRange: ClearRange, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + QueryFormat: QueryFormat, + DelimitedTextConfiguration: DelimitedTextConfiguration, + JsonTextConfiguration: JsonTextConfiguration, + ArrowConfiguration: ArrowConfiguration, + ArrowField: ArrowField, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + 
ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, + ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, + BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobStartCopyFromURLHeaders: 
BlobStartCopyFromURLHeaders, + BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: 
BlockBlobGetBlockListExceptionHeaders +}); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const contentType = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties +}; +const accept = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const url = { + parameterPath: "url", + mapper: { + serializedName: "url", + required: true, + xmlName: "url", + type: { + name: "String" + } + }, + skipEncoding: true +}; +const restype = { + parameterPath: "restype", + mapper: { + defaultValue: "service", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const comp = { + parameterPath: "comp", + mapper: { + defaultValue: "properties", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], + mapper: { + constraints: { + InclusiveMinimum: 0 + }, + serializedName: "timeout", + xmlName: "timeout", + type: { + name: "Number" + } + } +}; +const version = { + parameterPath: "version", + mapper: { + defaultValue: "2021-08-06", + isConstant: true, + serializedName: "x-ms-version", + type: { + name: "String" + } + } +}; +const requestId = { + parameterPath: ["options", "requestId"], + mapper: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + } +}; +const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp1 = { + parameterPath: "comp", + mapper: { + defaultValue: "stats", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp2 = { + parameterPath: "comp", + mapper: { + defaultValue: "list", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prefix = { + parameterPath: ["options", "prefix"], + mapper: { + serializedName: "prefix", + xmlName: "prefix", + type: { + name: "String" + } + } +}; +const marker = { + parameterPath: ["options", "marker"], + mapper: { + serializedName: "marker", + xmlName: "marker", + type: { + name: "String" + } + } +}; +const maxPageSize = { + parameterPath: ["options", "maxPageSize"], + mapper: { + constraints: { + InclusiveMinimum: 1 + }, + serializedName: "maxresults", + xmlName: "maxresults", + type: { + name: "Number" + } + } +}; +const include = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo +}; +const comp3 = { + parameterPath: "comp", + mapper: { + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; 
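Editorial sketch (not part of the vendored file): the constants in this region are AutoRest operation parameters. Each pairs a parameterPath (where the value is read from the caller's arguments or options) with a mapper (how that value is serialized onto the request as a query parameter, header, or body, including constants such as comp and restype). The following minimal illustration is an assumption-laden sketch, reusing only names defined in this file (Mappers, StorageError, the parameter constants, and the coreHttp__namespace Serializer / sendOperationRequest pattern used by the generated clients further below); the client and spec names are illustrative, not part of the package:

    // Hypothetical sketch: how a parameter set and the mapper table combine into one operation call.
    const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true);
    const spec = {
      path: "/",
      httpMethod: "GET",
      responses: {
        200: {},                              // success header/body mappers would go here
        default: { bodyMapper: StorageError } // service errors deserialize via StorageError
      },
      queryParameters: [restype, comp, timeoutInSeconds], // constants defined above
      urlParameters: [url],
      headerParameters: [version, requestId],
      isXML: true,
      serializer
    };
    // The generated Service/Container/Blob classes below do exactly this for each operation:
    client.sendOperationRequest({ options }, spec);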
+const restype1 = { + parameterPath: "restype", + mapper: { + defaultValue: "account", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const body = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const comp4 = { + parameterPath: "comp", + mapper: { + defaultValue: "batch", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const contentLength = { + parameterPath: "contentLength", + mapper: { + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", + type: { + name: "Number" + } + } +}; +const multipartContentType = { + parameterPath: "multipartContentType", + mapper: { + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", + type: { + name: "String" + } + } +}; +const comp5 = { + parameterPath: "comp", + mapper: { + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const where = { + parameterPath: ["options", "where"], + mapper: { + serializedName: "where", + xmlName: "where", + type: { + name: "String" + } + } +}; +const restype2 = { + parameterPath: "restype", + mapper: { + defaultValue: "container", + isConstant: true, + serializedName: "restype", + type: { + name: "String" + } + } +}; +const metadata = { + parameterPath: ["options", "metadata"], + mapper: { + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } } + }, + headerCollectionPrefix: "x-ms-meta-" + } +}; +const access = { + parameterPath: ["options", "access"], + mapper: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"] + } + } +}; +const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope" + ], + mapper: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String" + } + } +}; +const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride" + ], + mapper: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean" + } + } +}; +const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], + mapper: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], + mapper: { + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], + mapper: { + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const comp6 = { + parameterPath: "comp", + mapper: { + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp7 = { + parameterPath: "comp", + mapper: { + defaultValue: "acl", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const containerAcl = { + parameterPath: ["options", "containerAcl"], + mapper: { + serializedName: 
"containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier" + } + } + } + } +}; +const comp8 = { + parameterPath: "comp", + mapper: { + defaultValue: "undelete", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], + mapper: { + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", + type: { + name: "String" + } + } +}; +const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], + mapper: { + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", + type: { + name: "String" + } + } +}; +const comp9 = { + parameterPath: "comp", + mapper: { + defaultValue: "rename", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const sourceContainerName = { + parameterPath: "sourceContainerName", + mapper: { + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", + type: { + name: "String" + } + } +}; +const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], + mapper: { + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", + type: { + name: "String" + } + } +}; +const comp10 = { + parameterPath: "comp", + mapper: { + defaultValue: "lease", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const action = { + parameterPath: "action", + mapper: { + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const duration = { + parameterPath: ["options", "duration"], + mapper: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Number" + } + } +}; +const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], + mapper: { + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String" + } + } +}; +const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number" + } + } +}; +const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String" + } + } +}; +const proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String" + } + } +}; +const include1 = { + 
parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions" + ] + } + } + } + }, + collectionFormat: coreHttp.QueryCollectionFormat.Csv +}; +const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String" + } + } +}; +const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String" + } + } +}; +const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String" + } + } +}; +const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean" + } + } +}; +const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean" + } + } +}; +const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: "x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String" + } + } +}; +const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String" + } + } +}; +const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String" + } + } +}; +const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String" + } + } +}; +const ifNoneMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String" + } + } +}; +const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String" + } + } +}; +const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"] + } + } +}; +const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String" + } + } +}; +const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + 
serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String" + } + } +}; +const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String" + } + } +}; +const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String" + } + } +}; +const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", + xmlName: "x-ms-blob-content-type", + type: { + name: "String" + } + } +}; +const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray" + } + } +}; +const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String" + } + } +}; +const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String" + } + } +}; +const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String" + } + } +}; +const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123" + } + } +}; +const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"] + } + } +}; +const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + } +}; +const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +const rehydratePriority = { + 
parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"] + } + } +}; +const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince" + ], + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123" + } + } +}; +const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String" + } + } +}; +const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch" + ], + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String" + } + } +}; +const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String" + } + } +}; +const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String" + } + } +}; +const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean" + } + } +}; +const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean" + } + } +}; +const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String" + } + } +}; +const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray" + } + } +}; +const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String" + } + } +}; +const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: "x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"] + } + } +}; +const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String" + } + } +}; +const 
copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String" + } + } +}; +const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive" + ] + } + } +}; +const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest +}; +const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags +}; +const transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray" + } + } +}; +const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number" + } + } +}; +const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number" + } + } +}; +const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String" + } + } +}; +const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream" + } + } +}; +const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String" + } + } +}; +const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number" + } + } +}; +const ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan" + ], + mapper: { 
+ serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number" + } + } +}; +const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo" + ], + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number" + } + } +}; +const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String" + } + } +}; +const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String" + } + } +}; +const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray" + } + } +}; +const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String" + } + } +}; +const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String" + } + } +}; +const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String" + } + } +}; +const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"] + } + } +}; +const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number" + } + } +}; +const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition" + ], + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number" + } + } +}; +const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String" + } + } +}; +const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: 
"comp", + type: { + name: "String" + } + } +}; +const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String" + } + } +}; +const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean" + } + } +}; +const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String" + } + } +}; +const blocks = { + parameterPath: "blocks", + mapper: BlockLookupList +}; +const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String" + } + } +}; +const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"] + } + } +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Service. */ +class Service { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. + */ + setProperties(blobServiceProperties, options) { + const operationArguments = { + blobServiceProperties, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. 
+ */ + listContainersSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + const operationArguments = { + keyInfo, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. 
+ */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders + } + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders + } + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + include + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; +const getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders + } + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$5 +}; +const submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + 
responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ServiceSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5 +}; +const filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$5 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Container. */ +class Container { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. + */ + create(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + } + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); + } + /** + * operation marks the specified container for deletion. The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + } + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + } + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. 
+ */ + getAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + } + /** + * sets the permissions for the specified container. The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + const operationArguments = { + sourceContainerName, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + const operationArguments = { + contentLength, + multipartContentType, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. 
+ */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. + */ + listBlobFlatSegment(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + } + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + const operationArguments = { + delimiter, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
+ */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); + } +} +// Operation Specifications +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const deleteOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" } + } + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier" + }, + headersMapper: ContainerGetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders + } + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + 
requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + deletedContainerName, + deletedContainerVersion + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + sourceContainerName, + sourceLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: ContainerSubmitBatchHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders + } + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + contentLength, + multipartContentType + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4 +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerAcquireLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const renewLeaseOperationSpec$1 = { + path: "/{containerName}", 
+ httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const changeLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$4 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a Blob. */ +class Blob$1 { + /** + * Initialize a new instance of the class Blob class. 
+ * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + } + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + } + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. + */ + setExpiry(expiryOptions, options) { + const operationArguments = { + expiryOptions, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + } + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. 
+ */ + setHttpHeaders(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + } + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + } + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. + */ + deleteImmutabilityPolicy(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + } + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + const operationArguments = { + legalHold, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + } + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + acquireLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + const operationArguments = { + leaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. 
+ * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + const operationArguments = { + leaseId, + proposedLeaseId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + } + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + } + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + const operationArguments = { + copyId, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + } + /** + * The Set Tier operation sets the tier on a blob. 
The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. + */ + setTier(tier, options) { + const operationArguments = { + tier, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + } + /** + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. + */ + query(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + } + /** + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. + */ + getTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + } + /** + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. 
+ */ + setTags(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobDownloadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDownloadExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + expiryOptions, + expiresOn + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetHttpHeadersHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: 
BlobSetHttpHeadersExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp13], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + legalHold + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const releaseLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobReleaseLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobReleaseLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + 
leaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobChangeLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobChangeLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobCreateSnapshotHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCreateSnapshotExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobStartCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCopyFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + 
leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization, + copySourceTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp15, + copyId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + copyActionAbortConstant + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetTierHeaders + }, + 202: { + headersMapper: BlobSetTierHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTierExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp16 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1 + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: BlobGetAccountInfoHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetAccountInfoExceptionHeaders + } + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$3 +}; +const queryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse" + }, + headersMapper: BlobQueryHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobQueryExceptionHeaders + } + }, + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetTagsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer$3 +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders + }, + default: { + bodyMapper: 
StorageError, + headersMapper: BlobSetTagsExceptionHeaders + } + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18 + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a PageBlob. */ +class PageBlob { + /** + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + create(contentLength, blobContentLength, options) { + const operationArguments = { + contentLength, + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); + } + /** + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + uploadPages(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + } + /** + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. + */ + clearPages(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. + * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. 
+ */ + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + const operationArguments = { + sourceUrl, + sourceRange, + contentLength, + range, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + } + /** + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. + */ + getPageRanges(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + } + /** + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. + */ + getPageRangesDiff(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + } + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + const operationArguments = { + blobContentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + } + /** + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. + */ + updateSequenceNumber(sequenceNumberAction, options) { + const operationArguments = { + sequenceNumberAction, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + } + /** + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
+ */ + copyIncremental(copySource, options) { + const operationArguments = { + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec$1 = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo + ], + mediaType: "binary", + serializer: serializer$2 +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, 
+ ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1 + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders + } + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction + ], + isXML: true, + serializer: xmlSerializer$2 +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource + ], + isXML: true, + serializer: xmlSerializer$2 +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. 
+ * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a AppendBlob. */ +class AppendBlob { + /** + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + create(contentLength, options) { + const operationArguments = { + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, createOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. + */ + appendBlockFromUrl(sourceUrl, contentLength, options) { + const operationArguments = { + sourceUrl, + contentLength, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + } + /** + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. 
+ */ + seal(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobCreateHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition + ], + mediaType: "binary", + serializer: serializer$1 +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer$1 +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: AppendBlobSealHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobSealExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds, comp23], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + appendPosition + ], + isXML: true, + serializer: xmlSerializer$1 +}; + +/* + * Copyright (c) Microsoft Corporation. 
+ * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class representing a BlockBlob. */ +class BlockBlob { + /** + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + upload(contentLength, body, options) { + const operationArguments = { + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + } + /** + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + putBlobFromUrl(contentLength, copySource, options) { + const operationArguments = { + contentLength, + copySource, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + stageBlock(blockId, contentLength, body, options) { + const operationArguments = { + blockId, + contentLength, + body, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + } + /** + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. + */ + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + const operationArguments = { + blockId, + contentLength, + sourceUrl, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + } + /** + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. + */ + commitBlockList(blocks, options) { + const operationArguments = { + blocks, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. 
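+ * @example Illustrative flow only, not part of the generated sources. It assumes a
+ * configured StorageClientContext named `client`, a Buffer named `content`, and a
+ * base64 block id named `blockId`:
+ * const blockBlob = new BlockBlob(client);
+ * await blockBlob.stageBlock(blockId, content.length, content, {});
+ * await blockBlob.commitBlockList({ latest: [blockId] }, {});
+ * const list = await blockBlob.getBlockList("committed", {});
+ * // list.committedBlocks should now include the committed block and its size.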
+ */ + getBlockList(listType, options) { + const operationArguments = { + listType, + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + } +} +// Operation Specifications +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobUploadHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobUploadExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + contentType1, + accept2, + blobType2 + ], + mediaType: "binary", + serializer +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobPutBlobFromUrlHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders + } + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + transactionalContentMD5, + blobType2, + copySourceBlobProperties + ], + isXML: true, + serializer: xmlSerializer +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockExceptionHeaders + } + }, + requestBody: body1, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2 + ], + mediaType: "binary", + serializer +}; +const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + 
contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1 + ], + isXML: true, + serializer: xmlSerializer +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobCommitBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobCommitBlockListExceptionHeaders + } + }, + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64 + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlockList, + headersMapper: BlockBlobGetBlockListHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobGetBlockListExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp25, + listType + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags + ], + isXML: true, + serializer: xmlSerializer +}; + +// Copyright (c) Microsoft Corporation. +/** + * The `@azure/logger` configuration for this package. + */ +const logger = logger$1.createClientLogger("storage-blob"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const SDK_VERSION = "12.11.0"; +const SERVICE_VERSION = "2021-08-06"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +/** + * The OAuth scope to use with Azure Storage. 
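+ * Illustrative note, not part of the generated text: token-credential pipelines request
+ * tokens for this scope, e.g. credential.getToken(StorageOAuthScopes), as the token
+ * cycler further below does with the scopes it is given.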
+ */ +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, +}; +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, +}; +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", +}; +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + "x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + 
"x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + "sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; + +// Copyright (c) Microsoft Corporation. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. 
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path || "/"; + path = escape(path); + urlParsed.setPath(path); + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; + } + } + } + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. 
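+ * @example Illustrative only, with hypothetical placeholder values (not part of the
+ * generated sources):
+ * extractConnectionStringParts(
+ *   "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=<base64-key>;EndpointSuffix=core.windows.net"
+ * )
+ * // => { kind: "AccountConnString",
+ * //      url: "https://myaccount.blob.core.windows.net",
+ * //      accountName: "myaccount",
+ * //      accountKey: <Buffer decoded from the base64 key>,
+ * //      proxyUri: "" }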
+ */ +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); + } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + const accountName = getAccountNameFromUrl(blobEndpoint); + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +function appendToURLPath(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let path = urlParsed.getPath(); + path = path ? (path.endsWith("/") ? 
`${path}${name}` : `${path}/${name}`) : name; + urlParsed.setPath(path); + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. + * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setQueryParameter(name, value); + return urlParsed.toString(); +} +/** + * Get URL parameter by name. + * + * @param url - + * @param name - + */ +function getURLParameter(url, name) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getQueryParameterValue(name); +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = coreHttp.URLBuilder.parse(url); + urlParsed.setHost(host); + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +function getURLPath(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getPath(); +} +/** + * Get URL scheme from an URL string. + * + * @param url - Source URL string + */ +function getURLScheme(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + return urlParsed.getScheme(); +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = coreHttp.URLBuilder.parse(url); + const pathString = urlParsed.getPath(); + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.getQuery() || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = coreHttp.URLBuilder.parse(url).getQuery(); + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. + */ +function appendToURLQuery(url, queryParts) { + const urlParsed = coreHttp.URLBuilder.parse(url); + let query = urlParsed.getQuery(); + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.setQuery(query); + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. 
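+ * For example (illustrative): for the date 2018-10-29T06:34:36.139Z this returns
+ * "2018-10-29T06:34:36.1390000Z" when withMilliseconds is true, and
+ * "2018-10-29T06:34:36Z" when it is false.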
+ * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +function base64encode(content) { + return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. + * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); + } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); + } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); + } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); + } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; + } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); + } + return padString.slice(0, targetLength) + currentString; + } +} +/** + * If two strings are equal when compared case insensitive. 
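+ * For example (illustrative): iEqual("Content-MD5", "content-md5") returns true.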
+ * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = coreHttp.URLBuilder.parse(url); + let accountName; + try { + if (parsedUrl.getHost().split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.getHost().split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.getPath().split("/")[1]; + } + else { + // Custom domain case: "https://customdomain.com/containername/blob". + accountName = ""; + } + return accountName; + } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); + } +} +function isIpEndpointStyle(parsedUrl) { + if (parsedUrl.getHost() === undefined) { + return false; + } + const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port), use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. + return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; + } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); + } + } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. + * + * @param tags - + */ +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); + } + } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. 
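+ * For example (illustrative), a CSV text configuration { kind: "csv", recordSeparator: "\n" }
+ * maps to { format: { type: "delimited", delimitedTextConfiguration: { columnSeparator: ",",
+ * fieldQuote: "", recordSeparator: "\n", escapeChar: "", headersPresent: false } } },
+ * with the defaults shown in the switch below filled in for the omitted fields.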
+ * + * @param textConfiguration - + */ +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. + return undefined; + } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); + } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); + } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); + } + } + return orProperties; +} +/** + * Attach a TokenCredential to an object. + * + * @param thing - + * @param credential - + */ +function attachCredential(thing, credential) { + thing.credential = credential; + return thing; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (coreHttp.isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobProperties(blobPropertiesInXML) { + const blobProperties = blobPropertiesInXML; + if (blobPropertiesInXML["Creation-Time"]) { + blobProperties.createdOn = new Date(blobPropertiesInXML["Creation-Time"]); + delete blobProperties["Creation-Time"]; + } + if (blobPropertiesInXML["Last-Modified"]) { + blobProperties.lastModified = new Date(blobPropertiesInXML["Last-Modified"]); + delete blobProperties["Last-Modified"]; + } + if (blobPropertiesInXML["Etag"]) { + blobProperties.etag = blobPropertiesInXML["Etag"]; + delete blobProperties["Etag"]; + } + if (blobPropertiesInXML["Content-Length"]) { + blobProperties.contentLength = parseFloat(blobPropertiesInXML["Content-Length"]); + delete blobProperties["Content-Length"]; + } + if (blobPropertiesInXML["Content-Type"]) { + blobProperties.contentType = blobPropertiesInXML["Content-Type"]; + delete blobProperties["Content-Type"]; + } + if (blobPropertiesInXML["Content-Encoding"]) { + blobProperties.contentEncoding = blobPropertiesInXML["Content-Encoding"]; + delete blobProperties["Content-Encoding"]; + } + if (blobPropertiesInXML["Content-Language"]) { + blobProperties.contentLanguage = blobPropertiesInXML["Content-Language"]; + delete blobProperties["Content-Language"]; + } + if (blobPropertiesInXML["Content-MD5"]) { + blobProperties.contentMD5 = decodeBase64String(blobPropertiesInXML["Content-MD5"]); + delete blobProperties["Content-MD5"]; + } + if (blobPropertiesInXML["Content-Disposition"]) { + blobProperties.contentDisposition = blobPropertiesInXML["Content-Disposition"]; + delete blobProperties["Content-Disposition"]; + } + if (blobPropertiesInXML["Cache-Control"]) { + blobProperties.cacheControl = blobPropertiesInXML["Cache-Control"]; + delete blobProperties["Cache-Control"]; + } + if (blobPropertiesInXML["x-ms-blob-sequence-number"]) { + blobProperties.blobSequenceNumber = parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]); + delete blobProperties["x-ms-blob-sequence-number"]; + } + if (blobPropertiesInXML["BlobType"]) { + blobProperties.blobType = blobPropertiesInXML["BlobType"]; + delete blobProperties["BlobType"]; + } + if (blobPropertiesInXML["LeaseStatus"]) { + blobProperties.leaseStatus = blobPropertiesInXML["LeaseStatus"]; + delete blobProperties["LeaseStatus"]; + } + if (blobPropertiesInXML["LeaseState"]) { + blobProperties.leaseState = 
blobPropertiesInXML["LeaseState"]; + delete blobProperties["LeaseState"]; + } + if (blobPropertiesInXML["LeaseDuration"]) { + blobProperties.leaseDuration = blobPropertiesInXML["LeaseDuration"]; + delete blobProperties["LeaseDuration"]; + } + if (blobPropertiesInXML["CopyId"]) { + blobProperties.copyId = blobPropertiesInXML["CopyId"]; + delete blobProperties["CopyId"]; + } + if (blobPropertiesInXML["CopyStatus"]) { + blobProperties.copyStatus = blobPropertiesInXML["CopyStatus"]; + delete blobProperties["CopyStatus"]; + } + if (blobPropertiesInXML["CopySource"]) { + blobProperties.copySource = blobPropertiesInXML["CopySource"]; + delete blobProperties["CopySource"]; + } + if (blobPropertiesInXML["CopyProgress"]) { + blobProperties.copyProgress = blobPropertiesInXML["CopyProgress"]; + delete blobProperties["CopyProgress"]; + } + if (blobPropertiesInXML["CopyCompletionTime"]) { + blobProperties.copyCompletedOn = new Date(blobPropertiesInXML["CopyCompletionTime"]); + delete blobProperties["CopyCompletionTime"]; + } + if (blobPropertiesInXML["CopyStatusDescription"]) { + blobProperties.copyStatusDescription = blobPropertiesInXML["CopyStatusDescription"]; + delete blobProperties["CopyStatusDescription"]; + } + if (blobPropertiesInXML["ServerEncrypted"]) { + blobProperties.serverEncrypted = ParseBoolean(blobPropertiesInXML["ServerEncrypted"]); + delete blobProperties["ServerEncrypted"]; + } + if (blobPropertiesInXML["IncrementalCopy"]) { + blobProperties.incrementalCopy = ParseBoolean(blobPropertiesInXML["IncrementalCopy"]); + delete blobProperties["IncrementalCopy"]; + } + if (blobPropertiesInXML["DestinationSnapshot"]) { + blobProperties.destinationSnapshot = blobPropertiesInXML["DestinationSnapshot"]; + delete blobProperties["DestinationSnapshot"]; + } + if (blobPropertiesInXML["DeletedTime"]) { + blobProperties.deletedOn = new Date(blobPropertiesInXML["DeletedTime"]); + delete blobProperties["DeletedTime"]; + } + if (blobPropertiesInXML["RemainingRetentionDays"]) { + blobProperties.remainingRetentionDays = parseFloat(blobPropertiesInXML["RemainingRetentionDays"]); + delete blobProperties["RemainingRetentionDays"]; + } + if (blobPropertiesInXML["AccessTier"]) { + blobProperties.accessTier = blobPropertiesInXML["AccessTier"]; + delete blobProperties["AccessTier"]; + } + if (blobPropertiesInXML["AccessTierInferred"]) { + blobProperties.accessTierInferred = ParseBoolean(blobPropertiesInXML["AccessTierInferred"]); + delete blobProperties["AccessTierInferred"]; + } + if (blobPropertiesInXML["ArchiveStatus"]) { + blobProperties.archiveStatus = blobPropertiesInXML["ArchiveStatus"]; + delete blobProperties["ArchiveStatus"]; + } + if (blobPropertiesInXML["CustomerProvidedKeySha256"]) { + blobProperties.customerProvidedKeySha256 = blobPropertiesInXML["CustomerProvidedKeySha256"]; + delete blobProperties["CustomerProvidedKeySha256"]; + } + if (blobPropertiesInXML["EncryptionScope"]) { + blobProperties.encryptionScope = blobPropertiesInXML["EncryptionScope"]; + delete blobProperties["EncryptionScope"]; + } + if (blobPropertiesInXML["AccessTierChangeTime"]) { + blobProperties.accessTierChangedOn = new Date(blobPropertiesInXML["AccessTierChangeTime"]); + delete blobProperties["AccessTierChangeTime"]; + } + if (blobPropertiesInXML["TagCount"]) { + blobProperties.tagCount = parseFloat(blobPropertiesInXML["TagCount"]); + delete blobProperties["TagCount"]; + } + if (blobPropertiesInXML["Expiry-Time"]) { + blobProperties.expiresOn = new Date(blobPropertiesInXML["Expiry-Time"]); + delete 
blobProperties["Expiry-Time"]; + } + if (blobPropertiesInXML["Sealed"]) { + blobProperties.isSealed = ParseBoolean(blobPropertiesInXML["Sealed"]); + delete blobProperties["Sealed"]; + } + if (blobPropertiesInXML["RehydratePriority"]) { + blobProperties.rehydratePriority = blobPropertiesInXML["RehydratePriority"]; + delete blobProperties["RehydratePriority"]; + } + if (blobPropertiesInXML["LastAccessTime"]) { + blobProperties.lastAccessedOn = new Date(blobPropertiesInXML["LastAccessTime"]); + delete blobProperties["LastAccessTime"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyUntilDate"]) { + blobProperties.immutabilityPolicyExpiresOn = new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]); + delete blobProperties["ImmutabilityPolicyUntilDate"]; + } + if (blobPropertiesInXML["ImmutabilityPolicyMode"]) { + blobProperties.immutabilityPolicyMode = blobPropertiesInXML["ImmutabilityPolicyMode"]; + delete blobProperties["ImmutabilityPolicyMode"]; + } + if (blobPropertiesInXML["LegalHold"]) { + blobProperties.legalHold = ParseBoolean(blobPropertiesInXML["LegalHold"]); + delete blobProperties["LegalHold"]; + } + return blobProperties; +} +function ParseBlobItem(blobInXML) { + const blobItem = blobInXML; + blobItem.properties = ParseBlobProperties(blobInXML["Properties"]); + delete blobItem["Properties"]; + blobItem.name = ParseBlobName(blobInXML["Name"]); + delete blobItem["Name"]; + blobItem.deleted = ParseBoolean(blobInXML["Deleted"]); + delete blobItem["Deleted"]; + if (blobInXML["Snapshot"]) { + blobItem.snapshot = blobInXML["Snapshot"]; + delete blobItem["Snapshot"]; + } + if (blobInXML["VersionId"]) { + blobItem.versionId = blobInXML["VersionId"]; + delete blobItem["VersionId"]; + } + if (blobInXML["IsCurrentVersion"]) { + blobItem.isCurrentVersion = ParseBoolean(blobInXML["IsCurrentVersion"]); + delete blobItem["IsCurrentVersion"]; + } + if (blobInXML["Metadata"]) { + blobItem.metadata = blobInXML["Metadata"]; + delete blobItem["Metadata"]; + } + if (blobInXML["Tags"]) { + blobItem.blobTags = ParseBlobTags(blobInXML["Tags"]); + delete blobItem["Tags"]; + } + if (blobInXML["OrMetadata"]) { + blobItem.objectReplicationMetadata = blobInXML["OrMetadata"]; + delete blobItem["OrMetadata"]; + } + if (blobInXML["HasVersionsOnly"]) { + blobItem.hasVersionsOnly = ParseBoolean(blobInXML["HasVersionsOnly"]); + delete blobItem["HasVersionsOnly"]; + } + return blobItem; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if (blobPrefixesInXML 
instanceof Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; + } + else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; + } + } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + return this._nextPolicy.sendRequest(request); + } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. 
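+ * With the default options defined below (retryDelayInMs = 4 * 1000, maxRetryDelayInMs =
+ * 120 * 1000), the delay after try number n is min((2^(n - 1) - 1) * 4s, 120s), i.e.
+ * roughly 0s, 4s, 12s, 28s, ... This reading of the delay() implementation further
+ * below is illustrative and not part of the generated text.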
+ */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; + } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; + } + } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); + } + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; + } + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); + } +} + +// Copyright (c) Microsoft Corporation. 
+/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +class CredentialPolicy extends coreHttp.BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); + } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicy is a policy used to tag user-agent header for every requests. + */ +class TelemetryPolicy extends coreHttp.BaseRequestPolicy { + /** + * Creates an instance of TelemetryPolicy. + * @param nextPolicy - + * @param options - + * @param telemetry - + */ + constructor(nextPolicy, options, telemetry) { + super(nextPolicy, options); + this.telemetry = telemetry; + } + /** + * Sends out request. 
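+ * On Node.js this sets a User-Agent header of the form assembled by TelemetryPolicyFactory
+ * below, e.g. (illustrative values) "azsdk-js-storageblob/12.11.0 (NODE-VERSION v16.14.2;
+ * Windows_NT 10.0.16299)".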
+ * + * @param request - + */ + async sendRequest(request) { + if (coreHttp.isNode) { + if (!request.headers) { + request.headers = new coreHttp.HttpHeaders(); + } + if (!request.headers.get(HeaderConstants.USER_AGENT)) { + request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); + } + } + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. + */ +class TelemetryPolicyFactory { + /** + * Creates an instance of TelemetryPolicyFactory. + * @param telemetry - + */ + constructor(telemetry) { + const userAgentInfo = []; + if (coreHttp.isNode) { + if (telemetry) { + const telemetryString = telemetry.userAgentPrefix || ""; + if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { + userAgentInfo.push(telemetryString); + } + } + // e.g. azsdk-js-storageblob/10.0.0 + const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; + if (userAgentInfo.indexOf(libInfo) === -1) { + userAgentInfo.push(libInfo); + } + // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) + let runtimeInfo = `(NODE-VERSION ${process.version})`; + if (os__namespace) { + runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; + } + if (userAgentInfo.indexOf(runtimeInfo) === -1) { + userAgentInfo.push(runtimeInfo); + } + } + this.telemetryString = userAgentInfo.join(" "); + } + /** + * Creates a TelemetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + } +} + +// Copyright (c) Microsoft Corporation. +const _defaultHttpClient = new coreHttp.DefaultHttpClient(); +function getCachedDefaultHttpClient() { + return _defaultHttpClient; +} + +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. 
+ // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + // when options.httpClient is not specified, passing in a DefaultHttpClient instance to + // avoid each client creating its own http client. + this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; + } +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +function newPipeline(credential, pipelineOptions = {}) { + var _a; + if (credential === undefined) { + credential = new AnonymousCredential(); + } + // Order is important. Closer to the API at the top & closer to the network at the bottom. 
+ // The credential's policy factory must appear close to the wire so it can sign any + // changes made by other factories (like UniqueRequestIDPolicyFactory) + const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); + const factories = [ + coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), + coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), + telemetryPolicy, + coreHttp.generateClientRequestIdPolicy(), + new StorageBrowserPolicyFactory(), + new StorageRetryPolicyFactory(pipelineOptions.retryOptions), + // Default deserializationPolicy is provided by protocol layer + // Use customized XML char key of "#" so we could deserialize metadata + // with "_" key + coreHttp.deserializationPolicy(undefined, { xmlCharKey: "#" }), + coreHttp.logPolicy({ + logger: logger.info, + allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), + ]; + if (coreHttp.isNode) { + // policies only available in Node.js runtime, not in browsers + factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); + factories.push(coreHttp.disableResponseDecompressionPolicy()); + } + factories.push(coreHttp.isTokenCredential(credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) + : credential); + return new Pipeline(factories, pipelineOptions); +} + +// Copyright (c) Microsoft Corporation. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. 
+ * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; + } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const packageName = "azure-storage-blob"; +const packageVersion = "12.11.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. 
+ * @param options The parameter options + */ + constructor(url, options) { + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + if (!options.userAgent) { + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); + options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; + } + super(undefined, options); + this.requestContentType = "application/json; charset=utf-8"; + this.baseUri = options.endpoint || "{url}"; + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2021-08-06"; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +class StorageClient { + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + constructor(url, pipeline) { + // URL should be encoded and only once, protocol layer shouldn't encode URL again + this.url = escapeURLPath(url); + this.accountName = getAccountNameFromUrl(url); + this.pipeline = pipeline; + this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); + this.credential = new AnonymousCredential(); + for (const factory of this.pipeline.factories) { + if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || + factory instanceof AnonymousCredential) { + this.credential = factory; + } + else if (coreHttp.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. + this.credential = factory.credential; + } + } + // Override protocol layer's default content-type + const storageClientContext = this.storageClientContext; + storageClientContext.requestContentType = undefined; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a span using the global tracer. + * @internal + */ +const createSpan = coreTracing.createSpanFunction({ + packagePrefix: "Azure.Storage.Blob", + namespace: "Microsoft.Storage", +}); +/** + * @internal + * + * Adapt the tracing options from OperationOptions to what they need to be for + * RequestOptionsBase (when we update to later OpenTelemetry versions this is now + * two separate fields, not just one). + */ +function convertTracingToRequestOptionsBase(options) { + var _a, _b; + return { + // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. + spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class BlobSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions) { + const blobSASPermissions = new BlobSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + blobSASPermissions.read = true; + break; + case "a": + blobSASPermissions.add = true; + break; + case "c": + blobSASPermissions.create = true; + break; + case "w": + blobSASPermissions.write = true; + break; + case "d": + blobSASPermissions.delete = true; + break; + case "x": + blobSASPermissions.deleteVersion = true; + break; + case "t": + blobSASPermissions.tag = true; + break; + case "m": + blobSASPermissions.move = true; + break; + case "e": + blobSASPermissions.execute = true; + break; + case "i": + blobSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + blobSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission: ${char}`); + } + } + return blobSASPermissions; + } + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const blobSASPermissions = new BlobSASPermissions(); + if (permissionLike.read) { + blobSASPermissions.read = true; + } + if (permissionLike.add) { + blobSASPermissions.add = true; + } + if (permissionLike.create) { + blobSASPermissions.create = true; + } + if (permissionLike.write) { + blobSASPermissions.write = true; + } + if (permissionLike.delete) { + blobSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + blobSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + blobSASPermissions.tag = true; + } + if (permissionLike.move) { + blobSASPermissions.move = true; + } + if (permissionLike.execute) { + blobSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + blobSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } + return blobSASPermissions; + } + /** + * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class ContainerSASPermissions { + constructor() { + /** + * Specifies Read access granted. + */ + this.read = false; + /** + * Specifies Add access granted. + */ + this.add = false; + /** + * Specifies Create access granted. + */ + this.create = false; + /** + * Specifies Write access granted. + */ + this.write = false; + /** + * Specifies Delete access granted. + */ + this.delete = false; + /** + * Specifies Delete version access granted. + */ + this.deleteVersion = false; + /** + * Specifies List access granted. + */ + this.list = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Specifies Move access granted. + */ + this.move = false; + /** + * Specifies Execute access granted. + */ + this.execute = false; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; + } + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. 
+ * + * @param permissions - + */ + static parse(permissions) { + const containerSASPermissions = new ContainerSASPermissions(); + for (const char of permissions) { + switch (char) { + case "r": + containerSASPermissions.read = true; + break; + case "a": + containerSASPermissions.add = true; + break; + case "c": + containerSASPermissions.create = true; + break; + case "w": + containerSASPermissions.write = true; + break; + case "d": + containerSASPermissions.delete = true; + break; + case "l": + containerSASPermissions.list = true; + break; + case "t": + containerSASPermissions.tag = true; + break; + case "x": + containerSASPermissions.deleteVersion = true; + break; + case "m": + containerSASPermissions.move = true; + break; + case "e": + containerSASPermissions.execute = true; + break; + case "i": + containerSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; + default: + throw new RangeError(`Invalid permission ${char}`); + } + } + return containerSASPermissions; + } + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const containerSASPermissions = new ContainerSASPermissions(); + if (permissionLike.read) { + containerSASPermissions.read = true; + } + if (permissionLike.add) { + containerSASPermissions.add = true; + } + if (permissionLike.create) { + containerSASPermissions.create = true; + } + if (permissionLike.write) { + containerSASPermissions.write = true; + } + if (permissionLike.delete) { + containerSASPermissions.delete = true; + } + if (permissionLike.list) { + containerSASPermissions.list = true; + } + if (permissionLike.deleteVersion) { + containerSASPermissions.deleteVersion = true; + } + if (permissionLike.tag) { + containerSASPermissions.tag = true; + } + if (permissionLike.move) { + containerSASPermissions.move = true; + } + if (permissionLike.execute) { + containerSASPermissions.execute = true; + } + if (permissionLike.setImmutabilityPolicy) { + containerSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } + return containerSASPermissions; + } + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.list) { + permissions.push("l"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.move) { + permissions.push("m"); + } + if (this.execute) { + permissions.push("e"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * UserDelegationKeyCredential is only used for generation of user delegation SAS. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas + */ +class UserDelegationKeyCredential { + /** + * Creates an instance of UserDelegationKeyCredential. + * @param accountName - + * @param userDelegationKey - + */ + constructor(accountName, userDelegationKey) { + this.accountName = accountName; + this.userDelegationKey = userDelegationKey; + this.key = Buffer.from(userDelegationKey.value, "base64"); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate SasIPRange format string. For example: + * + * "8.8.8.8" or "1.1.1.1-255.255.255.255" + * + * @param ipRange - + */ +function ipRangeToString(ipRange) { + return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; +} + +// Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; +(function (SASProtocol) { + /** + * Protocol that allows HTTPS only + */ + SASProtocol["Https"] = "https"; + /** + * Protocol that allows both HTTPS and HTTP + */ + SASProtocol["HttpsAndHttp"] = "https,http"; +})(exports.SASProtocol || (exports.SASProtocol = {})); +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. 
+ */ +class SASQueryParameters { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { + this.version = version; + this.signature = signature; + if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { + // SASQueryParametersOptions + this.permissions = permissionsOrOptions.permissions; + this.services = permissionsOrOptions.services; + this.resourceTypes = permissionsOrOptions.resourceTypes; + this.protocol = permissionsOrOptions.protocol; + this.startsOn = permissionsOrOptions.startsOn; + this.expiresOn = permissionsOrOptions.expiresOn; + this.ipRangeInner = permissionsOrOptions.ipRange; + this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; + this.resource = permissionsOrOptions.resource; + this.cacheControl = permissionsOrOptions.cacheControl; + this.contentDisposition = permissionsOrOptions.contentDisposition; + this.contentEncoding = permissionsOrOptions.contentEncoding; + this.contentLanguage = permissionsOrOptions.contentLanguage; + this.contentType = permissionsOrOptions.contentType; + if (permissionsOrOptions.userDelegationKey) { + this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; + this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; + this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; + this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; + this.signedService = permissionsOrOptions.userDelegationKey.signedService; + this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; + this.correlationId = permissionsOrOptions.correlationId; + } + } + else { + this.services = services; + this.resourceTypes = resourceTypes; + this.expiresOn = expiresOn; + this.permissions = permissionsOrOptions; + this.protocol = protocol; + this.startsOn = startsOn; + this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; + this.identifier = identifier; + this.resource = resource; + this.cacheControl = cacheControl; + this.contentDisposition = contentDisposition; + this.contentEncoding = contentEncoding; + this.contentLanguage = contentLanguage; + this.contentType = contentType; + if (userDelegationKey) { + this.signedOid = userDelegationKey.signedObjectId; + this.signedTenantId = userDelegationKey.signedTenantId; + this.signedStartsOn = userDelegationKey.signedStartsOn; + this.signedExpiresOn = userDelegationKey.signedExpiresOn; + this.signedService = userDelegationKey.signedService; + this.signedVersion = userDelegationKey.signedVersion; + this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; + this.correlationId = correlationId; + } + } + } + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. 
+ * + */ + toString() { + const params = [ + "sv", + "ss", + "srt", + "spr", + "st", + "se", + "sip", + "si", + "ses", + "skoid", + "sktid", + "skt", + "ske", + "sks", + "skv", + "sr", + "sp", + "sig", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "saoid", + "scid", + ]; + const queries = []; + for (const param of params) { + switch (param) { + case "sv": + this.tryAppendQueryParameter(queries, param, this.version); + break; + case "ss": + this.tryAppendQueryParameter(queries, param, this.services); + break; + case "srt": + this.tryAppendQueryParameter(queries, param, this.resourceTypes); + break; + case "spr": + this.tryAppendQueryParameter(queries, param, this.protocol); + break; + case "st": + this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); + break; + case "se": + this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); + break; + case "sip": + this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); + break; + case "si": + this.tryAppendQueryParameter(queries, param, this.identifier); + break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; + case "skoid": // Signed object ID + this.tryAppendQueryParameter(queries, param, this.signedOid); + break; + case "sktid": // Signed tenant ID + this.tryAppendQueryParameter(queries, param, this.signedTenantId); + break; + case "skt": // Signed key start time + this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); + break; + case "ske": // Signed key expiry time + this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); + break; + case "sks": // Signed key service + this.tryAppendQueryParameter(queries, param, this.signedService); + break; + case "skv": // Signed key version + this.tryAppendQueryParameter(queries, param, this.signedVersion); + break; + case "sr": + this.tryAppendQueryParameter(queries, param, this.resource); + break; + case "sp": + this.tryAppendQueryParameter(queries, param, this.permissions); + break; + case "sig": + this.tryAppendQueryParameter(queries, param, this.signature); + break; + case "rscc": + this.tryAppendQueryParameter(queries, param, this.cacheControl); + break; + case "rscd": + this.tryAppendQueryParameter(queries, param, this.contentDisposition); + break; + case "rsce": + this.tryAppendQueryParameter(queries, param, this.contentEncoding); + break; + case "rscl": + this.tryAppendQueryParameter(queries, param, this.contentLanguage); + break; + case "rsct": + this.tryAppendQueryParameter(queries, param, this.contentType); + break; + case "saoid": + this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); + break; + case "scid": + this.tryAppendQueryParameter(queries, param, this.correlationId); + break; + } + } + return queries.join("&"); + } + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + tryAppendQueryParameter(queries, key, value) { + if (!value) { + return; + } + key = encodeURIComponent(key); + value = encodeURIComponent(value); + if (key.length > 0 && value.length > 0) { + queries.push(`${key}=${value}`); + } + } +} + +// Copyright (c) Microsoft Corporation. 
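A SASQueryParameters instance is normally produced by the SAS generation helper defined just below rather than constructed directly, and its toString() output is what gets appended to a blob URL. The following is a rough sketch of that flow, assuming the usual @azure/storage-blob package entry point; the account name, key, container and blob names are placeholders.

// Sketch only: all "<...>" values are placeholders, not real credentials.
const {
  BlobSASPermissions,
  StorageSharedKeyCredential,
  generateBlobSASQueryParameters,
} = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("<account-name>", "<base64-account-key>");

// generateBlobSASQueryParameters() (defined below) picks a signing routine based on the
// service version and returns a SASQueryParameters instance.
const sas = generateBlobSASQueryParameters(
  {
    containerName: "<container>",
    blobName: "<blob>",
    permissions: BlobSASPermissions.parse("r"),          // read-only
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),     // valid for one hour
  },
  credential
);

// Encode the parameters as a query string ready to append to the blob URL.
console.log(sas.toString());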
+function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { + const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential + ? sharedKeyCredentialOrUserDelegationKey + : undefined; + let userDelegationKeyCredential; + if (sharedKeyCredential === undefined && accountName !== undefined) { + userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); + } + if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { + throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); + } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } + // Version 2019-12-12 adds support for the blob tags permission. + // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string + if (version >= "2018-11-09") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); + } + else { + // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. + if (version >= "2020-02-10") { + return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); + } + } + } + if (version >= "2015-04-05") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); + } + else { + throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); + } + } + throw new RangeError("'version' must be >= '2015-04-05'."); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + if (blobSASSignatureValues.blobName) { + resource = "b"; + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. 
+ * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. 
+ * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. 
For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. + let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} +function getCanonicalName(accountName, containerName, blobName) { + // Container: "/blob/account/containerName" + // Blob: "/blob/account/containerName/blobName" + const elements = [`/blob/${accountName}/${containerName}`]; + if (blobName) { + elements.push(`/${blobName}`); + } + return elements.join(""); +} +function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { + const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { + throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { + throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); + } + if (blobSASSignatureValues.versionId && version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); + } + if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { + throw RangeError("Must provide 'blobName' when providing 'versionId'."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); + } + if (version < "2020-02-10" && + blobSASSignatureValues.permissions && + (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); + } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } + if (version < "2020-02-10" && + (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); + } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + blobSASSignatureValues.version = version; + return blobSASSignatureValues; +} + +// Copyright (c) Microsoft Corporation. +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +class BlobLeaseClient { + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client, leaseId) { + const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + this._url = client.url; + if (client.name === undefined) { + this._isContainer = true; + this._containerOrBlobOperation = new Container(clientContext); + } + else { + this._isContainer = false; + this._containerOrBlobOperation = new Blob$1(clientContext); + } + if (!leaseId) { + leaseId = coreHttp.generateUuid(); + } + this._leaseId = leaseId; + } + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. 
+ * + * @readonly + */ + get url() { + return this._url; + } + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + async acquireLease(duration, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + async changeLease(proposedLeaseId, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + this._leaseId = proposedLeaseId; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + async releaseLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To renew the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + async renewLease(options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + async breakLease(breakPeriod, options = {}) { + var _a, _b, _c, _d, _e, _f; + const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + if (this._isContainer && + ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || + (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || + ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { + throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); + } + try { + const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); + return await this._containerOrBlobOperation.breakLease(operationOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. + */ +class RetriableReadableStream extends stream.Readable { + /** + * Creates an instance of RetriableReadableStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param getter - A method calling downloading request returning + * a new ReadableStream from specified offset + * @param offset - Offset position in original data source to read + * @param count - How much data in original data source to read + * @param options - + */ + constructor(source, getter, offset, count, options = {}) { + super({ highWaterMark: options.highWaterMark }); + this.retries = 0; + this.sourceDataHandler = (data) => { + if (this.options.doInjectErrorOnce) { + this.options.doInjectErrorOnce = undefined; + this.source.pause(); + this.source.removeAllListeners("data"); + this.source.emit("end"); + return; + } + // console.log( + // `Offset: ${this.offset}, Received ${data.length} from internal stream` + // ); + this.offset += data.length; + if (this.onProgress) { + this.onProgress({ loadedBytes: this.offset - this.start }); + } + if (!this.push(data)) { + this.source.pause(); + } + }; + this.sourceErrorOrEndHandler = (err) => { + if (err && err.name === "AbortError") { + this.destroy(err); + return; + } + // console.log( + // `Source stream emits end or error, offset: ${ + // this.offset + // }, dest end : ${this.end}` + // ); + this.removeSourceEventHandlers(); + if (this.offset - 1 === this.end) { + this.push(null); + } + else if (this.offset <= this.end) { + // console.log( + // `retries: ${this.retries}, max retries: ${this.maxRetries}` + // ); + if (this.retries < this.maxRetryRequests) { + this.retries += 1; + this.getter(this.offset) + .then((newSource) => { + this.source = newSource; + this.setSourceEventHandlers(); + return; + }) + .catch((error) => { + this.destroy(error); + }); + } + else { + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + } + } + else { + this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); + } + }; + this.getter = getter; + this.source = source; + this.start = offset; + this.offset = offset; + this.end = offset + count - 1; + this.maxRetryRequests = + options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; + this.onProgress = options.onProgress; + this.options = options; + this.setSourceEventHandlers(); + } + _read() { + this.source.resume(); + } + setSourceEventHandlers() { + this.source.on("data", this.sourceDataHandler); + this.source.on("end", this.sourceErrorOrEndHandler); + this.source.on("error", this.sourceErrorOrEndHandler); + } + removeSourceEventHandlers() { + this.source.removeListener("data", this.sourceDataHandler); + this.source.removeListener("end", this.sourceErrorOrEndHandler); + this.source.removeListener("error", this.sourceErrorOrEndHandler); + } + _destroy(error, callback) { + // remove listener from source and release source + this.removeSourceEventHandlers(); + this.source.destroy(); + callback(error === null ? undefined : error); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will + * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot + * trigger retries defined in pipeline retry policy.) 
+ * + * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js + * Readable stream. + */ +class BlobDownloadResponse { + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. + * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. 
+ * + * @readonly + */ + get copyCompletedOn() { + return this.originalResponse.copyCompletedOn; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The number of tags associated with the blob + * + * @readonly + */ + get tagCount() { + return this.originalResponse.tagCount; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. 
This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * Returns the UTC date and time generated by the service that indicates the time at which the blob was + * last read or written to. + * + * @readonly + */ + get lastAccessed() { + return this.originalResponse.lastAccessed; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the Blob service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * Indicates the versionId of the downloaded blob version. + * + * @readonly + */ + get versionId() { + return this.originalResponse.versionId; + } + /** + * Indicates whether version of this blob is a current version. + * + * @readonly + */ + get isCurrentVersion() { + return this.originalResponse.isCurrentVersion; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * Object Replication Policy Id of the destination blob. + * + * @readonly + */ + get objectReplicationDestinationPolicyId() { + return this.originalResponse.objectReplicationDestinationPolicyId; + } + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + * + * @readonly + */ + get objectReplicationSourceProperties() { + return this.originalResponse.objectReplicationSourceProperties; + } + /** + * If this blob has been sealed. + * + * @readonly + */ + get isSealed() { + return this.originalResponse.isSealed; + } + /** + * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. + * + * @readonly + */ + get immutabilityPolicyExpiresOn() { + return this.originalResponse.immutabilityPolicyExpiresOn; + } + /** + * Indicates immutability policy mode. 
+ * + * @readonly + */ + get immutabilityPolicyMode() { + return this.originalResponse.immutabilityPolicyMode; + } + /** + * Indicates if a legal hold is present on the blob. + * + * @readonly + */ + get legalHold() { + return this.originalResponse.legalHold; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get contentAsBlob() { + return this.originalResponse.blobBody; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will automatically retry when internal read stream unexpected ends. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const AVRO_SYNC_MARKER_SIZE = 16; +const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); +const AVRO_CODEC_KEY = "avro.codec"; +const AVRO_SCHEMA_KEY = "avro.schema"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +class AvroParser { + /** + * Reads a fixed number of bytes from the stream. + * + * @param stream - + * @param length - + * @param options - + */ + static async readFixedBytes(stream, length, options = {}) { + const bytes = await stream.read(length, { abortSignal: options.abortSignal }); + if (bytes.length !== length) { + throw new Error("Hit stream end."); + } + return bytes; + } + /** + * Reads a single byte from the stream. + * + * @param stream - + * @param options - + */ + static async readByte(stream, options = {}) { + const buf = await AvroParser.readFixedBytes(stream, 1, options); + return buf[0]; + } + // int and long are stored in variable-length zig-zag coding. + // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt + // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types + static async readZigZagLong(stream, options = {}) { + let zigZagEncoded = 0; + let significanceInBit = 0; + let byte, haveMoreByte, significanceInFloat; + do { + byte = await AvroParser.readByte(stream, options); + haveMoreByte = byte & 0x80; + zigZagEncoded |= (byte & 0x7f) << significanceInBit; + significanceInBit += 7; + } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers + if (haveMoreByte) { + // Switch to float arithmetic + // eslint-disable-next-line no-self-assign + zigZagEncoded = zigZagEncoded; + significanceInFloat = 268435456; // 2 ** 28. + do { + byte = await AvroParser.readByte(stream, options); + zigZagEncoded += (byte & 0x7f) * significanceInFloat; + significanceInFloat *= 128; // 2 ** 7 + } while (byte & 0x80); + const res = (zigZagEncoded % 2 ? 
-(zigZagEncoded + 1) : zigZagEncoded) / 2; + if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { + throw new Error("Integer overflow."); + } + return res; + } + return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); + } + static async readLong(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readInt(stream, options = {}) { + return AvroParser.readZigZagLong(stream, options); + } + static async readNull() { + return null; + } + static async readBoolean(stream, options = {}) { + const b = await AvroParser.readByte(stream, options); + if (b === 1) { + return true; + } + else if (b === 0) { + return false; + } + else { + throw new Error("Byte was not a boolean."); + } + } + static async readFloat(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 4, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat32(0, true); // littleEndian = true + } + static async readDouble(stream, options = {}) { + const u8arr = await AvroParser.readFixedBytes(stream, 8, options); + const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); + return view.getFloat64(0, true); // littleEndian = true + } + static async readBytes(stream, options = {}) { + const size = await AvroParser.readLong(stream, options); + if (size < 0) { + throw new Error("Bytes size was negative."); + } + return stream.read(size, { abortSignal: options.abortSignal }); + } + static async readString(stream, options = {}) { + const u8arr = await AvroParser.readBytes(stream, options); + const utf8decoder = new TextDecoder(); + return utf8decoder.decode(u8arr); + } + static async readMapPair(stream, readItemMethod, options = {}) { + const key = await AvroParser.readString(stream, options); + // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. 
+ const value = await readItemMethod(stream, options); + return { key, value }; + } + static async readMap(stream, readItemMethod, options = {}) { + const readPairMethod = (s, opts = {}) => { + return AvroParser.readMapPair(s, readItemMethod, opts); + }; + const pairs = await AvroParser.readArray(stream, readPairMethod, options); + const dict = {}; + for (const pair of pairs) { + dict[pair.key] = pair.value; + } + return dict; + } + static async readArray(stream, readItemMethod, options = {}) { + const items = []; + for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { + if (count < 0) { + // Ignore block sizes + await AvroParser.readLong(stream, options); + count = -count; + } + while (count--) { + const item = await readItemMethod(stream, options); + items.push(item); + } + } + return items; + } +} +var AvroComplex; +(function (AvroComplex) { + AvroComplex["RECORD"] = "record"; + AvroComplex["ENUM"] = "enum"; + AvroComplex["ARRAY"] = "array"; + AvroComplex["MAP"] = "map"; + AvroComplex["UNION"] = "union"; + AvroComplex["FIXED"] = "fixed"; +})(AvroComplex || (AvroComplex = {})); +var AvroPrimitive; +(function (AvroPrimitive) { + AvroPrimitive["NULL"] = "null"; + AvroPrimitive["BOOLEAN"] = "boolean"; + AvroPrimitive["INT"] = "int"; + AvroPrimitive["LONG"] = "long"; + AvroPrimitive["FLOAT"] = "float"; + AvroPrimitive["DOUBLE"] = "double"; + AvroPrimitive["BYTES"] = "bytes"; + AvroPrimitive["STRING"] = "string"; +})(AvroPrimitive || (AvroPrimitive = {})); +class AvroType { + /** + * Determines the AvroType from the Avro Schema. + */ + static fromSchema(schema) { + if (typeof schema === "string") { + return AvroType.fromStringSchema(schema); + } + else if (Array.isArray(schema)) { + return AvroType.fromArraySchema(schema); + } + else { + return AvroType.fromObjectSchema(schema); + } + } + static fromStringSchema(schema) { + switch (schema) { + case AvroPrimitive.NULL: + case AvroPrimitive.BOOLEAN: + case AvroPrimitive.INT: + case AvroPrimitive.LONG: + case AvroPrimitive.FLOAT: + case AvroPrimitive.DOUBLE: + case AvroPrimitive.BYTES: + case AvroPrimitive.STRING: + return new AvroPrimitiveType(schema); + default: + throw new Error(`Unexpected Avro type ${schema}`); + } + } + static fromArraySchema(schema) { + return new AvroUnionType(schema.map(AvroType.fromSchema)); + } + static fromObjectSchema(schema) { + const type = schema.type; + // Primitives can be defined as strings or objects + try { + return AvroType.fromStringSchema(type); + } + catch (err) { + // eslint-disable-line no-empty + } + switch (type) { + case AvroComplex.RECORD: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.name) { + throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); + } + // eslint-disable-next-line no-case-declarations + const fields = {}; + if (!schema.fields) { + throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); + } + for (const field of schema.fields) { + fields[field.name] = AvroType.fromSchema(field.type); + } + return new AvroRecordType(fields, schema.name); + case AvroComplex.ENUM: + if (schema.aliases) { + throw new Error(`aliases currently is not supported, schema: ${schema}`); + } + if (!schema.symbols) { + throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); + } + return new AvroEnumType(schema.symbols); + case AvroComplex.MAP: + if (!schema.values) { + throw new 
Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); + } + return new AvroMapType(AvroType.fromSchema(schema.values)); + case AvroComplex.ARRAY: // Unused today + case AvroComplex.FIXED: // Unused today + default: + throw new Error(`Unexpected Avro type ${type} in ${schema}`); + } + } +} +class AvroPrimitiveType extends AvroType { + constructor(primitive) { + super(); + this._primitive = primitive; + } + read(stream, options = {}) { + switch (this._primitive) { + case AvroPrimitive.NULL: + return AvroParser.readNull(); + case AvroPrimitive.BOOLEAN: + return AvroParser.readBoolean(stream, options); + case AvroPrimitive.INT: + return AvroParser.readInt(stream, options); + case AvroPrimitive.LONG: + return AvroParser.readLong(stream, options); + case AvroPrimitive.FLOAT: + return AvroParser.readFloat(stream, options); + case AvroPrimitive.DOUBLE: + return AvroParser.readDouble(stream, options); + case AvroPrimitive.BYTES: + return AvroParser.readBytes(stream, options); + case AvroPrimitive.STRING: + return AvroParser.readString(stream, options); + default: + throw new Error("Unknown Avro Primitive"); + } + } +} +class AvroEnumType extends AvroType { + constructor(symbols) { + super(); + this._symbols = symbols; + } + async read(stream, options = {}) { + const value = await AvroParser.readInt(stream, options); + return this._symbols[value]; + } +} +class AvroUnionType extends AvroType { + constructor(types) { + super(); + this._types = types; + } + async read(stream, options = {}) { + const typeIndex = await AvroParser.readInt(stream, options); + return this._types[typeIndex].read(stream, options); + } +} +class AvroMapType extends AvroType { + constructor(itemType) { + super(); + this._itemType = itemType; + } + read(stream, options = {}) { + const readItemMethod = (s, opts) => { + return this._itemType.read(s, opts); + }; + return AvroParser.readMap(stream, readItemMethod, options); + } +} +class AvroRecordType extends AvroType { + constructor(fields, name) { + super(); + this._fields = fields; + this._name = name; + } + async read(stream, options = {}) { + const record = {}; + record["$schema"] = this._name; + for (const key in this._fields) { + if (Object.prototype.hasOwnProperty.call(this._fields, key)) { + record[key] = await this._fields[key].read(stream, options); + } + } + return record; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +function arraysEqual(a, b) { + if (a === b) + return true; + // eslint-disable-next-line eqeqeq + if (a == null || b == null) + return false; + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; ++i) { + if (a[i] !== b[i]) + return false; + } + return true; +} + +// Copyright (c) Microsoft Corporation. 
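// Editorial sketch (not part of the vendored bundle): a minimal, self-contained
// illustration of the variable-length zig-zag decoding that AvroParser.readZigZagLong
// implements above, limited to values that fit its 32-bit fast path. The helper name
// decodeZigZagVarint and the sample byte arrays are assumptions for illustration only.
function decodeZigZagVarint(bytes) {
    let encoded = 0;
    let shift = 0;
    let index = 0;
    let byte;
    do {
        // Each byte contributes its low 7 bits; the high bit flags a continuation byte.
        byte = bytes[index++];
        encoded |= (byte & 0x7f) << shift;
        shift += 7;
    } while (byte & 0x80);
    // Undo zig-zag: even values map to n / 2, odd values to -(n + 1) / 2.
    return (encoded >> 1) ^ -(encoded & 1);
}
// decodeZigZagVarint(Uint8Array.of(0x02)) === 1
// decodeZigZagVarint(Uint8Array.of(0x03)) === -2
// decodeZigZagVarint(Uint8Array.of(0x80, 0x01)) === 64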
+class AvroReader { + constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { + this._dataStream = dataStream; + this._headerStream = headerStream || dataStream; + this._initialized = false; + this._blockOffset = currentBlockOffset || 0; + this._objectIndex = indexWithinCurrentBlock || 0; + this._initialBlockOffset = currentBlockOffset || 0; + } + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } + async initialize(options = {}) { + const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { + abortSignal: options.abortSignal, + }); + if (!arraysEqual(header, AVRO_INIT_BYTES)) { + throw new Error("Stream is not an Avro file."); + } + // File metadata is written as if defined by the following map schema: + // { "type": "map", "values": "bytes"} + this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { + abortSignal: options.abortSignal, + }); + // Validate codec + const codec = this._metadata[AVRO_CODEC_KEY]; + if (!(codec === undefined || codec === null || codec === "null")) { + throw new Error("Codecs are not supported"); + } + // The 16-byte, randomly-generated sync marker for this file. + this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + }); + // Parse the schema + const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); + this._itemType = AvroType.fromSchema(schema); + if (this._blockOffset === 0) { + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + } + this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + }); + // skip block length + await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); + this._initialized = true; + if (this._objectIndex && this._objectIndex > 0) { + for (let i = 0; i < this._objectIndex; i++) { + await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); + this._itemsRemainingInBlock--; + } + } + } + hasNext() { + return !this._initialized || this._itemsRemainingInBlock > 0; + } + parseObjects(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { + if (!this._initialized) { + yield tslib.__await(this.initialize(options)); + } + while (this.hasNext()) { + const result = yield tslib.__await(this._itemType.read(this._dataStream, { + abortSignal: options.abortSignal, + })); + this._itemsRemainingInBlock--; + this._objectIndex++; + if (this._itemsRemainingInBlock === 0) { + const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { + abortSignal: options.abortSignal, + })); + this._blockOffset = this._initialBlockOffset + this._dataStream.position; + this._objectIndex = 0; + if (!arraysEqual(this._syncMarker, marker)) { + throw new Error("Stream is not a valid Avro file."); + } + try { + this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { + abortSignal: options.abortSignal, + })); + } + catch (err) { + // We hit the end of the stream. + this._itemsRemainingInBlock = 0; + } + if (this._itemsRemainingInBlock > 0) { + // Ignore block size + yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); + } + } + yield yield tslib.__await(result); + } + }); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
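// Editorial sketch (not part of the vendored bundle): how the AvroReader above is
// typically driven, mirroring the internal usage in BlobQuickQueryStream further down.
// readAllAvroRecords and nodeReadable are assumed names; nodeReadable stands for a
// Node.js Readable carrying an Avro object container stream. Both classes used here
// are internal to this bundle rather than a public export.
async function readAllAvroRecords(nodeReadable) {
    // AvroReadableFromStream (defined below) adapts a Node.js stream for the reader.
    const reader = new AvroReader(new AvroReadableFromStream(nodeReadable));
    const records = [];
    // parseObjects() returns an async generator of decoded objects.
    for await (const record of reader.parseObjects()) {
        // For record schemas, record.$schema carries the writer's record name
        // (see AvroRecordType.read above).
        records.push(record);
    }
    return records;
}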
+class AvroReadable { +} + +// Copyright (c) Microsoft Corporation. +const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); +class AvroReadableFromStream extends AvroReadable { + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } + toUint8Array(data) { + if (typeof data === "string") { + return Buffer.from(data); + } + return data; + } + get position() { + return this._position; + } + async read(size, options = {}) { + var _a; + if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + throw ABORT_ERROR; + } + if (size < 0) { + throw new Error(`size parameter should be positive: ${size}`); + } + if (size === 0) { + return new Uint8Array(); + } + if (!this._readable.readable) { + throw new Error("Stream no longer readable."); + } + // See if there is already enough data. + const chunk = this._readable.read(size); + if (chunk) { + this._position += chunk.length; + // chunk.length maybe less than desired size if the stream ends. + return this.toUint8Array(chunk); + } + else { + // register callback to wait for enough data to read + return new Promise((resolve, reject) => { + /* eslint-disable @typescript-eslint/no-use-before-define */ + const cleanUp = () => { + this._readable.removeListener("readable", readableCallback); + this._readable.removeListener("error", rejectCallback); + this._readable.removeListener("end", rejectCallback); + this._readable.removeListener("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.removeEventListener("abort", abortHandler); + } + }; + const readableCallback = () => { + const callbackChunk = this._readable.read(size); + if (callbackChunk) { + this._position += callbackChunk.length; + cleanUp(); + // callbackChunk.length maybe less than desired size if the stream ends. + resolve(this.toUint8Array(callbackChunk)); + } + }; + const rejectCallback = () => { + cleanUp(); + reject(); + }; + const abortHandler = () => { + cleanUp(); + reject(ABORT_ERROR); + }; + this._readable.on("readable", readableCallback); + this._readable.once("error", rejectCallback); + this._readable.once("end", rejectCallback); + this._readable.once("close", rejectCallback); + if (options.abortSignal) { + options.abortSignal.addEventListener("abort", abortHandler); + } + /* eslint-enable @typescript-eslint/no-use-before-define */ + }); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. + */ +class BlobQuickQueryStream extends stream.Readable { + /** + * Creates an instance of BlobQuickQueryStream. 
+ * + * @param source - The current ReadableStream returned from getter + * @param options - + */ + constructor(source, options = {}) { + super(); + this.avroPaused = true; + this.source = source; + this.onProgress = options.onProgress; + this.onError = options.onError; + this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); + this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); + } + _read() { + if (this.avroPaused) { + this.readInternal().catch((err) => { + this.emit("error", err); + }); + } + } + async readInternal() { + this.avroPaused = false; + let avroNext; + do { + avroNext = await this.avroIter.next(); + if (avroNext.done) { + break; + } + const obj = avroNext.value; + const schema = obj.$schema; + if (typeof schema !== "string") { + throw Error("Missing schema in avro record."); + } + switch (schema) { + case "com.microsoft.azure.storage.queryBlobContents.resultData": + { + const data = obj.data; + if (data instanceof Uint8Array === false) { + throw Error("Invalid data in avro result record."); + } + if (!this.push(Buffer.from(data))) { + this.avroPaused = true; + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.progress": + { + const bytesScanned = obj.bytesScanned; + if (typeof bytesScanned !== "number") { + throw Error("Invalid bytesScanned in avro progress record."); + } + if (this.onProgress) { + this.onProgress({ loadedBytes: bytesScanned }); + } + } + break; + case "com.microsoft.azure.storage.queryBlobContents.end": + if (this.onProgress) { + const totalBytes = obj.totalBytes; + if (typeof totalBytes !== "number") { + throw Error("Invalid totalBytes in avro end record."); + } + this.onProgress({ loadedBytes: totalBytes }); + } + this.push(null); + break; + case "com.microsoft.azure.storage.queryBlobContents.error": + if (this.onError) { + const fatal = obj.fatal; + if (typeof fatal !== "boolean") { + throw Error("Invalid fatal in avro error record."); + } + const name = obj.name; + if (typeof name !== "string") { + throw Error("Invalid name in avro error record."); + } + const description = obj.description; + if (typeof description !== "string") { + throw Error("Invalid description in avro error record."); + } + const position = obj.position; + if (typeof position !== "number") { + throw Error("Invalid position in avro error record."); + } + this.onError({ + position, + name, + isFatal: fatal, + description, + }); + } + break; + default: + throw Error(`Unknown schema ${schema} in avro progress record.`); + } + } while (!avroNext.done && !this.avroPaused); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will + * parse avor data returned by blob query. + */ +class BlobQueryResponse { + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } + /** + * Indicates that the service supports + * requests for partial file content. + * + * @readonly + */ + get acceptRanges() { + return this.originalResponse.acceptRanges; + } + /** + * Returns if it was previously specified + * for the file. 
+ * + * @readonly + */ + get cacheControl() { + return this.originalResponse.cacheControl; + } + /** + * Returns the value that was specified + * for the 'x-ms-content-disposition' header and specifies how to process the + * response. + * + * @readonly + */ + get contentDisposition() { + return this.originalResponse.contentDisposition; + } + /** + * Returns the value that was specified + * for the Content-Encoding request header. + * + * @readonly + */ + get contentEncoding() { + return this.originalResponse.contentEncoding; + } + /** + * Returns the value that was specified + * for the Content-Language request header. + * + * @readonly + */ + get contentLanguage() { + return this.originalResponse.contentLanguage; + } + /** + * The current sequence number for a + * page blob. This header is not returned for block blobs or append blobs. + * + * @readonly + */ + get blobSequenceNumber() { + return this.originalResponse.blobSequenceNumber; + } + /** + * The blob's type. Possible values include: + * 'BlockBlob', 'PageBlob', 'AppendBlob'. + * + * @readonly + */ + get blobType() { + return this.originalResponse.blobType; + } + /** + * The number of bytes present in the + * response body. + * + * @readonly + */ + get contentLength() { + return this.originalResponse.contentLength; + } + /** + * If the file has an MD5 hash and the + * request is to read the full file, this response header is returned so that + * the client can check for message content integrity. If the request is to + * read a specified range and the 'x-ms-range-get-content-md5' is set to + * true, then the request returns an MD5 hash for the range, as long as the + * range size is less than or equal to 4 MB. If neither of these sets of + * conditions is true, then no value is returned for the 'Content-MD5' + * header. + * + * @readonly + */ + get contentMD5() { + return this.originalResponse.contentMD5; + } + /** + * Indicates the range of bytes returned if + * the client requested a subset of the file by setting the Range request + * header. + * + * @readonly + */ + get contentRange() { + return this.originalResponse.contentRange; + } + /** + * The content type specified for the file. + * The default content type is 'application/octet-stream' + * + * @readonly + */ + get contentType() { + return this.originalResponse.contentType; + } + /** + * Conclusion time of the last attempted + * Copy File operation where this file was the destination file. This value + * can specify the time of a completed, aborted, or failed copy attempt. + * + * @readonly + */ + get copyCompletedOn() { + return undefined; + } + /** + * String identifier for the last attempted Copy + * File operation where this file was the destination file. + * + * @readonly + */ + get copyId() { + return this.originalResponse.copyId; + } + /** + * Contains the number of bytes copied and + * the total bytes in the source in the last attempted Copy File operation + * where this file was the destination file. Can show between 0 and + * Content-Length bytes copied. + * + * @readonly + */ + get copyProgress() { + return this.originalResponse.copyProgress; + } + /** + * URL up to 2KB in length that specifies the + * source file used in the last attempted Copy File operation where this file + * was the destination file. + * + * @readonly + */ + get copySource() { + return this.originalResponse.copySource; + } + /** + * State of the copy operation + * identified by 'x-ms-copy-id'. 
Possible values include: 'pending', + * 'success', 'aborted', 'failed' + * + * @readonly + */ + get copyStatus() { + return this.originalResponse.copyStatus; + } + /** + * Only appears when + * x-ms-copy-status is failed or pending. Describes cause of fatal or + * non-fatal copy operation failure. + * + * @readonly + */ + get copyStatusDescription() { + return this.originalResponse.copyStatusDescription; + } + /** + * When a blob is leased, + * specifies whether the lease is of infinite or fixed duration. Possible + * values include: 'infinite', 'fixed'. + * + * @readonly + */ + get leaseDuration() { + return this.originalResponse.leaseDuration; + } + /** + * Lease state of the blob. Possible + * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. + * + * @readonly + */ + get leaseState() { + return this.originalResponse.leaseState; + } + /** + * The current lease status of the + * blob. Possible values include: 'locked', 'unlocked'. + * + * @readonly + */ + get leaseStatus() { + return this.originalResponse.leaseStatus; + } + /** + * A UTC date/time value generated by the service that + * indicates the time at which the response was initiated. + * + * @readonly + */ + get date() { + return this.originalResponse.date; + } + /** + * The number of committed blocks + * present in the blob. This header is returned only for append blobs. + * + * @readonly + */ + get blobCommittedBlockCount() { + return this.originalResponse.blobCommittedBlockCount; + } + /** + * The ETag contains a value that you can use to + * perform operations conditionally, in quotes. + * + * @readonly + */ + get etag() { + return this.originalResponse.etag; + } + /** + * The error code. + * + * @readonly + */ + get errorCode() { + return this.originalResponse.errorCode; + } + /** + * The value of this header is set to + * true if the file data and application metadata are completely encrypted + * using the specified algorithm. Otherwise, the value is set to false (when + * the file is unencrypted, or if only parts of the file/application metadata + * are encrypted). + * + * @readonly + */ + get isServerEncrypted() { + return this.originalResponse.isServerEncrypted; + } + /** + * If the blob has a MD5 hash, and if + * request contains range header (Range or x-ms-range), this response header + * is returned with the value of the whole blob's MD5 value. This value may + * or may not be equal to the value returned in Content-MD5 header, with the + * latter calculated from the requested range. + * + * @readonly + */ + get blobContentMD5() { + return this.originalResponse.blobContentMD5; + } + /** + * Returns the date and time the file was last + * modified. Any operation that modifies the file or its properties updates + * the last modified time. + * + * @readonly + */ + get lastModified() { + return this.originalResponse.lastModified; + } + /** + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. 
+ * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is + * specified in the same request, it will fail with 400(Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return undefined; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avor data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : undefined; + } + /** + * The HTTP response. + */ + get _response() { + return this.originalResponse._response; + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; +(function (BlockBlobTier) { + /** + * Optimized for storing data that is accessed frequently. + */ + BlockBlobTier["Hot"] = "Hot"; + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + BlockBlobTier["Cool"] = "Cool"; + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + BlockBlobTier["Archive"] = "Archive"; +})(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; +(function (PremiumPageBlobTier) { + /** + * P4 Tier. + */ + PremiumPageBlobTier["P4"] = "P4"; + /** + * P6 Tier. + */ + PremiumPageBlobTier["P6"] = "P6"; + /** + * P10 Tier. + */ + PremiumPageBlobTier["P10"] = "P10"; + /** + * P15 Tier. + */ + PremiumPageBlobTier["P15"] = "P15"; + /** + * P20 Tier. + */ + PremiumPageBlobTier["P20"] = "P20"; + /** + * P30 Tier. + */ + PremiumPageBlobTier["P30"] = "P30"; + /** + * P40 Tier. + */ + PremiumPageBlobTier["P40"] = "P40"; + /** + * P50 Tier. + */ + PremiumPageBlobTier["P50"] = "P50"; + /** + * P60 Tier. + */ + PremiumPageBlobTier["P60"] = "P60"; + /** + * P70 Tier. + */ + PremiumPageBlobTier["P70"] = "P70"; + /** + * P80 Tier. 
+ */ + PremiumPageBlobTier["P80"] = "P80"; +})(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); +function toAccessTier(tier) { + if (tier === undefined) { + return undefined; + } + return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). +} +function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } +} +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Function that converts PageRange and ClearRange to a common Range object. + * PageRange and ClearRange have start and end while Range offset and count + * this function normalizes to Range. + * @param response - Model PageBlob Range response + */ +function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start, + })); + return Object.assign(Object.assign({}, response), { pageRange, + clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange, + } }) }); +} + +// Copyright (c) Microsoft Corporation. +/** + * This is the poller returned by {@link BlobClient.beginCopyFromURL}. + * This can not be instantiated directly outside of this package. + * + * @hidden + */ +class BlobBeginCopyFromUrlPoller extends coreLro.Poller { + constructor(options) { + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, + copySource, + startCopyFromURLOptions })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return coreHttp.delay(this.intervalInMs); + } +} +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. 
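// Editor's note: a sketch of how the BlobBeginCopyFromUrlPoller above is consumed via the
// public BlobClient.beginCopyFromURL API. The poller's toString() serializes its state
// (the toString defined further below strips the blobClient), and resumeFrom rehydrates
// it later. Angle-bracket values are illustrative placeholders.
const { BlobClient } = require("@azure/storage-blob");

async function copyWithResume() {
  const blobClient = new BlobClient("<connection-string>", "mycontainer", "target.bin");
  const poller = await blobClient.beginCopyFromURL("<source-blob-url-with-sas>");
  const saved = poller.toString(); // JSON poll state, safe to persist
  // ...later, possibly in another process:
  const resumed = await blobClient.beginCopyFromURL("<source-blob-url-with-sas>", {
    resumeFrom: saved,
  });
  const result = await resumed.pollUntilDone();
  console.log(result.copyStatus); // "success" once the copy has finished
}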
+ * @hidden + */ +const cancel = async function cancel(options = {}) { + const state = this.state; + const { copyId } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + } + // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call + await state.blobClient.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const update = async function update(options = {}) { + const state = this.state; + const { blobClient, copySource, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); + // copyId is needed to abort + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } + else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && + copyProgress !== prevCopyProgress && + typeof options.fireProgress === "function") { + // trigger in setTimeout, or swallow error? + options.fireProgress(state); + } + else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } + catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); +}; +/** + * Note: Intentionally using function expression over arrow function expression + * so that the function can be invoked with a different context. + * This affects what `this` refers to. + * @hidden + */ +const toString = function toString() { + return JSON.stringify({ state: this.state }, (key, value) => { + // remove blobClient from serialized state since a client can't be hydrated from this info. + if (key === "blobClient") { + return undefined; + } + return value; + }); +}; +/** + * Creates a poll operation given the provided state. + * @hidden + */ +function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Generate a range string. For example: + * + * "bytes=255-" or "bytes=0-511" + * + * @param iRange - + */ +function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count + ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` + : `bytes=${iRange.offset}-`; +} + +// Copyright (c) Microsoft Corporation. +/** + * States for Batch. 
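// Editor's note: worked examples for the rangeToString() helper above. It is internal to
// this bundle (not a package export), so these lines assume module scope; the expected
// strings follow directly from `bytes=${offset}-${offset + count - 1}`.
console.log(rangeToString({ offset: 0, count: 512 })); // "bytes=0-511"
console.log(rangeToString({ offset: 255 }));           // "bytes=255-" (open-ended, to end of blob)
console.log(rangeToString({ offset: 4, count: 4 }));   // "bytes=4-7"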
+ */ +var BatchStates; +(function (BatchStates) { + BatchStates[BatchStates["Good"] = 0] = "Good"; + BatchStates[BatchStates["Error"] = 1] = "Error"; +})(BatchStates || (BatchStates = {})); +/** + * Batch provides basic parallel execution with concurrency limits. + * Will stop execute left operations when one of the executed operation throws an error. + * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. + */ +class Batch { + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + /** + * Number of active operations under execution. + */ + this.actives = 0; + /** + * Number of completed operations under execution. + */ + this.completed = 0; + /** + * Offset of next operation to be executed. + */ + this.offset = 0; + /** + * Operation array to be executed. + */ + this.operations = []; + /** + * States of Batch. When an error happens, state will turn into error. + * Batch will stop execute left operations. + */ + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); + } + /** + * Add a operation into queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } + catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start execute operations in the queue. + * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get next operation to be executed. Return null when reaching ends. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start execute operations. One one the most important difference between + * this method with do() is that do() wraps as an sync method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } + else { + return; + } + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * This class generates a readable stream from the data in an array of buffers. + */ +class BuffersStream extends stream.Readable { + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. 
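// Editor's note: a minimal sketch of the internal Batch helper above (not part of the
// public API; assumes it runs inside this module's scope). At most `concurrency`
// operations run in flight, and the first rejection stops the remaining queue.
async function batchExample() {
  const batch = new Batch(2); // at most 2 operations in parallel
  for (let i = 0; i < 5; i++) {
    batch.addOperation(async () => {
      // any async work, e.g. downloading one range of a blob
      await new Promise((resolve) => setTimeout(resolve, 100));
      console.log(`operation ${i} done`);
    });
  }
  await batch.do(); // resolves when all operations finish, rejects on the first error
}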
+ * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + // check byteLength is no larger than buffers[] total length + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional. The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + // The last buffer may be longer than the data it contains. + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + // chunkSize = size - i + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } + else { + // chunkSize = remaining + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + // this.buffers[this.bufferIndex] used up, shift to next one + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } + else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } + else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * maxBufferLength is max size of each buffer in the pooled buffers. + */ +// Can't use import as Typescript doesn't recognize "buffer". +const maxBufferLength = require("buffer").constants.MAX_LENGTH; +/** + * This class provides a buffer container which conceptually has no hard size limit. + * It accepts a capacity, an array of input buffers and the total length of input data. + * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers + * into the internal "buffer" serially with respect to the total length. + * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream + * assembled from all the data in the internal "buffer". + */ +class PooledBuffer { + constructor(capacity, buffers, totalLength) { + /** + * Internal buffers used to keep the data. + * Each buffer has a length of the maxBufferLength except last one. + */ + this.buffers = []; + this.capacity = capacity; + this._size = 0; + // allocate + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? 
capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + // clear copied from source buffers + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * This class accepts a Node.js Readable stream as input, and keeps reading data + * from the stream into the internal buffer structure, until it reaches maxBuffers. + * Every available buffer will try to trigger outgoingHandler. + * + * The internal buffer structure includes an incoming buffer array, and a outgoing + * buffer array. The incoming buffer array includes the "empty" buffers can be filled + * with new incoming data. The outgoing array includes the filled buffers to be + * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. + * + * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING + * + * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers + * + * PERFORMANCE IMPROVEMENT TIPS: + * 1. Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * 2. concurrency should set a smaller value than maxBuffers, which is helpful to + * reduce the possibility when a outgoing handler waits for the stream data. + * in this situation, outgoing handlers are blocked. + * Outgoing queue shouldn't be empty. + */ +class BufferScheduler { + /** + * Creates an instance of BufferScheduler. + * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + /** + * An internal event emitter. 
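// Editor's note: a sketch of the internal PooledBuffer above (not exported; assumes module
// scope). It allocates `capacity` bytes split into chunks of at most
// buffer.constants.MAX_LENGTH, copies the input buffers in, and replays the data as a
// stream via BuffersStream.
const input = [Buffer.from("hello "), Buffer.from("world")];
const pooled = new PooledBuffer(11, input, 11); // capacity, source buffers, total bytes
console.log(pooled.size);                        // 11
pooled.getReadableStream().pipe(process.stdout); // prints "hello world"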
+ */ + this.emitter = new events.EventEmitter(); + /** + * An internal offset marker to track data offset in bytes of next outgoingHandler. + */ + this.offset = 0; + /** + * An internal marker to track whether stream is end. + */ + this.isStreamEnd = false; + /** + * An internal marker to track whether stream or outgoingHandler returns error. + */ + this.isError = false; + /** + * How many handlers are executing. + */ + this.executingOutgoingHandlers = 0; + /** + * How many buffers have been allocated. + */ + this.numBuffers = 0; + /** + * Because this class doesn't know how much data every time stream pops, which + * is defined by highWaterMarker of the stream. So BufferScheduler will cache + * data received from the stream, when data in unresolvedDataArray exceeds the + * blockSize defined, it will try to concat a blockSize of buffer, fill into available + * buffers from incoming and push to outgoing array. + */ + this.unresolvedDataArray = []; + /** + * How much data consisted in unresolvedDataArray. + */ + this.unresolvedLength = 0; + /** + * The array includes all the available buffers can be used to fill data from stream. + */ + this.incoming = []; + /** + * The array (queue) includes all the buffers filled from stream data. + */ + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler, will return error when stream of any of the outgoingHandlers + * returns error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) + .then(resolve) + .catch(reject); + } + else if (this.unresolvedLength >= this.bufferSize) { + return; + } + else { + resolve(); + } + } + }); + }); + } + /** + * Insert a new data into unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. 
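// Editor's note: a sketch of how the internal BufferScheduler above is driven (it is the
// engine behind BlockBlobClient.uploadStream; not exported, so module scope is assumed).
// Each filled buffer is handed to the outgoing handler as (stream factory, length, offset).
async function schedulerExample(readable) {
  const scheduler = new BufferScheduler(
    readable,        // source Node.js Readable
    4 * 1024 * 1024, // bufferSize: 4 MiB per chunk
    20,              // maxBuffers kept alive at once
    async (bodyFactory, length, offset) => {
      // e.g. stage one block of a block blob with this chunk
      console.log(`chunk of ${length} bytes at offset ${offset}`);
      bodyFactory().resume(); // drain the chunk in this sketch
    },
    5 // concurrency of outgoing handlers
  );
  await scheduler.do(); // resolves after the stream ends and all handlers finish
}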
+ * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } + else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler. + * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } + else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } + else { + // No available buffer, wait for buffer returned + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger a outgoing handler for every buffer in outgoing. Stop when + * concurrency reaches. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger a outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } + catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return buffer used by outgoing handler into incoming. + * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Reads a readable stream into buffer. Fill the buffer from offset to end. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param offset - From which position in the buffer to be filled, inclusive + * @param end - To which position in the buffer to be filled, exclusive + * @param encoding - Encoding of the Readable stream + */ +async function streamToBuffer(stream, buffer, offset, end, encoding) { + let pos = 0; // Position in stream + const count = end - offset; // Total amount of data needed in stream + return new Promise((resolve, reject) => { + stream.on("readable", () => { + if (pos >= count) { + resolve(); + return; + } + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + // How much data needed in this chunk + const chunkLength = pos + chunk.length > count ? 
count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream.on("end", () => { + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream.on("error", reject); + }); +} +/** + * Reads a readable stream into buffer entirely. + * + * @param stream - A Node.js Readable stream + * @param buffer - Buffer to be filled, length must greater than or equal to offset + * @param encoding - Encoding of the Readable stream + * @returns with the count of bytes read. + * @throws `RangeError` If buffer size is not big enough. + */ +async function streamToBuffer2(stream, buffer, encoding) { + let pos = 0; // Position in stream + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream.on("readable", () => { + let chunk = stream.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream.on("end", () => { + resolve(pos); + }); + stream.on("error", reject); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Writes the content of a readstream to a local file. Returns a Promise which is completed after the file handle is closed. + * + * @param rs - The read stream. + * @param file - Destination file path. + */ +async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Promisified version of fs.stat(). + */ +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; + +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +class BlobClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + let pipeline; + let url; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. + url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new Blob$1(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. 
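// Editor's note: the overload resolution above corresponds to three common ways of
// constructing a BlobClient; a sketch with illustrative placeholder values.
const { BlobClient, StorageSharedKeyCredential } = require("@azure/storage-blob");

// 1. Account connection string + container + blob name (account keys are Node.js-only):
const fromConnString = new BlobClient("<account-connection-string>", "mycontainer", "hello.txt");

// 2. Blob URL + explicit credential:
const fromCredential = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/hello.txt",
  new StorageSharedKeyCredential("<account-name>", "<account-key>")
);

// 3. Blob URL that already carries a SAS token (falls through to AnonymousCredential):
const fromSasUrl = new BlobClient("https://myaccount.blob.core.windows.net/mycontainer/hello.txt?<sas>");

console.log(fromConnString.name, fromCredential.containerName, fromSasUrl.name);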
+ * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId) { + return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + // Return browser response immediately + if (!coreHttp.isNode) { + return wrappedRes; + } + // We support retrying when download stream unexpected ends in Node.js runtime + // Following code shouldn't be bundled into browser build, however some + // bundlers may try to bundle following code and "FileReadResponse.ts". + // In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" + // The config is in package.json "browser" field + if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { + // TODO: Default value or make it a required parameter? + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === undefined) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions, + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start, + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + }; + // Debug purpose only + // console.log( + // `Read from internal stream, range: ${ + // updatedOptions.range + // }, options: ${JSON.stringify(updatedOptions)}` + // ); + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. 
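// Editor's note: a sketch combining the existence and deletion helpers documented above;
// assumes blobClient was created as in the earlier construction example.
async function cleanupExample(blobClient) {
  if (await blobClient.exists()) {
    const props = await blobClient.getProperties();
    console.log("blob exists, size:", props.contentLength);
  }
  // deleteIfExists() swallows BlobNotFound and reports the outcome via `succeeded`
  const { succeeded } = await blobClient.deleteIfExists({ deleteSnapshots: "include" });
  console.log(succeeded ? "deleted" : "nothing to delete");
}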
+ */ + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + async setMetadata(metadata, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. 
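// Editor's note: a sketch of the header/metadata setters documented above; assumes an
// existing blobClient. Omitted HTTP headers are cleared by setHTTPHeaders, and metadata
// keys come back lower-cased when read back.
async function propertiesExample(blobClient) {
  await blobClient.setHTTPHeaders({
    blobContentType: "application/json",
    blobCacheControl: "max-age=3600",
  });
  await blobClient.setMetadata({ project: "alpha", reviewed: "true" });
  const props = await blobClient.getProperties();
  console.log(props.contentType, props.metadata);
}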
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + async setTags(tags, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + try { + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
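// Editor's note: a sketch of the tag and snapshot operations above; assumes an existing
// blobClient. Tag values are plain strings and can later be searched with
// findBlobsByTags on the service or container clients.
async function tagsAndSnapshotExample(blobClient) {
  await blobClient.setTags({ project: "alpha", status: "draft" });
  const { tags } = await blobClient.getTags();
  console.log(tags); // { project: "alpha", status: "draft" }

  const snapshotResponse = await blobClient.createSnapshot();
  // withSnapshot() returns a new client scoped to that point-in-time copy
  const snapshotClient = blobClient.withSnapshot(snapshotResponse.snapshot);
  console.log(await snapshotClient.exists()); // true
}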
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.
+ * Note that the onProgress callback will not be invoked if the operation completes in the first
+ * request, and attempting to cancel a completed copy will result in an error being thrown.
+ *
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+ * a committed blob in any Azure storage account.
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+ * an Azure file in any Azure storage account.
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+ * operation to copy from another storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+ *
+ * Example using automatic polling:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using manual polling:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * while (!copyPoller.isDone()) {
+ * await copyPoller.poll();
+ * }
+ * const result = copyPoller.getResult();
+ * ```
+ *
+ * Example using progress updates:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url', {
+ * onProgress(state) {
+ * console.log(`Progress: ${state.copyProgress}`);
+ * }
+ * });
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using a changing polling interval (default 15 seconds):
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url', {
+ * intervalInMs: 1000 // poll blob every 1 second for copy progress
+ * });
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using copy cancellation:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * // cancel operation after starting it.
+ * try {
+ * await copyPoller.cancelOperation();
+ * // calls to get the result now throw PollerCancelledError
+ * await copyPoller.getResult();
+ * } catch (err) {
+ * if (err.name === 'PollerCancelledError') {
+ * console.log('The copy was cancelled.');
+ * }
+ * }
+ * ```
+ *
+ * @param copySource - url to the source Azure Blob/File.
+ * @param options - Optional options to the Blob Start Copy From URL operation.
+ */
+ async beginCopyFromURL(copySource, options = {}) {
+ const client = {
+ abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),
+ getProperties: (...args) => this.getProperties(...args),
+ startCopyFromURL: (...args) => this.startCopyFromURL(...args),
+ };
+ const poller = new BlobBeginCopyFromUrlPoller({
+ blobClient: client,
+ copySource,
+ intervalInMs: options.intervalInMs,
+ onProgress: options.onProgress,
+ resumeFrom: options.resumeFrom,
+ startCopyFromURLOptions: options,
+ });
+ // Trigger the startCopyFromURL call by calling poll.
+ // Any errors from this method should be surfaced to the user.
+ await poller.poll();
+ return poller;
+ }
+ /**
+ * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero
+ * length and full metadata. Version 2012-02-12 and newer.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob
+ *
+ * @param copyId - Id of the Copy From URL operation.
+ * @param options - Optional options to the Blob Abort Copy From URL operation.
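+ *
+ * Example usage (a minimal sketch; `blobClient` and the source URL are placeholders, and it assumes
+ * the pending copy's id is exposed as `copyId` on the start-copy response):
+ *
+ * ```js
+ * // Start an asynchronous copy, then abort it while it is still pending.
+ * const startResponse = await blobClient.startCopyFromURL('https://myaccount.blob.core.windows.net/mycontainer/source-blob');
+ * if (startResponse.copyStatus === 'pending') {
+ *   await blobClient.abortCopyFromURL(startResponse.copyId);
+ * }
+ * ```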
+ */ + async abortCopyFromURL(copyId, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + async syncCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. 
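+ *
+ * Example usage (a minimal sketch; `blobClient` is assumed to reference an existing block blob):
+ *
+ * ```js
+ * // Move the blob to the Cool tier, then read back the tier reported by the service.
+ * await blobClient.setAccessTier("Cool");
+ * const properties = await blobClient.getProperties();
+ * console.log(`Access tier is now: ${properties.accessTier}`);
+ * ```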
+ */ + async setAccessTier(tier, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } + else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? param2 : 0; + options = param3 || {}; + } + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + // Customer doesn't specify length, get it + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); + } + } + // Allocate the buffer of size = count if the buffer is not provided + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } + catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). 
Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); + } + } + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + // Exclusive chunk end position + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + }); + const stream = response.readableStreamBody; + await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); + // Update progress after block is downloaded, in case of block trying + // Could provide finer grained progress updating inside HTTP requests, + // only if convenience layer download try is enabled + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. + */ + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); + } + // The stream is no longer accessible so setting it to undefined. 
+ response.blobDownloadStream = undefined; + return response; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` + // http://localhost:10001/devstoreaccount1/containername/blob + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob + // .getPath() -> /devstoreaccount1/containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } + else { + // "https://customdomain.com/containername/blob". + // .getPath() -> /containername/blob + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + // decode the encoded blobName, containerName - to get all the special characters that might be present in them + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + // Azure Storage Server will replace "\" with "/" in the blob names + // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } + catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); + } + } + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
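+ *
+ * Example usage (a minimal sketch; the destination `blobClient` and the source URL are placeholders,
+ * and completion is assumed to be observable via `copyStatus` on the properties response):
+ *
+ * ```js
+ * // Kick off the copy, then poll the blob's properties until the service reports a final status.
+ * const { copyId } = await blobClient.startCopyFromURL('https://myaccount.blob.core.windows.net/mycontainer/source-blob');
+ * let properties = await blobClient.getProperties();
+ * while (properties.copyStatus === 'pending') {
+ *   await new Promise((resolve) => setTimeout(resolve, 1000));
+ *   properties = await blobClient.getProperties();
+ * }
+ * console.log(`Copy ${copyId} finished with status: ${properties.copyStatus}`);
+ * ```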
+ */ + async startCopyFromURL(copySource, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions, + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. 
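+ *
+ * Example usage (a minimal sketch; `blobClient` is a placeholder, the 24-hour expiry is arbitrary,
+ * and the "Unlocked" policy mode is an assumption - the `expiriesOn`/`policyMode` property names
+ * match what this client reads from the policy object):
+ *
+ * ```js
+ * // Apply an unlocked immutability policy that expires in 24 hours, then remove it again.
+ * await blobClient.setImmutabilityPolicy({
+ *   expiriesOn: new Date(Date.now() + 24 * 60 * 60 * 1000),
+ *   policyMode: "Unlocked",
+ * });
+ * await blobClient.deleteImmutabilityPolicy();
+ * ```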
+ */ + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +class AppendBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + // The second parameter is undefined. Use anonymous credential. 
+ pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); + } + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. 
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + async appendBlock(body, contentLength, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +class BlockBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new Blob$1(this.storageClientContext); + } + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration), + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError, + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + async upload(body, contentLength, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + async stageBlock(blockId, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress, + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. 
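+ *
+ * Example usage (a minimal sketch; `blockBlobClient`, the source URL and the block id scheme are
+ * placeholders - block ids just need to be base64-encoded strings of equal length):
+ *
+ * ```js
+ * // Stage one block from a publicly readable source blob, then commit it as the blob's content.
+ * const blockId = Buffer.from("block-000001").toString("base64");
+ * await blockBlobClient.stageBlockFromURL(blockId, 'https://myaccount.blob.core.windows.net/mycontainer/source-blob');
+ * await blockBlobClient.commitBlockList([blockId]);
+ * ```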
+ */ + async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list and permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte value that is base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + async commitBlockList(blocks, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. 
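+ *
+ * Example usage (a minimal sketch; `blockBlobClient` is a placeholder and the blob is assumed to
+ * already have committed blocks):
+ *
+ * ```js
+ * // List only the committed blocks and print each block's name and size.
+ * const blockList = await blockBlobClient.getBlockList("committed");
+ * for (const block of blockList.committedBlocks) {
+ *   console.log(`Block ${block.name}: ${block.size} bytes`);
+ * }
+ * ```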
+ */ + async getBlockList(listType, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + async uploadData(data, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); + try { + if (coreHttp.isNode) { + let buffer; + if (data instanceof Buffer) { + buffer = data; + } + else if (data instanceof ArrayBuffer) { + buffer = Buffer.from(data); + } + else { + data = data; + buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); + } + return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); + } + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. 
+ */ + async uploadBrowserData(browserData, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); + try { + const browserBlob = new Blob([browserData]); + return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadSeekableInternal(bodyFactory, size, options = {}) { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); + } + if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { + options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + } + if (options.maxSingleShotSize < 0 || + options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); + } + if (options.blockSize === 0) { + if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`${size} is too larger to upload to a block blob.`); + } + if (size > options.maxSingleShotSize) { + options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + } + } + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); + try { + if (size <= options.maxSingleShotSize) { + return await this.upload(bodyFactory(0, size), size, updatedOptions); + } + const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; + if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { + throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); + } + const blockList = []; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let i = 0; i < numBlocks; i++) { + batch.addOperation(async () => { + const blockID = generateBlockID(blockIDPrefix, i); + const start = options.blockSize * i; + const end = i === numBlocks - 1 ? 
size : start + options.blockSize; + const contentLength = end - start; + blockList.push(blockID); + await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { + abortSignal: options.abortSignal, + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + // TODO: Hook with convenience layer progress event in finer level + transferProgress += contentLength; + if (options.onProgress) { + options.onProgress({ + loadedBytes: transferProgress, + }); + } + }); + } + await batch.do(); + return this.commitBlockList(blockList, updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + async uploadFile(filePath, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + try { + const size = (await fsStat(filePath)).size; + return await this.uploadSeekableInternal((offset, count) => { + return () => fsCreateReadStream(filePath, { + autoClose: true, + end: count ? offset + count - 1 : Infinity, + start: offset, + }); + }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Input stream highWaterMark is better to set a same value with bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. 
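+ *
+ * Example usage (a minimal sketch for Node.js; `blockBlobClient` and the file path are placeholders,
+ * and the 4 MB buffer size / concurrency of 20 are arbitrary choices):
+ *
+ * ```js
+ * const fs = require("fs");
+ *
+ * // Stream a local file into a block blob, reporting progress as data is uploaded.
+ * await blockBlobClient.uploadStream(fs.createReadStream("local-file.dat"), 4 * 1024 * 1024, 20, {
+ *   onProgress: (progress) => console.log(`Uploaded ${progress.loadedBytes} bytes`),
+ * });
+ * ```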
+ */ + async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { + if (!options.blobHTTPHeaders) { + options.blobHTTPHeaders = {}; + } + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + try { + let blockNum = 0; + const blockIDPrefix = coreHttp.generateUuid(); + let transferProgress = 0; + const blockList = []; + const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { + const blockID = generateBlockID(blockIDPrefix, blockNum); + blockList.push(blockID); + blockNum++; + await this.stageBlock(blockID, body, length, { + conditions: options.conditions, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + }); + // Update progress after block is successfully uploaded to server, in case of block trying + transferProgress += length; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }, + // concurrency should set a smaller value than maxConcurrency, which is helpful to + // reduce the possibility when a outgoing handler waits for stream data, in + // this situation, outgoing handlers are blocked. + // Outgoing queue shouldn't be empty. + Math.ceil((maxConcurrency / 4) * 3)); + await scheduler.do(); + return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +class PageBlobClient extends BlobClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. + // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string" && + blobNameOrOptions && + typeof blobNameOrOptions === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); + } + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot) { + return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + async create(size, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * data to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + async createIfNotExists(size, options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + async uploadPages(body, offset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress, + }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
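+ *
+ * Example (an illustrative sketch, not part of the upstream SDK docs; `pageBlobClient` is assumed,
+ * e.g. obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("")`, and the
+ * sizes are chosen only for illustration):
+ *
+ * ```js
+ * // Page blob sizes, offsets and counts must all be 512-byte aligned.
+ * await pageBlobClient.create(1024);
+ * await pageBlobClient.uploadPages(Buffer.alloc(1024, "a"), 0, 1024);
+ * // Free the second 512-byte page; cleared pages read back as zeros.
+ * await pageBlobClient.clearPages(512, 512);
+ * ```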
+ */ + async clearPages(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + try { + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + async getPageRanges(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + try { + return await this.pageBlobContext + .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + async listPageRangesSegment(offset = 0, count, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + try { + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItemSegments(offset = 0, count, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + listPageRangeItems(offset = 0, count, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRanges()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRanges();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRanges(offset = 0, count, options = {}) {
+ options.conditions = options.conditions || {};
+ // AsyncIterableIterator to iterate over blobs
+ const iter = this.listPageRangeItems(offset, count, options);
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: (settings = {}) => {
+ return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ },
+ };
+ }
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
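+ *
+ * Example (an illustrative sketch, not part of the upstream SDK docs; `pageBlobClient` is assumed,
+ * `createSnapshot` is the snapshot operation inherited from BlobClient, and the response property
+ * names are assumptions):
+ *
+ * ```js
+ * // Snapshot the blob, write more pages, then diff against the snapshot.
+ * const { snapshot } = await pageBlobClient.createSnapshot();
+ * await pageBlobClient.uploadPages(Buffer.alloc(512, "b"), 0, 512);
+ * const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshot);
+ * console.log(diff.pageRange);  // ranges written since the snapshot
+ * console.log(diff.clearRange); // ranges cleared since the snapshot
+ * ```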
+ */ + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); + try { + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset: offset, + count: count, + }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. 
The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker || marker === undefined) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); + marker = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset, count, prevSnapshot, options = {}) {
+ options.conditions = options.conditions || {};
+ // AsyncIterableIterator to iterate over blobs
+ const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options));
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: (settings = {}) => {
+ return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ },
+ };
+ }
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); + try { + return await this.pageBlobContext + .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) + .then(rangeResponseFromModel); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); + try { + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + try { + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. 
+ * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + async startCopyIncremental(copySource, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); + try { + return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +// Copyright (c) Microsoft Corporation. +async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + // Slice the buffer to trim the empty ending. + buffer = buffer.slice(0, responseLength); + return buffer.toString(); +} +function utf8ByteLength(str) { + return Buffer.byteLength(str); +} + +// Copyright (c) Microsoft Corporation. +const HTTP_HEADER_DELIMITER = ": "; +const SPACE_DELIMITER = " "; +const NOT_FOUND = -1; +/** + * Util class for parsing batch response. + */ +class BatchResponseParser { + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + // In special case(reported), server may return invalid content-type which could not be parsed. + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + // This should be prevent during coding. + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse + // sub request's response. 
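+ // Illustrative sketch (not literal server output) of the multipart payload parsed below:
+ // each sub-response is framed by the batch boundary, carries a Content-ID matching the
+ // sub-request index, and embeds a full HTTP response.
+ //
+ //   --<boundary>
+ //   Content-Type: application/http
+ //   Content-ID: 0
+ //
+ //   HTTP/1.1 202 Accepted
+ //   x-ms-request-id: ...
+ //
+ //   --<boundary>--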
+ if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText + .split(this.batchResponseEnding)[0] // string after ending is useless + .split(this.perResponsePrefix) + .slice(1); // string before first response boundary is useless + const subResponseCount = subResponses.length; + // Defensive coding in case of potential error parsing. + // Note: subResponseCount == 1 is special case where sub request is invalid. + // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. + // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + // Parse sub subResponses. + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + // Convention line to indicate content ID + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + // Http version line with status code indicates the start of sub request's response. + // Example: HTTP/1.1 202 Accepted + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * + } + if (responseLine.trim() === "") { + // Sub response's header start line already found, and the first empty line indicates header end line found. + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; // Skip empty line + } + // Note: when code reach here, it indicates subRespHeaderStartFound == true + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + // Defensive coding to prevent from missing valuable lines. + throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + // Parse headers of sub response. + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } + else { + // Assemble body of sub response. 
+ if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } // Inner for end + // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. + // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it + // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that + // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. + if (contentId !== NOT_FOUND && + Number.isInteger(contentId) && + contentId >= 0 && + contentId < this.subRequests.size && + deserializedSubResponses[contentId] === undefined) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } + else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } + else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount: subResponsesSucceededCount, + subResponsesFailedCount: subResponsesFailedCount, + }; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +var MutexLockStatus; +(function (MutexLockStatus) { + MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; +})(MutexLockStatus || (MutexLockStatus = {})); +/** + * An async mutex lock. + */ +class Mutex { + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } + else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } + /** + * Unlock a key. + * + * @param key - + */ + static async unlock(key) { + return new Promise((resolve) => { + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); + } + delete this.keys[key]; + resolve(); + }); + } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === undefined) { + this.listeners[key] = [handler]; + } + else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } +} +Mutex.keys = {}; +Mutex.listeners = {}; + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +class BlobBatch { + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. 
+ * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); + try { + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); + } + finally { + await Mutex.unlock(this.batch); + } + } + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url; + let credential; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + credentialOrOptions instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrOptions))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } + else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); + try { + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url; + let credential; + let tier; + if (typeof urlOrBlobClient === "string" && + ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + credentialOrTier instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrTier))) { + // First overload + url = urlOrBlobClient; + credential = credentialOrTier; + tier = tierOrOptions; + } + else if (urlOrBlobClient instanceof BlobClient) { + // Second overload + url = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier = credentialOrTier; + options = tierOrOptions; + } + else { + throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); + try { + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url, + credential: credential, + }, async () => { + await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); + }); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} +/** + * Inner batch request class which is responsible for assembling and serializing sub requests. + * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. + */ +class InnerBatchRequest { + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreHttp.generateUuid(); + // batch_{batchid} + this.boundary = `batch_${tempGuid}`; + // --batch_{batchid} + // Content-Type: application/http + // Content-Transfer-Encoding: binary + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + // multipart/mixed; boundary=batch_{batchid} + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + // --batch_{batchid}-- + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = new Map(); + } + /** + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] + const factories = new Array(policyFactoryLength); + factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer + factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers + if (!isAnonymousCreds) { + factories[2] = coreHttp.isTokenCredential(credential) + ? 
attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire + return new Pipeline(factories, {}); + } + appendSubRequestToBody(request) { + // Start to assemble sub request + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + } + this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line + // No body to assemble for current batch request support + // End to assemble sub request + } + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + // Fast fail if url for sub request is invalid + const path = getURLPath(subRequest.url); + if (!path || path === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. + getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } +} +class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new coreHttp.WebResource(), + status: 200, + headers: new coreHttp.HttpHeaders(), + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; // Intercept request from going to wire + } +} +class BatchRequestAssemblePolicyFactory { + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } +} +class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; + } + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return this._nextPolicy.sendRequest(request); + } +} +class BatchHeaderFilterPolicyFactory { + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. 
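+ *
+ * Example using the `deleteBlobs` convenience method (an illustrative sketch, not part of the
+ * upstream SDK docs; `blobServiceClient`, the container client getter and the blob names are
+ * assumptions):
+ *
+ * ```js
+ * const blobBatchClient = blobServiceClient.getBlobBatchClient();
+ * const containerClient = blobServiceClient.getContainerClient("");
+ * const blobClients = ["blob1", "blob2"].map((name) => containerClient.getBlobClient(name));
+ * // All deletes are assembled into one batch request and submitted in a single round trip.
+ * const resp = await blobBatchClient.deleteBlobs(blobClients, { deleteSnapshots: "include" });
+ * console.log(`${resp.subResponsesSucceededCount} of ${resp.subResponses.length} blobs deleted`);
+ * ```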
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +class BlobBatchClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if (!credentialOrPipeline) { + // no credential provided + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const path = getURLPath(url); + if (path && path !== "/") { + // Container scoped. + this.serviceOrContainerContext = new Container(storageClientContext); + } + else { + this.serviceOrContainerContext = new Service(storageClientContext); + } + } + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } + else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } + else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } + } + return this.submitBatch(batch); + } + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. 
+ * @param options - + */ + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); + try { + const batchRequestBody = batchRequest.getHttpRequestBody(); + // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount, + }; + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +class ContainerClient extends StorageClient { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + let url; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + // (url: string, pipeline: Pipeline) + url = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } + else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + credentialOrPipelineOrContainerName instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + url = urlOrConnectionString; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } + else if (!credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName !== "string") { + // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) + // The second parameter is undefined. Use anonymous credential. 
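+ // e.g. new ContainerClient("https://myaccount.blob.core.windows.net/mycontainer?<SAS>") lands here,
+ // so authentication must come from the URL itself (a SAS token) or from public container access.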
+ url = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else if (credentialOrPipelineOrContainerName && + typeof credentialOrPipelineOrContainerName === "string") { + // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) + const containerName = credentialOrPipelineOrContainerName; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + url = + appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + + "?" + + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + else { + throw new Error("Expecting non-empty strings for containerName parameter"); + } + super(url, pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); + } + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. + * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); + try { + // Spread operator in destructuring assignments, + // this will filter out unwanted properties from the response object into result object + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + }); + return true; + } + catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence", + }); + return false; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. 
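+ *
+ * A minimal usage sketch, assuming `containerClient` was obtained from
+ * `blobServiceClient.getContainerClient("<container name>")`:
+ *
+ * ```js
+ * const properties = await containerClient.getProperties();
+ * // Note: metadata keys come back lowercased, as described in the warning above.
+ * console.log(properties.lastModified, properties.metadata);
+ * ```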
+ */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + try { + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists.", + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
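+ *
+ * A minimal usage sketch, assuming an existing `containerClient`:
+ *
+ * ```js
+ * // Replaces any existing container metadata with these name-value pairs.
+ * await containerClient.setMetadata({ project: "archive", owner: "teama" });
+ * // Calling it without a metadata argument clears the container metadata.
+ * await containerClient.setMetadata();
+ * ```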
+ */ + async setMetadata(metadata, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + try { + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + try { + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version, + }; + for (const identifier of response) { + let accessPolicy = undefined; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions, + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id, + }); + } + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. 
+ * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + async setAccessPolicy(access, containerAcl, options = {}) { + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + try { + const acl = []; + for (const identifier of containerAcl || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn + ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) + : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn + ? truncatedISO8061Date(identifier.accessPolicy.startsOn) + : "", + }, + id: identifier.id, + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body, contentLength, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + try { + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body, contentLength, updatedOptions); + return { + blockBlobClient, + response, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. 
Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + try { + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + async listBlobFlatSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + try { + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. 
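+ *
+ * A minimal usage sketch, assuming an existing `containerClient` (the higher-level
+ * `listBlobsByHierarchy` iterator below covers most use cases and pages automatically):
+ *
+ * ```js
+ * let marker;
+ * do {
+ *   const response = await containerClient.listBlobHierarchySegment("/", marker);
+ *   for (const prefix of response.segment.blobPrefixes || []) {
+ *     console.log(`BlobPrefix: ${prefix.name}`);
+ *   }
+ *   for (const blob of response.segment.blobItems) {
+ *     console.log(`BlobItem: ${blob.name}`);
+ *   }
+ *   marker = response.continuationToken;
+ * } while (marker);
+ * ```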
+ */ + async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + try { + const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }) }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listBlobsFlatSegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); + marker = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. 
+ */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsFlatSegmentResponse = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options = {}) { + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blobs + const iter = this.listItems(updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + listHierarchySegments(delimiter, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; + if (!!marker || marker === undefined) { + do { + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); + marker = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listItemsByHierarchy(delimiter, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter, options = {}) { + if (delimiter === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include = []; + if (options.includeCopy) { + include.push("copy"); + } + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSnapshots) { + include.push("snapshots"); + } + if (options.includeVersions) { + include.push("versions"); + } + if (options.includeUncommitedBlobs) { + include.push("uncommittedblobs"); + } + if (options.includeTags) { + include.push("tags"); + } + if (options.includeDeletedWithVersions) { + include.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = undefined; + } + const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); + // AsyncIterableIterator to iterate over blob prefixes and blobs + const iter = this.listItemsByHierarchy(delimiter, updatedOptions); + return { + /** + * The next method, part of the iteration protocol + */ + async next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + getContainerNameFromUrl() { + let containerName; + try { + // URL may look like the following + // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; + // "https://myaccount.blob.core.windows.net/mycontainer"; + // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` + // http://localhost:10001/devstoreaccount1/containername + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + // "https://myaccount.blob.core.windows.net/containername". 
+ // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername + // .getPath() -> /devstoreaccount1/containername + containerName = parsedUrl.getPath().split("/")[2]; + } + else { + // "https://customdomain.com/containername". + // .getPath() -> /containername + containerName = parsedUrl.getPath().split("/")[1]; + } + // decode the encoded containerName - to get all the special characters that might be present in it + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return containerName; + } + catch (error) { + throw new Error("Unable to extract containerName with provided information."); + } + } + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +class AccountSASPermissions { + constructor() { + /** + * Permission to read resources and list queues and tables granted. + */ + this.read = false; + /** + * Permission to write resources granted. + */ + this.write = false; + /** + * Permission to create blobs and files granted. + */ + this.delete = false; + /** + * Permission to delete versions granted. + */ + this.deleteVersion = false; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. 
+ */ + this.list = false; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + this.add = false; + /** + * Permission to create blobs and files granted. + */ + this.create = false; + /** + * Permissions to update messages and table entities granted. + */ + this.update = false; + /** + * Permission to get and delete messages granted. + */ + this.process = false; + /** + * Specfies Tag access granted. + */ + this.tag = false; + /** + * Permission to filter blobs. + */ + this.filter = false; + /** + * Permission to set immutability policy. + */ + this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + } + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions) { + const accountSASPermissions = new AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; + } + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike) { + const accountSASPermissions = new AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; + } + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
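+ *
+ * A minimal sketch of round-tripping a permissions string:
+ *
+ * ```js
+ * const permissions = AccountSASPermissions.parse("rwdl");
+ * permissions.add = true;
+ * // toString() re-emits the flags in the order the service expects.
+ * console.log(permissions.toString()); // "rwdla"
+ * ```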
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + // The order of the characters should be as specified here to ensure correctness: + // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + // Use a string array instead of string concatenating += operator for performance + const permissions = []; + if (this.read) { + permissions.push("r"); + } + if (this.write) { + permissions.push("w"); + } + if (this.delete) { + permissions.push("d"); + } + if (this.deleteVersion) { + permissions.push("x"); + } + if (this.filter) { + permissions.push("f"); + } + if (this.tag) { + permissions.push("t"); + } + if (this.list) { + permissions.push("l"); + } + if (this.add) { + permissions.push("a"); + } + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); + } + return permissions.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +class AccountSASResourceTypes { + constructor() { + /** + * Permission to access service level APIs granted. + */ + this.service = false; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + this.container = false; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + this.object = false; + } + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes) { + const accountSASResourceTypes = new AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } + } + return accountSASResourceTypes; + } + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); + } + if (this.container) { + resourceTypes.push("c"); + } + if (this.object) { + resourceTypes.push("o"); + } + return resourceTypes.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. 
Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +class AccountSASServices { + constructor() { + /** + * Permission to access blob resources granted. + */ + this.blob = false; + /** + * Permission to access file resources granted. + */ + this.file = false; + /** + * Permission to access queue resources granted. + */ + this.queue = false; + /** + * Permission to access table resources granted. + */ + this.table = false; + } + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; + } + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version = accountSASSignatureValues.version + ? 
accountSASSignatureValues.version + : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.setImmutabilityPolicy && + version < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.deleteVersion && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.tag && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.filter && + version < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +class BlobServiceClient extends StorageClient { + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreHttp.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = new Service(this.storageClientContext); + } + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + options = options || {}; + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + const pipeline = newPipeline(sharedKeyCredential, options); + return new BlobServiceClient(extractedCreds.url, pipeline); + } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } + else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new BlobServiceClient(extractedCreds.url + "?" 
+ extractedCreds.accountSas, pipeline); + } + else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); + } + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse, + }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. + */ + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + try { + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); + try { + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, + deletedContainerVersion }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. 
+ * @param options - Options to configure Container Rename operation. + */ + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. + async renameContainer(sourceContainerName, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); + try { + const containerClient = this.getContainerClient(destinationContainerName); + // Hack to access a protected member. + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); + try { + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); + try { + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
+ */ + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); + try { + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); + try { + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + async listContainersSegment(marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); + try { + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. 
+ * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); + try { + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. 
+ * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_1, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_1) throw e_1.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + listSegments(marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + let listContainersSegmentResponse; + if (!!marker || marker === undefined) { + do { + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); + listContainersSegmentResponse.containerItems = + listContainersSegmentResponse.containerItems || []; + marker = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); + } while (marker); + } + }); + } + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. + */ + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, function* listItems_1() { + var e_2, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_2) throw e_2.error; } + } + }); + } + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = undefined; + } + const include = []; + if (options.includeDeleted) { + include.push("deleted"); + } + if (options.includeMetadata) { + include.push("metadata"); + } + if (options.includeSystem) { + include.push("system"); + } + // AsyncIterableIterator to iterate over containers + const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); + const iter = this.listItems(listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, + }; + } + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time + */ + async getUserDelegationKey(startsOn, expiresOn, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + try { + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn, false), + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value, + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); + } + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ */ + generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn === undefined) { + const now = new Date(); + expiresOn = new Date(now.getTime() + 3600 * 1000); + } + const sas = generateAccountSASQueryParameters(Object.assign({ permissions, + expiresOn, + resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } +} + +Object.defineProperty(exports, 'BaseRequestPolicy', { + enumerable: true, + get: function () { return coreHttp.BaseRequestPolicy; } +}); +Object.defineProperty(exports, 'HttpHeaders', { + enumerable: true, + get: function () { return coreHttp.HttpHeaders; } +}); +Object.defineProperty(exports, 'RequestPolicyOptions', { + enumerable: true, + get: function () { return coreHttp.RequestPolicyOptions; } +}); +Object.defineProperty(exports, 'RestError', { + enumerable: true, + get: function () { return coreHttp.RestError; } +}); +Object.defineProperty(exports, 'WebResource', { + enumerable: true, + get: function () { return coreHttp.WebResource; } +}); +Object.defineProperty(exports, 'deserializationPolicy', { + enumerable: true, + get: function () { return coreHttp.deserializationPolicy; } +}); +exports.AccountSASPermissions = AccountSASPermissions; +exports.AccountSASResourceTypes = AccountSASResourceTypes; +exports.AccountSASServices = AccountSASServices; +exports.AnonymousCredential = AnonymousCredential; +exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; +exports.AppendBlobClient = AppendBlobClient; +exports.BlobBatch = BlobBatch; +exports.BlobBatchClient = BlobBatchClient; +exports.BlobClient = BlobClient; +exports.BlobLeaseClient = BlobLeaseClient; +exports.BlobSASPermissions = BlobSASPermissions; +exports.BlobServiceClient = BlobServiceClient; +exports.BlockBlobClient = BlockBlobClient; +exports.ContainerClient = ContainerClient; +exports.ContainerSASPermissions = ContainerSASPermissions; +exports.Credential = Credential; +exports.CredentialPolicy = CredentialPolicy; +exports.PageBlobClient = PageBlobClient; +exports.Pipeline = Pipeline; +exports.SASQueryParameters = SASQueryParameters; +exports.StorageBrowserPolicy = StorageBrowserPolicy; +exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; +exports.StorageOAuthScopes = StorageOAuthScopes; +exports.StorageRetryPolicy = StorageRetryPolicy; +exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; +exports.StorageSharedKeyCredential = StorageSharedKeyCredential; +exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; +exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; +exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; +exports.isPipelineLike = isPipelineLike; +exports.logger = logger; +exports.newPipeline = newPipeline; +//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/storage-blob/dist/index.js.map b/node_modules/@azure/storage-blob/dist/index.js.map new file mode 100644 index 0000000000..98e3f5fa07 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sources":["../src/generated/src/models/mappers.ts","../src/generated/src/models/parameters.ts","../src/generated/src/operations/service.ts","../src/generated/src/operations/container.ts","../src/generated/src/operations/blob.ts","../src/generated/src/operations/pageBlob.ts","../src/generated/src/operations/appendBlob.ts","../src/generated/src/operations/blockBlob.ts","../src/log.ts","../src/utils/constants.ts","../src/utils/utils.common.ts","../src/policies/StorageBrowserPolicy.ts","../src/StorageBrowserPolicyFactory.ts","../src/policies/StorageRetryPolicy.ts","../src/StorageRetryPolicyFactory.ts","../src/policies/CredentialPolicy.ts","../src/policies/AnonymousCredentialPolicy.ts","../src/credentials/Credential.ts","../src/credentials/AnonymousCredential.ts","../src/policies/TelemetryPolicy.ts","../src/TelemetryPolicyFactory.ts","../src/utils/cache.ts","../src/policies/StorageBearerTokenChallengeAuthenticationPolicy.ts","../src/Pipeline.ts","../src/policies/StorageSharedKeyCredentialPolicy.ts","../src/credentials/StorageSharedKeyCredential.ts","../src/generated/src/storageClientContext.ts","../src/StorageClient.ts","../src/utils/tracing.ts","../src/sas/BlobSASPermissions.ts","../src/sas/ContainerSASPermissions.ts","../src/credentials/UserDelegationKeyCredential.ts","../src/sas/SasIPRange.ts","../src/sas/SASQueryParameters.ts","../src/sas/BlobSASSignatureValues.ts","../src/BlobLeaseClient.ts","../src/utils/RetriableReadableStream.ts","../src/BlobDownloadResponse.ts","../../storage-internal-avro/src/AvroConstants.ts","../../storage-internal-avro/src/AvroParser.ts","../../storage-internal-avro/src/utils/utils.common.ts","../../storage-internal-avro/src/AvroReader.ts","../../storage-internal-avro/src/AvroReadable.ts","../../storage-internal-avro/src/AvroReadableFromStream.ts","../src/utils/BlobQuickQueryStream.ts","../src/BlobQueryResponse.ts","../src/models.ts","../src/PageBlobRangeResponse.ts","../src/pollers/BlobStartCopyFromUrlPoller.ts","../src/Range.ts","../src/utils/Batch.ts","../../storage-common/src/BuffersStream.ts","../../storage-common/src/PooledBuffer.ts","../../storage-common/src/BufferScheduler.ts","../src/utils/utils.node.ts","../src/Clients.ts","../src/BatchUtils.ts","../src/BatchResponseParser.ts","../src/utils/Mutex.ts","../src/BlobBatch.ts","../src/BlobBatchClient.ts","../src/ContainerClient.ts","../src/sas/AccountSASPermissions.ts","../src/sas/AccountSASResourceTypes.ts","../src/sas/AccountSASServices.ts","../src/sas/AccountSASSignatureValues.ts","../src/BlobServiceClient.ts"],"sourcesContent":["/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\n\nexport const BlobServiceProperties: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceProperties\",\n xmlName: \"StorageServiceProperties\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceProperties\",\n modelProperties: {\n blobAnalyticsLogging: {\n serializedName: \"Logging\",\n xmlName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\"\n }\n },\n hourMetrics: {\n serializedName: \"HourMetrics\",\n xmlName: \"HourMetrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\"\n }\n },\n minuteMetrics: {\n serializedName: \"MinuteMetrics\",\n xmlName: \"MinuteMetrics\",\n type: {\n name: \"Composite\",\n className: 
\"Metrics\"\n }\n },\n cors: {\n serializedName: \"Cors\",\n xmlName: \"Cors\",\n xmlIsWrapped: true,\n xmlElementName: \"CorsRule\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"CorsRule\"\n }\n }\n }\n },\n defaultServiceVersion: {\n serializedName: \"DefaultServiceVersion\",\n xmlName: \"DefaultServiceVersion\",\n type: {\n name: \"String\"\n }\n },\n deleteRetentionPolicy: {\n serializedName: \"DeleteRetentionPolicy\",\n xmlName: \"DeleteRetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n },\n staticWebsite: {\n serializedName: \"StaticWebsite\",\n xmlName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\"\n }\n }\n }\n }\n};\n\nexport const Logging: coreHttp.CompositeMapper = {\n serializedName: \"Logging\",\n type: {\n name: \"Composite\",\n className: \"Logging\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n required: true,\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n deleteProperty: {\n serializedName: \"Delete\",\n required: true,\n xmlName: \"Delete\",\n type: {\n name: \"Boolean\"\n }\n },\n read: {\n serializedName: \"Read\",\n required: true,\n xmlName: \"Read\",\n type: {\n name: \"Boolean\"\n }\n },\n write: {\n serializedName: \"Write\",\n required: true,\n xmlName: \"Write\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const RetentionPolicy: coreHttp.CompositeMapper = {\n serializedName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n days: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"Days\",\n xmlName: \"Days\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const Metrics: coreHttp.CompositeMapper = {\n serializedName: \"Metrics\",\n type: {\n name: \"Composite\",\n className: \"Metrics\",\n modelProperties: {\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n includeAPIs: {\n serializedName: \"IncludeAPIs\",\n xmlName: \"IncludeAPIs\",\n type: {\n name: \"Boolean\"\n }\n },\n retentionPolicy: {\n serializedName: \"RetentionPolicy\",\n xmlName: \"RetentionPolicy\",\n type: {\n name: \"Composite\",\n className: \"RetentionPolicy\"\n }\n }\n }\n }\n};\n\nexport const CorsRule: coreHttp.CompositeMapper = {\n serializedName: \"CorsRule\",\n type: {\n name: \"Composite\",\n className: \"CorsRule\",\n modelProperties: {\n allowedOrigins: {\n serializedName: \"AllowedOrigins\",\n required: true,\n xmlName: \"AllowedOrigins\",\n type: {\n name: \"String\"\n }\n },\n allowedMethods: {\n serializedName: \"AllowedMethods\",\n required: true,\n xmlName: \"AllowedMethods\",\n type: {\n name: \"String\"\n }\n },\n allowedHeaders: {\n serializedName: \"AllowedHeaders\",\n required: true,\n xmlName: \"AllowedHeaders\",\n type: {\n name: \"String\"\n }\n },\n exposedHeaders: {\n serializedName: \"ExposedHeaders\",\n required: true,\n xmlName: \"ExposedHeaders\",\n type: {\n name: \"String\"\n }\n },\n maxAgeInSeconds: {\n constraints: {\n InclusiveMinimum: 
0\n },\n serializedName: \"MaxAgeInSeconds\",\n required: true,\n xmlName: \"MaxAgeInSeconds\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const StaticWebsite: coreHttp.CompositeMapper = {\n serializedName: \"StaticWebsite\",\n type: {\n name: \"Composite\",\n className: \"StaticWebsite\",\n modelProperties: {\n enabled: {\n serializedName: \"Enabled\",\n required: true,\n xmlName: \"Enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n indexDocument: {\n serializedName: \"IndexDocument\",\n xmlName: \"IndexDocument\",\n type: {\n name: \"String\"\n }\n },\n errorDocument404Path: {\n serializedName: \"ErrorDocument404Path\",\n xmlName: \"ErrorDocument404Path\",\n type: {\n name: \"String\"\n }\n },\n defaultIndexDocumentPath: {\n serializedName: \"DefaultIndexDocumentPath\",\n xmlName: \"DefaultIndexDocumentPath\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const StorageError: coreHttp.CompositeMapper = {\n serializedName: \"StorageError\",\n type: {\n name: \"Composite\",\n className: \"StorageError\",\n modelProperties: {\n message: {\n serializedName: \"Message\",\n xmlName: \"Message\",\n type: {\n name: \"String\"\n }\n },\n code: {\n serializedName: \"Code\",\n xmlName: \"Code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobServiceStatistics: coreHttp.CompositeMapper = {\n serializedName: \"BlobServiceStatistics\",\n xmlName: \"StorageServiceStats\",\n type: {\n name: \"Composite\",\n className: \"BlobServiceStatistics\",\n modelProperties: {\n geoReplication: {\n serializedName: \"GeoReplication\",\n xmlName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\"\n }\n }\n }\n }\n};\n\nexport const GeoReplication: coreHttp.CompositeMapper = {\n serializedName: \"GeoReplication\",\n type: {\n name: \"Composite\",\n className: \"GeoReplication\",\n modelProperties: {\n status: {\n serializedName: \"Status\",\n required: true,\n xmlName: \"Status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"live\", \"bootstrap\", \"unavailable\"]\n }\n },\n lastSyncOn: {\n serializedName: \"LastSyncTime\",\n required: true,\n xmlName: \"LastSyncTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ListContainersSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListContainersSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListContainersSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n containerItems: {\n serializedName: \"ContainerItems\",\n required: true,\n xmlName: \"Containers\",\n xmlIsWrapped: true,\n xmlElementName: \"Container\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ContainerItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerItem: coreHttp.CompositeMapper = {\n serializedName: \"ContainerItem\",\n xmlName: \"Container\",\n type: {\n name: 
\"Composite\",\n className: \"ContainerItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n version: {\n serializedName: \"Version\",\n xmlName: \"Version\",\n type: {\n name: \"String\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\"\n }\n },\n metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n }\n }\n }\n};\n\nexport const ContainerProperties: coreHttp.CompositeMapper = {\n serializedName: \"ContainerProperties\",\n type: {\n name: \"Composite\",\n className: \"ContainerProperties\",\n modelProperties: {\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n publicAccess: {\n serializedName: \"PublicAccess\",\n xmlName: \"PublicAccess\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"HasImmutabilityPolicy\",\n xmlName: \"HasImmutabilityPolicy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"HasLegalHold\",\n xmlName: \"HasLegalHold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"DefaultEncryptionScope\",\n xmlName: \"DefaultEncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n preventEncryptionScopeOverride: {\n serializedName: \"DenyEncryptionScopeOverride\",\n xmlName: \"DenyEncryptionScopeOverride\",\n type: {\n name: \"Boolean\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"ImmutableStorageWithVersioningEnabled\",\n xmlName: \"ImmutableStorageWithVersioningEnabled\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const KeyInfo: coreHttp.CompositeMapper = {\n serializedName: \"KeyInfo\",\n type: {\n name: \"Composite\",\n className: \"KeyInfo\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n required: true,\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const UserDelegationKey: coreHttp.CompositeMapper = {\n serializedName: \"UserDelegationKey\",\n type: {\n name: \"Composite\",\n className: 
\"UserDelegationKey\",\n modelProperties: {\n signedObjectId: {\n serializedName: \"SignedOid\",\n required: true,\n xmlName: \"SignedOid\",\n type: {\n name: \"String\"\n }\n },\n signedTenantId: {\n serializedName: \"SignedTid\",\n required: true,\n xmlName: \"SignedTid\",\n type: {\n name: \"String\"\n }\n },\n signedStartsOn: {\n serializedName: \"SignedStart\",\n required: true,\n xmlName: \"SignedStart\",\n type: {\n name: \"String\"\n }\n },\n signedExpiresOn: {\n serializedName: \"SignedExpiry\",\n required: true,\n xmlName: \"SignedExpiry\",\n type: {\n name: \"String\"\n }\n },\n signedService: {\n serializedName: \"SignedService\",\n required: true,\n xmlName: \"SignedService\",\n type: {\n name: \"String\"\n }\n },\n signedVersion: {\n serializedName: \"SignedVersion\",\n required: true,\n xmlName: \"SignedVersion\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobSegment: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobSegment\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobSegment\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n where: {\n serializedName: \"Where\",\n required: true,\n xmlName: \"Where\",\n type: {\n name: \"String\"\n }\n },\n blobs: {\n serializedName: \"Blobs\",\n required: true,\n xmlName: \"Blobs\",\n xmlIsWrapped: true,\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const FilterBlobItem: coreHttp.CompositeMapper = {\n serializedName: \"FilterBlobItem\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"FilterBlobItem\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n type: {\n name: \"String\"\n }\n },\n tags: {\n serializedName: \"Tags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n }\n }\n }\n};\n\nexport const BlobTags: coreHttp.CompositeMapper = {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\",\n modelProperties: {\n blobTagSet: {\n serializedName: \"BlobTagSet\",\n required: true,\n xmlName: \"TagSet\",\n xmlIsWrapped: true,\n xmlElementName: \"Tag\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobTag\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobTag: coreHttp.CompositeMapper = {\n serializedName: \"BlobTag\",\n xmlName: \"Tag\",\n type: {\n name: \"Composite\",\n className: \"BlobTag\",\n modelProperties: {\n key: {\n serializedName: \"Key\",\n required: true,\n xmlName: \"Key\",\n type: {\n name: \"String\"\n }\n },\n value: {\n serializedName: \"Value\",\n required: true,\n xmlName: \"Value\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const SignedIdentifier: coreHttp.CompositeMapper = {\n serializedName: \"SignedIdentifier\",\n xmlName: \"SignedIdentifier\",\n type: {\n 
name: \"Composite\",\n className: \"SignedIdentifier\",\n modelProperties: {\n id: {\n serializedName: \"Id\",\n required: true,\n xmlName: \"Id\",\n type: {\n name: \"String\"\n }\n },\n accessPolicy: {\n serializedName: \"AccessPolicy\",\n xmlName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\"\n }\n }\n }\n }\n};\n\nexport const AccessPolicy: coreHttp.CompositeMapper = {\n serializedName: \"AccessPolicy\",\n type: {\n name: \"Composite\",\n className: \"AccessPolicy\",\n modelProperties: {\n startsOn: {\n serializedName: \"Start\",\n xmlName: \"Start\",\n type: {\n name: \"String\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry\",\n xmlName: \"Expiry\",\n type: {\n name: \"String\"\n }\n },\n permissions: {\n serializedName: \"Permission\",\n xmlName: \"Permission\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsFlatSegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsFlatSegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsFlatSegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobFlatListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobFlatListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobFlatListSegment\",\n modelProperties: {\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobItemInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobItemInternal\",\n xmlName: \"Blob\",\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n },\n deleted: {\n serializedName: \"Deleted\",\n required: true,\n xmlName: \"Deleted\",\n type: {\n name: \"Boolean\"\n }\n },\n snapshot: {\n serializedName: \"Snapshot\",\n required: true,\n xmlName: \"Snapshot\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"VersionId\",\n xmlName: \"VersionId\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"IsCurrentVersion\",\n xmlName: \"IsCurrentVersion\",\n type: {\n name: \"Boolean\"\n }\n },\n properties: {\n serializedName: \"Properties\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\"\n }\n },\n 
metadata: {\n serializedName: \"Metadata\",\n xmlName: \"Metadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n blobTags: {\n serializedName: \"BlobTags\",\n xmlName: \"Tags\",\n type: {\n name: \"Composite\",\n className: \"BlobTags\"\n }\n },\n objectReplicationMetadata: {\n serializedName: \"ObjectReplicationMetadata\",\n xmlName: \"OrMetadata\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n hasVersionsOnly: {\n serializedName: \"HasVersionsOnly\",\n xmlName: \"HasVersionsOnly\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobName: coreHttp.CompositeMapper = {\n serializedName: \"BlobName\",\n type: {\n name: \"Composite\",\n className: \"BlobName\",\n modelProperties: {\n encoded: {\n serializedName: \"Encoded\",\n xmlName: \"Encoded\",\n xmlIsAttribute: true,\n type: {\n name: \"Boolean\"\n }\n },\n content: {\n serializedName: \"content\",\n xmlName: \"content\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobPropertiesInternal: coreHttp.CompositeMapper = {\n serializedName: \"BlobPropertiesInternal\",\n xmlName: \"Properties\",\n type: {\n name: \"Composite\",\n className: \"BlobPropertiesInternal\",\n modelProperties: {\n createdOn: {\n serializedName: \"Creation-Time\",\n xmlName: \"Creation-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n lastModified: {\n serializedName: \"Last-Modified\",\n required: true,\n xmlName: \"Last-Modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"Etag\",\n required: true,\n xmlName: \"Etag\",\n type: {\n name: \"String\"\n }\n },\n contentLength: {\n serializedName: \"Content-Length\",\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"Content-Type\",\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n },\n contentEncoding: {\n serializedName: \"Content-Encoding\",\n xmlName: \"Content-Encoding\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"Content-Language\",\n xmlName: \"Content-Language\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentDisposition: {\n serializedName: \"Content-Disposition\",\n xmlName: \"Content-Disposition\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"Cache-Control\",\n xmlName: \"Cache-Control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"BlobType\",\n xmlName: \"BlobType\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n leaseStatus: {\n serializedName: \"LeaseStatus\",\n xmlName: \"LeaseStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n leaseState: {\n serializedName: \"LeaseState\",\n xmlName: \"LeaseState\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseDuration: {\n serializedName: \"LeaseDuration\",\n xmlName: \"LeaseDuration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n copyId: {\n serializedName: \"CopyId\",\n xmlName: \"CopyId\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n 
serializedName: \"CopyStatus\",\n xmlName: \"CopyStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n copySource: {\n serializedName: \"CopySource\",\n xmlName: \"CopySource\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"CopyProgress\",\n xmlName: \"CopyProgress\",\n type: {\n name: \"String\"\n }\n },\n copyCompletedOn: {\n serializedName: \"CopyCompletionTime\",\n xmlName: \"CopyCompletionTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"CopyStatusDescription\",\n xmlName: \"CopyStatusDescription\",\n type: {\n name: \"String\"\n }\n },\n serverEncrypted: {\n serializedName: \"ServerEncrypted\",\n xmlName: \"ServerEncrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n incrementalCopy: {\n serializedName: \"IncrementalCopy\",\n xmlName: \"IncrementalCopy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"DestinationSnapshot\",\n xmlName: \"DestinationSnapshot\",\n type: {\n name: \"String\"\n }\n },\n deletedOn: {\n serializedName: \"DeletedTime\",\n xmlName: \"DeletedTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n remainingRetentionDays: {\n serializedName: \"RemainingRetentionDays\",\n xmlName: \"RemainingRetentionDays\",\n type: {\n name: \"Number\"\n }\n },\n accessTier: {\n serializedName: \"AccessTier\",\n xmlName: \"AccessTier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n },\n accessTierInferred: {\n serializedName: \"AccessTierInferred\",\n xmlName: \"AccessTierInferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"ArchiveStatus\",\n xmlName: \"ArchiveStatus\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"rehydrate-pending-to-hot\",\n \"rehydrate-pending-to-cool\"\n ]\n }\n },\n customerProvidedKeySha256: {\n serializedName: \"CustomerProvidedKeySha256\",\n xmlName: \"CustomerProvidedKeySha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"EncryptionScope\",\n xmlName: \"EncryptionScope\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"AccessTierChangeTime\",\n xmlName: \"AccessTierChangeTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n tagCount: {\n serializedName: \"TagCount\",\n xmlName: \"TagCount\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"Expiry-Time\",\n xmlName: \"Expiry-Time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"Sealed\",\n xmlName: \"Sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"RehydratePriority\",\n xmlName: \"RehydratePriority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessedOn: {\n serializedName: \"LastAccessTime\",\n xmlName: \"LastAccessTime\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"ImmutabilityPolicyUntilDate\",\n xmlName: \"ImmutabilityPolicyUntilDate\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"ImmutabilityPolicyMode\",\n xmlName: \"ImmutabilityPolicyMode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"LegalHold\",\n xmlName: 
\"LegalHold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const ListBlobsHierarchySegmentResponse: coreHttp.CompositeMapper = {\n serializedName: \"ListBlobsHierarchySegmentResponse\",\n xmlName: \"EnumerationResults\",\n type: {\n name: \"Composite\",\n className: \"ListBlobsHierarchySegmentResponse\",\n modelProperties: {\n serviceEndpoint: {\n serializedName: \"ServiceEndpoint\",\n required: true,\n xmlName: \"ServiceEndpoint\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n containerName: {\n serializedName: \"ContainerName\",\n required: true,\n xmlName: \"ContainerName\",\n xmlIsAttribute: true,\n type: {\n name: \"String\"\n }\n },\n prefix: {\n serializedName: \"Prefix\",\n xmlName: \"Prefix\",\n type: {\n name: \"String\"\n }\n },\n marker: {\n serializedName: \"Marker\",\n xmlName: \"Marker\",\n type: {\n name: \"String\"\n }\n },\n maxPageSize: {\n serializedName: \"MaxResults\",\n xmlName: \"MaxResults\",\n type: {\n name: \"Number\"\n }\n },\n delimiter: {\n serializedName: \"Delimiter\",\n xmlName: \"Delimiter\",\n type: {\n name: \"String\"\n }\n },\n segment: {\n serializedName: \"Segment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\"\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobHierarchyListSegment: coreHttp.CompositeMapper = {\n serializedName: \"BlobHierarchyListSegment\",\n xmlName: \"Blobs\",\n type: {\n name: \"Composite\",\n className: \"BlobHierarchyListSegment\",\n modelProperties: {\n blobPrefixes: {\n serializedName: \"BlobPrefixes\",\n xmlName: \"BlobPrefixes\",\n xmlElementName: \"BlobPrefix\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\"\n }\n }\n }\n },\n blobItems: {\n serializedName: \"BlobItems\",\n required: true,\n xmlName: \"BlobItems\",\n xmlElementName: \"Blob\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"BlobItemInternal\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlobPrefix: coreHttp.CompositeMapper = {\n serializedName: \"BlobPrefix\",\n type: {\n name: \"Composite\",\n className: \"BlobPrefix\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"Composite\",\n className: \"BlobName\"\n }\n }\n }\n }\n};\n\nexport const BlockLookupList: coreHttp.CompositeMapper = {\n serializedName: \"BlockLookupList\",\n xmlName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockLookupList\",\n modelProperties: {\n committed: {\n serializedName: \"Committed\",\n xmlName: \"Committed\",\n xmlElementName: \"Committed\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n uncommitted: {\n serializedName: \"Uncommitted\",\n xmlName: \"Uncommitted\",\n xmlElementName: \"Uncommitted\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n },\n latest: {\n serializedName: \"Latest\",\n xmlName: \"Latest\",\n xmlElementName: \"Latest\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"String\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const BlockList: coreHttp.CompositeMapper = {\n serializedName: \"BlockList\",\n type: {\n name: \"Composite\",\n className: \"BlockList\",\n modelProperties: {\n committedBlocks: {\n serializedName: \"CommittedBlocks\",\n xmlName: \"CommittedBlocks\",\n xmlIsWrapped: true,\n 
xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n },\n uncommittedBlocks: {\n serializedName: \"UncommittedBlocks\",\n xmlName: \"UncommittedBlocks\",\n xmlIsWrapped: true,\n xmlElementName: \"Block\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"Block\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const Block: coreHttp.CompositeMapper = {\n serializedName: \"Block\",\n type: {\n name: \"Composite\",\n className: \"Block\",\n modelProperties: {\n name: {\n serializedName: \"Name\",\n required: true,\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n size: {\n serializedName: \"Size\",\n required: true,\n xmlName: \"Size\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const PageList: coreHttp.CompositeMapper = {\n serializedName: \"PageList\",\n type: {\n name: \"Composite\",\n className: \"PageList\",\n modelProperties: {\n pageRange: {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n xmlElementName: \"PageRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"PageRange\"\n }\n }\n }\n },\n clearRange: {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n xmlElementName: \"ClearRange\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ClearRange\"\n }\n }\n }\n },\n continuationToken: {\n serializedName: \"NextMarker\",\n xmlName: \"NextMarker\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageRange: coreHttp.CompositeMapper = {\n serializedName: \"PageRange\",\n xmlName: \"PageRange\",\n type: {\n name: \"Composite\",\n className: \"PageRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ClearRange: coreHttp.CompositeMapper = {\n serializedName: \"ClearRange\",\n xmlName: \"ClearRange\",\n type: {\n name: \"Composite\",\n className: \"ClearRange\",\n modelProperties: {\n start: {\n serializedName: \"Start\",\n required: true,\n xmlName: \"Start\",\n type: {\n name: \"Number\"\n }\n },\n end: {\n serializedName: \"End\",\n required: true,\n xmlName: \"End\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const QueryRequest: coreHttp.CompositeMapper = {\n serializedName: \"QueryRequest\",\n xmlName: \"QueryRequest\",\n type: {\n name: \"Composite\",\n className: \"QueryRequest\",\n modelProperties: {\n queryType: {\n serializedName: \"QueryType\",\n required: true,\n xmlName: \"QueryType\",\n type: {\n name: \"String\"\n }\n },\n expression: {\n serializedName: \"Expression\",\n required: true,\n xmlName: \"Expression\",\n type: {\n name: \"String\"\n }\n },\n inputSerialization: {\n serializedName: \"InputSerialization\",\n xmlName: \"InputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n },\n outputSerialization: {\n serializedName: \"OutputSerialization\",\n xmlName: \"OutputSerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\"\n }\n }\n }\n }\n};\n\nexport const QuerySerialization: coreHttp.CompositeMapper = {\n serializedName: \"QuerySerialization\",\n type: {\n name: \"Composite\",\n className: \"QuerySerialization\",\n modelProperties: {\n format: {\n serializedName: \"Format\",\n 
xmlName: \"Format\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\"\n }\n }\n }\n }\n};\n\nexport const QueryFormat: coreHttp.CompositeMapper = {\n serializedName: \"QueryFormat\",\n type: {\n name: \"Composite\",\n className: \"QueryFormat\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"delimited\", \"json\", \"arrow\", \"parquet\"]\n }\n },\n delimitedTextConfiguration: {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\"\n }\n },\n jsonTextConfiguration: {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\"\n }\n },\n arrowConfiguration: {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\"\n }\n },\n parquetTextConfiguration: {\n serializedName: \"ParquetTextConfiguration\",\n xmlName: \"ParquetTextConfiguration\",\n type: {\n name: \"any\"\n }\n }\n }\n }\n};\n\nexport const DelimitedTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"DelimitedTextConfiguration\",\n xmlName: \"DelimitedTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"DelimitedTextConfiguration\",\n modelProperties: {\n columnSeparator: {\n serializedName: \"ColumnSeparator\",\n xmlName: \"ColumnSeparator\",\n type: {\n name: \"String\"\n }\n },\n fieldQuote: {\n serializedName: \"FieldQuote\",\n xmlName: \"FieldQuote\",\n type: {\n name: \"String\"\n }\n },\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n },\n escapeChar: {\n serializedName: \"EscapeChar\",\n xmlName: \"EscapeChar\",\n type: {\n name: \"String\"\n }\n },\n headersPresent: {\n serializedName: \"HeadersPresent\",\n xmlName: \"HasHeaders\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const JsonTextConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"JsonTextConfiguration\",\n xmlName: \"JsonTextConfiguration\",\n type: {\n name: \"Composite\",\n className: \"JsonTextConfiguration\",\n modelProperties: {\n recordSeparator: {\n serializedName: \"RecordSeparator\",\n xmlName: \"RecordSeparator\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ArrowConfiguration: coreHttp.CompositeMapper = {\n serializedName: \"ArrowConfiguration\",\n xmlName: \"ArrowConfiguration\",\n type: {\n name: \"Composite\",\n className: \"ArrowConfiguration\",\n modelProperties: {\n schema: {\n serializedName: \"Schema\",\n required: true,\n xmlName: \"Schema\",\n xmlIsWrapped: true,\n xmlElementName: \"Field\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"ArrowField\"\n }\n }\n }\n }\n }\n }\n};\n\nexport const ArrowField: coreHttp.CompositeMapper = {\n serializedName: \"ArrowField\",\n xmlName: \"Field\",\n type: {\n name: \"Composite\",\n className: \"ArrowField\",\n modelProperties: {\n type: {\n serializedName: \"Type\",\n required: true,\n xmlName: \"Type\",\n type: {\n name: \"String\"\n }\n },\n name: {\n serializedName: \"Name\",\n xmlName: \"Name\",\n type: {\n name: \"String\"\n }\n },\n precision: {\n serializedName: \"Precision\",\n xmlName: \"Precision\",\n type: {\n name: \"Number\"\n }\n },\n scale: {\n serializedName: \"Scale\",\n xmlName: 
\"Scale\",\n type: {\n name: \"Number\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_setPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetStatisticsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getStatisticsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetStatisticsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n 
}\n }\n};\n\nexport const ServiceListContainersSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceListContainersSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_listContainersSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceListContainersSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetUserDelegationKeyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getUserDelegationKeyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetUserDelegationKeyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n 
allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n },\n isHierarchicalNamespaceEnabled: {\n serializedName: \"x-ms-is-hns-enabled\",\n xmlName: \"x-ms-is-hns-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ServiceFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Service_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ServiceFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n 
name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesHeaders\",\n modelProperties: {\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n hasImmutabilityPolicy: {\n serializedName: \"x-ms-has-immutability-policy\",\n xmlName: \"x-ms-has-immutability-policy\",\n type: {\n name: \"Boolean\"\n }\n },\n hasLegalHold: {\n serializedName: \"x-ms-has-legal-hold\",\n xmlName: \"x-ms-has-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n defaultEncryptionScope: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n 
denyEncryptionScopeOverride: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n },\n isImmutableStorageWithVersioningEnabled: {\n serializedName: \"x-ms-immutable-storage-with-versioning-enabled\",\n xmlName: \"x-ms-immutable-storage-with-versioning-enabled\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerDeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: 
\"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyHeaders\",\n modelProperties: {\n blobPublicAccess: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSetAccessPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_setAccessPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSetAccessPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: 
\"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRestoreExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_restoreExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRestoreExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenameExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renameExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenameExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerSubmitBatchExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_submitBatchExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerSubmitBatchExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: 
\"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerFilterBlobsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_filterBlobsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerFilterBlobsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n 
type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const ContainerChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobFlatSegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobFlatSegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobFlatSegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentHeaders\",\n modelProperties: {\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerListBlobHierarchySegmentExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_listBlobHierarchySegmentExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerListBlobHierarchySegmentExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: 
\"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const ContainerGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Container_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"ContainerGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n 
contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: 
\"ByteArray\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobDownloadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_downloadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDownloadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n createdOn: {\n serializedName: \"x-ms-creation-time\",\n xmlName: \"x-ms-creation-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n },\n objectReplicationPolicyId: {\n serializedName: \"x-ms-or-policy-id\",\n xmlName: \"x-ms-or-policy-id\",\n type: {\n name: \"String\"\n }\n },\n objectReplicationRules: {\n serializedName: \"x-ms-or\",\n xmlName: \"x-ms-or\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-or-\"\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletedOn: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: 
\"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n isIncrementalCopy: {\n serializedName: \"x-ms-incremental-copy\",\n xmlName: \"x-ms-incremental-copy\",\n type: {\n name: \"Boolean\"\n }\n },\n destinationSnapshot: {\n serializedName: \"x-ms-copy-destination-snapshot\",\n xmlName: \"x-ms-copy-destination-snapshot\",\n type: {\n name: \"String\"\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n accessTier: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"String\"\n }\n },\n 
accessTierInferred: {\n serializedName: \"x-ms-access-tier-inferred\",\n xmlName: \"x-ms-access-tier-inferred\",\n type: {\n name: \"Boolean\"\n }\n },\n archiveStatus: {\n serializedName: \"x-ms-archive-status\",\n xmlName: \"x-ms-archive-status\",\n type: {\n name: \"String\"\n }\n },\n accessTierChangedOn: {\n serializedName: \"x-ms-access-tier-change-time\",\n xmlName: \"x-ms-access-tier-change-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n isCurrentVersion: {\n serializedName: \"x-ms-is-current-version\",\n xmlName: \"x-ms-is-current-version\",\n type: {\n name: \"Boolean\"\n }\n },\n tagCount: {\n serializedName: \"x-ms-tag-count\",\n xmlName: \"x-ms-tag-count\",\n type: {\n name: \"Number\"\n }\n },\n expiresOn: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n },\n rehydratePriority: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n },\n lastAccessed: {\n serializedName: \"x-ms-last-access-time\",\n xmlName: \"x-ms-last-access-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiresOn: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetPropertiesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getPropertiesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetPropertiesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteExceptionHeaders\",\n modelProperties: {\n 
errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobUndeleteExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_undeleteExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobUndeleteExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobSetExpiryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setExpiryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetExpiryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n 
xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetHttpHeadersExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setHttpHeadersExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetHttpHeadersExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyExpiry: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n immutabilityPolicyMode: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n }\n }\n};\n\nexport const BlobSetImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobDeleteImmutabilityPolicyExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_deleteImmutabilityPolicyExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobDeleteImmutabilityPolicyExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldHeaders\",\n type: {\n name: 
\"Composite\",\n className: \"BlobSetLegalHoldHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n legalHold: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const BlobSetLegalHoldExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setLegalHoldExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetLegalHoldExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetMetadataExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setMetadataExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetMetadataExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: 
{\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobAcquireLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_acquireLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAcquireLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobReleaseLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_releaseLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobReleaseLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobRenewLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobRenewLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_renewLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: 
\"BlobRenewLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n leaseId: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobChangeLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_changeLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobChangeLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n leaseTime: {\n serializedName: \"x-ms-lease-time\",\n xmlName: \"x-ms-lease-time\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n }\n }\n};\n\nexport const BlobBreakLeaseExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_breakLeaseExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobBreakLeaseExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotHeaders\",\n modelProperties: {\n snapshot: {\n serializedName: \"x-ms-snapshot\",\n xmlName: \"x-ms-snapshot\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n 
xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCreateSnapshotExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_createSnapshotExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCreateSnapshotExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobStartCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_startCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobStartCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n 
name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n defaultValue: \"success\",\n isConstant: true,\n serializedName: \"x-ms-copy-status\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_copyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobAbortCopyFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_abortCopyFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobAbortCopyFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: 
\"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTierExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTierExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTierExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n skuName: {\n serializedName: \"x-ms-sku-name\",\n xmlName: \"x-ms-sku-name\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Standard_LRS\",\n \"Standard_GRS\",\n \"Standard_RAGRS\",\n \"Standard_ZRS\",\n \"Premium_LRS\"\n ]\n }\n },\n accountKind: {\n serializedName: \"x-ms-account-kind\",\n xmlName: \"x-ms-account-kind\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"Storage\",\n \"BlobStorage\",\n \"StorageV2\",\n \"FileStorage\",\n \"BlockBlobStorage\"\n ]\n }\n }\n }\n }\n};\n\nexport const BlobGetAccountInfoExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getAccountInfoExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetAccountInfoExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n metadata: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n }\n },\n contentLength: {\n serializedName: \"content-length\",\n xmlName: \"content-length\",\n type: {\n name: \"Number\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n contentRange: {\n serializedName: \"content-range\",\n xmlName: \"content-range\",\n type: {\n name: \"String\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n contentEncoding: {\n serializedName: \"content-encoding\",\n xmlName: \"content-encoding\",\n type: {\n name: \"String\"\n }\n },\n cacheControl: {\n serializedName: \"cache-control\",\n 
xmlName: \"cache-control\",\n type: {\n name: \"String\"\n }\n },\n contentDisposition: {\n serializedName: \"content-disposition\",\n xmlName: \"content-disposition\",\n type: {\n name: \"String\"\n }\n },\n contentLanguage: {\n serializedName: \"content-language\",\n xmlName: \"content-language\",\n type: {\n name: \"String\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n blobType: {\n serializedName: \"x-ms-blob-type\",\n xmlName: \"x-ms-blob-type\",\n type: {\n name: \"Enum\",\n allowedValues: [\"BlockBlob\", \"PageBlob\", \"AppendBlob\"]\n }\n },\n copyCompletionTime: {\n serializedName: \"x-ms-copy-completion-time\",\n xmlName: \"x-ms-copy-completion-time\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyStatusDescription: {\n serializedName: \"x-ms-copy-status-description\",\n xmlName: \"x-ms-copy-status-description\",\n type: {\n name: \"String\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyProgress: {\n serializedName: \"x-ms-copy-progress\",\n xmlName: \"x-ms-copy-progress\",\n type: {\n name: \"String\"\n }\n },\n copySource: {\n serializedName: \"x-ms-copy-source\",\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n leaseDuration: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Enum\",\n allowedValues: [\"infinite\", \"fixed\"]\n }\n },\n leaseState: {\n serializedName: \"x-ms-lease-state\",\n xmlName: \"x-ms-lease-state\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"available\",\n \"leased\",\n \"expired\",\n \"breaking\",\n \"broken\"\n ]\n }\n },\n leaseStatus: {\n serializedName: \"x-ms-lease-status\",\n xmlName: \"x-ms-lease-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"locked\", \"unlocked\"]\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n acceptRanges: {\n serializedName: \"accept-ranges\",\n xmlName: \"accept-ranges\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-server-encrypted\",\n xmlName: \"x-ms-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n blobContentMD5: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n errorCode: {\n serializedName: 
\"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n },\n contentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n }\n }\n};\n\nexport const BlobQueryExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_queryExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobQueryExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobGetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_getTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobGetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsHeaders\",\n modelProperties: {\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlobSetTagsExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"Blob_setTagsExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlobSetTagsExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n 
serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const 
PageBlobUploadPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobClearPagesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_clearPagesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobClearPagesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: 
\"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUploadPagesFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_uploadPagesFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUploadPagesFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n 
serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobGetPageRangesDiffExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_getPageRangesDiffExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobGetPageRangesDiffExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobResizeExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_resizeExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobResizeExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobSequenceNumber: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobUpdateSequenceNumberExceptionHeaders: 
coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_updateSequenceNumberExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobUpdateSequenceNumberExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n copyId: {\n serializedName: \"x-ms-copy-id\",\n xmlName: \"x-ms-copy-id\",\n type: {\n name: \"String\"\n }\n },\n copyStatus: {\n serializedName: \"x-ms-copy-status\",\n xmlName: \"x-ms-copy-status\",\n type: {\n name: \"Enum\",\n allowedValues: [\"pending\", \"success\", \"aborted\", \"failed\"]\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const PageBlobCopyIncrementalExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"PageBlob_copyIncrementalExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"PageBlobCopyIncrementalExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: 
\"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobCreateExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_createExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobCreateExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: 
\"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n blobAppendOffset: {\n serializedName: \"x-ms-blob-append-offset\",\n xmlName: \"x-ms-blob-append-offset\",\n type: {\n name: \"String\"\n }\n },\n blobCommittedBlockCount: {\n serializedName: \"x-ms-blob-committed-block-count\",\n xmlName: \"x-ms-blob-committed-block-count\",\n type: {\n name: \"Number\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobAppendBlockFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_appendBlockFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobAppendBlockFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isSealed: {\n serializedName: \"x-ms-blob-sealed\",\n xmlName: \"x-ms-blob-sealed\",\n type: {\n name: \"Boolean\"\n }\n }\n }\n }\n};\n\nexport const AppendBlobSealExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"AppendBlob_sealExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"AppendBlobSealExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n 
}\n }\n};\n\nexport const BlockBlobUploadHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobUploadExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_uploadExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobUploadExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: 
\"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobPutBlobFromUrlExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_putBlobFromUrlExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobPutBlobFromUrlExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLHeaders\",\n modelProperties: {\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: 
\"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobStageBlockFromURLExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_stageBlockFromURLExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobStageBlockFromURLExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListHeaders\",\n modelProperties: {\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n contentMD5: {\n serializedName: \"content-md5\",\n xmlName: \"content-md5\",\n type: {\n name: \"ByteArray\"\n }\n },\n xMsContentCrc64: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n versionId: {\n serializedName: \"x-ms-version-id\",\n xmlName: \"x-ms-version-id\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n isServerEncrypted: {\n serializedName: \"x-ms-request-server-encrypted\",\n xmlName: \"x-ms-request-server-encrypted\",\n type: {\n name: \"Boolean\"\n }\n },\n encryptionKeySha256: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n },\n encryptionScope: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobCommitBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_commitBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobCommitBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const 
BlockBlobGetBlockListHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListHeaders\",\n modelProperties: {\n lastModified: {\n serializedName: \"last-modified\",\n xmlName: \"last-modified\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n etag: {\n serializedName: \"etag\",\n xmlName: \"etag\",\n type: {\n name: \"String\"\n }\n },\n contentType: {\n serializedName: \"content-type\",\n xmlName: \"content-type\",\n type: {\n name: \"String\"\n }\n },\n blobContentLength: {\n serializedName: \"x-ms-blob-content-length\",\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n },\n clientRequestId: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n },\n requestId: {\n serializedName: \"x-ms-request-id\",\n xmlName: \"x-ms-request-id\",\n type: {\n name: \"String\"\n }\n },\n version: {\n serializedName: \"x-ms-version\",\n xmlName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n },\n date: {\n serializedName: \"date\",\n xmlName: \"date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n },\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n\nexport const BlockBlobGetBlockListExceptionHeaders: coreHttp.CompositeMapper = {\n serializedName: \"BlockBlob_getBlockListExceptionHeaders\",\n type: {\n name: \"Composite\",\n className: \"BlockBlobGetBlockListExceptionHeaders\",\n modelProperties: {\n errorCode: {\n serializedName: \"x-ms-error-code\",\n xmlName: \"x-ms-error-code\",\n type: {\n name: \"String\"\n }\n }\n }\n }\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport {\n OperationParameter,\n OperationURLParameter,\n OperationQueryParameter,\n QueryCollectionFormat\n} from \"@azure/core-http\";\nimport {\n BlobServiceProperties as BlobServicePropertiesMapper,\n KeyInfo as KeyInfoMapper,\n QueryRequest as QueryRequestMapper,\n BlobTags as BlobTagsMapper,\n BlockLookupList as BlockLookupListMapper\n} from \"../models/mappers\";\n\nexport const contentType: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobServiceProperties: OperationParameter = {\n parameterPath: \"blobServiceProperties\",\n mapper: BlobServicePropertiesMapper\n};\n\nexport const accept: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const url: OperationURLParameter = {\n parameterPath: \"url\",\n mapper: {\n serializedName: \"url\",\n required: true,\n xmlName: \"url\",\n type: {\n name: \"String\"\n }\n },\n skipEncoding: true\n};\n\nexport const restype: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"service\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"properties\",\n isConstant: true,\n serializedName: \"comp\",\n 
type: {\n name: \"String\"\n }\n }\n};\n\nexport const timeoutInSeconds: OperationQueryParameter = {\n parameterPath: [\"options\", \"timeoutInSeconds\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 0\n },\n serializedName: \"timeout\",\n xmlName: \"timeout\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const version: OperationParameter = {\n parameterPath: \"version\",\n mapper: {\n defaultValue: \"2021-08-06\",\n isConstant: true,\n serializedName: \"x-ms-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const requestId: OperationParameter = {\n parameterPath: [\"options\", \"requestId\"],\n mapper: {\n serializedName: \"x-ms-client-request-id\",\n xmlName: \"x-ms-client-request-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const accept1: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp1: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"stats\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp2: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"list\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prefix: OperationQueryParameter = {\n parameterPath: [\"options\", \"prefix\"],\n mapper: {\n serializedName: \"prefix\",\n xmlName: \"prefix\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const marker: OperationQueryParameter = {\n parameterPath: [\"options\", \"marker\"],\n mapper: {\n serializedName: \"marker\",\n xmlName: \"marker\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxPageSize: OperationQueryParameter = {\n parameterPath: [\"options\", \"maxPageSize\"],\n mapper: {\n constraints: {\n InclusiveMinimum: 1\n },\n serializedName: \"maxresults\",\n xmlName: \"maxresults\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const include: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListContainersIncludeType\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\"metadata\", \"deleted\", \"system\"]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const keyInfo: OperationParameter = {\n parameterPath: \"keyInfo\",\n mapper: KeyInfoMapper\n};\n\nexport const comp3: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"userdelegationkey\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype1: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"account\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const comp4: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"batch\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const contentLength: OperationParameter = {\n parameterPath: \"contentLength\",\n mapper: {\n serializedName: \"Content-Length\",\n 
required: true,\n xmlName: \"Content-Length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const multipartContentType: OperationParameter = {\n parameterPath: \"multipartContentType\",\n mapper: {\n serializedName: \"Content-Type\",\n required: true,\n xmlName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp5: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blobs\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const where: OperationQueryParameter = {\n parameterPath: [\"options\", \"where\"],\n mapper: {\n serializedName: \"where\",\n xmlName: \"where\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const restype2: OperationQueryParameter = {\n parameterPath: \"restype\",\n mapper: {\n defaultValue: \"container\",\n isConstant: true,\n serializedName: \"restype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const metadata: OperationParameter = {\n parameterPath: [\"options\", \"metadata\"],\n mapper: {\n serializedName: \"x-ms-meta\",\n xmlName: \"x-ms-meta\",\n type: {\n name: \"Dictionary\",\n value: { type: { name: \"String\" } }\n },\n headerCollectionPrefix: \"x-ms-meta-\"\n }\n};\n\nexport const access: OperationParameter = {\n parameterPath: [\"options\", \"access\"],\n mapper: {\n serializedName: \"x-ms-blob-public-access\",\n xmlName: \"x-ms-blob-public-access\",\n type: {\n name: \"Enum\",\n allowedValues: [\"container\", \"blob\"]\n }\n }\n};\n\nexport const defaultEncryptionScope: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"defaultEncryptionScope\"\n ],\n mapper: {\n serializedName: \"x-ms-default-encryption-scope\",\n xmlName: \"x-ms-default-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const preventEncryptionScopeOverride: OperationParameter = {\n parameterPath: [\n \"options\",\n \"containerEncryptionScope\",\n \"preventEncryptionScopeOverride\"\n ],\n mapper: {\n serializedName: \"x-ms-deny-encryption-scope-override\",\n xmlName: \"x-ms-deny-encryption-scope-override\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const leaseId: OperationParameter = {\n parameterPath: [\"options\", \"leaseAccessConditions\", \"leaseId\"],\n mapper: {\n serializedName: \"x-ms-lease-id\",\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifModifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifModifiedSince\"],\n mapper: {\n serializedName: \"If-Modified-Since\",\n xmlName: \"If-Modified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const ifUnmodifiedSince: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifUnmodifiedSince\"],\n mapper: {\n serializedName: \"If-Unmodified-Since\",\n xmlName: \"If-Unmodified-Since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const comp6: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"metadata\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp7: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"acl\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const containerAcl: OperationParameter = {\n parameterPath: [\"options\", \"containerAcl\"],\n mapper: {\n serializedName: \"containerAcl\",\n xmlName: 
\"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Composite\",\n className: \"SignedIdentifier\"\n }\n }\n }\n }\n};\n\nexport const comp8: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"undelete\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerName: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerName\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-name\",\n xmlName: \"x-ms-deleted-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deletedContainerVersion: OperationParameter = {\n parameterPath: [\"options\", \"deletedContainerVersion\"],\n mapper: {\n serializedName: \"x-ms-deleted-container-version\",\n xmlName: \"x-ms-deleted-container-version\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp9: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"rename\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContainerName: OperationParameter = {\n parameterPath: \"sourceContainerName\",\n mapper: {\n serializedName: \"x-ms-source-container-name\",\n required: true,\n xmlName: \"x-ms-source-container-name\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"sourceLeaseId\"],\n mapper: {\n serializedName: \"x-ms-source-lease-id\",\n xmlName: \"x-ms-source-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp10: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"lease\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"acquire\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const duration: OperationParameter = {\n parameterPath: [\"options\", \"duration\"],\n mapper: {\n serializedName: \"x-ms-lease-duration\",\n xmlName: \"x-ms-lease-duration\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const proposedLeaseId: OperationParameter = {\n parameterPath: [\"options\", \"proposedLeaseId\"],\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action1: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"release\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const leaseId1: OperationParameter = {\n parameterPath: \"leaseId\",\n mapper: {\n serializedName: \"x-ms-lease-id\",\n required: true,\n xmlName: \"x-ms-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action2: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"renew\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const action3: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"break\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const breakPeriod: OperationParameter = {\n parameterPath: [\"options\", 
\"breakPeriod\"],\n mapper: {\n serializedName: \"x-ms-lease-break-period\",\n xmlName: \"x-ms-lease-break-period\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const action4: OperationParameter = {\n parameterPath: \"action\",\n mapper: {\n defaultValue: \"change\",\n isConstant: true,\n serializedName: \"x-ms-lease-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const proposedLeaseId1: OperationParameter = {\n parameterPath: \"proposedLeaseId\",\n mapper: {\n serializedName: \"x-ms-proposed-lease-id\",\n required: true,\n xmlName: \"x-ms-proposed-lease-id\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const include1: OperationQueryParameter = {\n parameterPath: [\"options\", \"include\"],\n mapper: {\n serializedName: \"include\",\n xmlName: \"include\",\n xmlElementName: \"ListBlobsIncludeItem\",\n type: {\n name: \"Sequence\",\n element: {\n type: {\n name: \"Enum\",\n allowedValues: [\n \"copy\",\n \"deleted\",\n \"metadata\",\n \"snapshots\",\n \"uncommittedblobs\",\n \"versions\",\n \"tags\",\n \"immutabilitypolicy\",\n \"legalhold\",\n \"deletedwithversions\"\n ]\n }\n }\n }\n },\n collectionFormat: QueryCollectionFormat.Csv\n};\n\nexport const delimiter: OperationQueryParameter = {\n parameterPath: \"delimiter\",\n mapper: {\n serializedName: \"delimiter\",\n required: true,\n xmlName: \"delimiter\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const snapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"snapshot\"],\n mapper: {\n serializedName: \"snapshot\",\n xmlName: \"snapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const versionId: OperationQueryParameter = {\n parameterPath: [\"options\", \"versionId\"],\n mapper: {\n serializedName: \"versionid\",\n xmlName: \"versionid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const range: OperationParameter = {\n parameterPath: [\"options\", \"range\"],\n mapper: {\n serializedName: \"x-ms-range\",\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const rangeGetContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentMD5\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-md5\",\n xmlName: \"x-ms-range-get-content-md5\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const rangeGetContentCRC64: OperationParameter = {\n parameterPath: [\"options\", \"rangeGetContentCRC64\"],\n mapper: {\n serializedName: \"x-ms-range-get-content-crc64\",\n xmlName: \"x-ms-range-get-content-crc64\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionKey: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKey\"],\n mapper: {\n serializedName: \"x-ms-encryption-key\",\n xmlName: \"x-ms-encryption-key\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionKeySha256: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionKeySha256\"],\n mapper: {\n serializedName: \"x-ms-encryption-key-sha256\",\n xmlName: \"x-ms-encryption-key-sha256\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const encryptionAlgorithm: OperationParameter = {\n parameterPath: [\"options\", \"cpkInfo\", \"encryptionAlgorithm\"],\n mapper: {\n serializedName: \"x-ms-encryption-algorithm\",\n xmlName: \"x-ms-encryption-algorithm\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifMatch\"],\n mapper: {\n serializedName: \"If-Match\",\n xmlName: \"If-Match\",\n 
type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifNoneMatch: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifNoneMatch\"],\n mapper: {\n serializedName: \"If-None-Match\",\n xmlName: \"If-None-Match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifTags: OperationParameter = {\n parameterPath: [\"options\", \"modifiedAccessConditions\", \"ifTags\"],\n mapper: {\n serializedName: \"x-ms-if-tags\",\n xmlName: \"x-ms-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const deleteSnapshots: OperationParameter = {\n parameterPath: [\"options\", \"deleteSnapshots\"],\n mapper: {\n serializedName: \"x-ms-delete-snapshots\",\n xmlName: \"x-ms-delete-snapshots\",\n type: {\n name: \"Enum\",\n allowedValues: [\"include\", \"only\"]\n }\n }\n};\n\nexport const blobDeleteType: OperationQueryParameter = {\n parameterPath: [\"options\", \"blobDeleteType\"],\n mapper: {\n serializedName: \"deletetype\",\n xmlName: \"deletetype\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp11: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"expiry\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiryOptions: OperationParameter = {\n parameterPath: \"expiryOptions\",\n mapper: {\n serializedName: \"x-ms-expiry-option\",\n required: true,\n xmlName: \"x-ms-expiry-option\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const expiresOn: OperationParameter = {\n parameterPath: [\"options\", \"expiresOn\"],\n mapper: {\n serializedName: \"x-ms-expiry-time\",\n xmlName: \"x-ms-expiry-time\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobCacheControl: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobCacheControl\"],\n mapper: {\n serializedName: \"x-ms-blob-cache-control\",\n xmlName: \"x-ms-blob-cache-control\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentType: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentType\"],\n mapper: {\n serializedName: \"x-ms-blob-content-type\",\n xmlName: \"x-ms-blob-content-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentMD5\"],\n mapper: {\n serializedName: \"x-ms-blob-content-md5\",\n xmlName: \"x-ms-blob-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobContentEncoding: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentEncoding\"],\n mapper: {\n serializedName: \"x-ms-blob-content-encoding\",\n xmlName: \"x-ms-blob-content-encoding\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLanguage: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentLanguage\"],\n mapper: {\n serializedName: \"x-ms-blob-content-language\",\n xmlName: \"x-ms-blob-content-language\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentDisposition: OperationParameter = {\n parameterPath: [\"options\", \"blobHttpHeaders\", \"blobContentDisposition\"],\n mapper: {\n serializedName: \"x-ms-blob-content-disposition\",\n xmlName: \"x-ms-blob-content-disposition\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp12: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"immutabilityPolicies\",\n isConstant: true,\n serializedName: 
\"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const immutabilityPolicyExpiry: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyExpiry\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-until-date\",\n xmlName: \"x-ms-immutability-policy-until-date\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const immutabilityPolicyMode: OperationParameter = {\n parameterPath: [\"options\", \"immutabilityPolicyMode\"],\n mapper: {\n serializedName: \"x-ms-immutability-policy-mode\",\n xmlName: \"x-ms-immutability-policy-mode\",\n type: {\n name: \"Enum\",\n allowedValues: [\"Mutable\", \"Unlocked\", \"Locked\"]\n }\n }\n};\n\nexport const comp13: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"legalhold\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const legalHold: OperationParameter = {\n parameterPath: \"legalHold\",\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n required: true,\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const encryptionScope: OperationParameter = {\n parameterPath: [\"options\", \"encryptionScope\"],\n mapper: {\n serializedName: \"x-ms-encryption-scope\",\n xmlName: \"x-ms-encryption-scope\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp14: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"snapshot\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier: OperationParameter = {\n parameterPath: [\"options\", \"tier\"],\n mapper: {\n serializedName: \"x-ms-access-tier\",\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const rehydratePriority: OperationParameter = {\n parameterPath: [\"options\", \"rehydratePriority\"],\n mapper: {\n serializedName: \"x-ms-rehydrate-priority\",\n xmlName: \"x-ms-rehydrate-priority\",\n type: {\n name: \"Enum\",\n allowedValues: [\"High\", \"Standard\"]\n }\n }\n};\n\nexport const sourceIfModifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfModifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-modified-since\",\n xmlName: \"x-ms-source-if-modified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfUnmodifiedSince: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfUnmodifiedSince\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-unmodified-since\",\n xmlName: \"x-ms-source-if-unmodified-since\",\n type: {\n name: \"DateTimeRfc1123\"\n }\n }\n};\n\nexport const sourceIfMatch: OperationParameter = {\n parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfMatch\"],\n mapper: {\n serializedName: \"x-ms-source-if-match\",\n xmlName: \"x-ms-source-if-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfNoneMatch: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sourceModifiedAccessConditions\",\n \"sourceIfNoneMatch\"\n ],\n mapper: {\n serializedName: \"x-ms-source-if-none-match\",\n xmlName: \"x-ms-source-if-none-match\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceIfTags: OperationParameter = {\n 
parameterPath: [\"options\", \"sourceModifiedAccessConditions\", \"sourceIfTags\"],\n mapper: {\n serializedName: \"x-ms-source-if-tags\",\n xmlName: \"x-ms-source-if-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySource: OperationParameter = {\n parameterPath: \"copySource\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobTagsString: OperationParameter = {\n parameterPath: [\"options\", \"blobTagsString\"],\n mapper: {\n serializedName: \"x-ms-tags\",\n xmlName: \"x-ms-tags\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sealBlob: OperationParameter = {\n parameterPath: [\"options\", \"sealBlob\"],\n mapper: {\n serializedName: \"x-ms-seal-blob\",\n xmlName: \"x-ms-seal-blob\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const legalHold1: OperationParameter = {\n parameterPath: [\"options\", \"legalHold\"],\n mapper: {\n serializedName: \"x-ms-legal-hold\",\n xmlName: \"x-ms-legal-hold\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const xMsRequiresSync: OperationParameter = {\n parameterPath: \"xMsRequiresSync\",\n mapper: {\n defaultValue: \"true\",\n isConstant: true,\n serializedName: \"x-ms-requires-sync\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentMD5\"],\n mapper: {\n serializedName: \"x-ms-source-content-md5\",\n xmlName: \"x-ms-source-content-md5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const copySourceAuthorization: OperationParameter = {\n parameterPath: [\"options\", \"copySourceAuthorization\"],\n mapper: {\n serializedName: \"x-ms-copy-source-authorization\",\n xmlName: \"x-ms-copy-source-authorization\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceTags: OperationParameter = {\n parameterPath: [\"options\", \"copySourceTags\"],\n mapper: {\n serializedName: \"x-ms-copy-source-tag-option\",\n xmlName: \"x-ms-copy-source-tag-option\",\n type: {\n name: \"Enum\",\n allowedValues: [\"REPLACE\", \"COPY\"]\n }\n }\n};\n\nexport const comp15: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"copy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyActionAbortConstant: OperationParameter = {\n parameterPath: \"copyActionAbortConstant\",\n mapper: {\n defaultValue: \"abort\",\n isConstant: true,\n serializedName: \"x-ms-copy-action\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copyId: OperationQueryParameter = {\n parameterPath: \"copyId\",\n mapper: {\n serializedName: \"copyid\",\n required: true,\n xmlName: \"copyid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp16: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tier\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tier1: OperationParameter = {\n parameterPath: \"tier\",\n mapper: {\n serializedName: \"x-ms-access-tier\",\n required: true,\n xmlName: \"x-ms-access-tier\",\n type: {\n name: \"Enum\",\n allowedValues: [\n \"P4\",\n \"P6\",\n \"P10\",\n \"P15\",\n \"P20\",\n \"P30\",\n \"P40\",\n \"P50\",\n \"P60\",\n \"P70\",\n \"P80\",\n \"Hot\",\n \"Cool\",\n \"Archive\"\n ]\n }\n }\n};\n\nexport const queryRequest: OperationParameter = {\n parameterPath: [\"options\", \"queryRequest\"],\n mapper: 
QueryRequestMapper\n};\n\nexport const comp17: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"query\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp18: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"tags\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const tags: OperationParameter = {\n parameterPath: [\"options\", \"tags\"],\n mapper: BlobTagsMapper\n};\n\nexport const transactionalContentMD5: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentMD5\"],\n mapper: {\n serializedName: \"Content-MD5\",\n xmlName: \"Content-MD5\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const transactionalContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"transactionalContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-content-crc64\",\n xmlName: \"x-ms-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const blobType: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"PageBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobContentLength: OperationParameter = {\n parameterPath: \"blobContentLength\",\n mapper: {\n serializedName: \"x-ms-blob-content-length\",\n required: true,\n xmlName: \"x-ms-blob-content-length\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const blobSequenceNumber: OperationParameter = {\n parameterPath: [\"options\", \"blobSequenceNumber\"],\n mapper: {\n serializedName: \"x-ms-blob-sequence-number\",\n xmlName: \"x-ms-blob-sequence-number\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const contentType1: OperationParameter = {\n parameterPath: [\"options\", \"contentType\"],\n mapper: {\n defaultValue: \"application/octet-stream\",\n isConstant: true,\n serializedName: \"Content-Type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const body1: OperationParameter = {\n parameterPath: \"body\",\n mapper: {\n serializedName: \"body\",\n required: true,\n xmlName: \"body\",\n type: {\n name: \"Stream\"\n }\n }\n};\n\nexport const accept2: OperationParameter = {\n parameterPath: \"accept\",\n mapper: {\n defaultValue: \"application/xml\",\n isConstant: true,\n serializedName: \"Accept\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp19: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"page\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const pageWrite: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"update\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThanOrEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThanOrEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-le\",\n xmlName: \"x-ms-if-sequence-number-le\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const ifSequenceNumberLessThan: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberLessThan\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-lt\",\n xmlName: \"x-ms-if-sequence-number-lt\",\n type: {\n name: \"Number\"\n }\n 
}\n};\n\nexport const ifSequenceNumberEqualTo: OperationParameter = {\n parameterPath: [\n \"options\",\n \"sequenceNumberAccessConditions\",\n \"ifSequenceNumberEqualTo\"\n ],\n mapper: {\n serializedName: \"x-ms-if-sequence-number-eq\",\n xmlName: \"x-ms-if-sequence-number-eq\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const pageWrite1: OperationParameter = {\n parameterPath: \"pageWrite\",\n mapper: {\n defaultValue: \"clear\",\n isConstant: true,\n serializedName: \"x-ms-page-write\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceUrl: OperationParameter = {\n parameterPath: \"sourceUrl\",\n mapper: {\n serializedName: \"x-ms-copy-source\",\n required: true,\n xmlName: \"x-ms-copy-source\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceRange: OperationParameter = {\n parameterPath: \"sourceRange\",\n mapper: {\n serializedName: \"x-ms-source-range\",\n required: true,\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sourceContentCrc64: OperationParameter = {\n parameterPath: [\"options\", \"sourceContentCrc64\"],\n mapper: {\n serializedName: \"x-ms-source-content-crc64\",\n xmlName: \"x-ms-source-content-crc64\",\n type: {\n name: \"ByteArray\"\n }\n }\n};\n\nexport const range1: OperationParameter = {\n parameterPath: \"range\",\n mapper: {\n serializedName: \"x-ms-range\",\n required: true,\n xmlName: \"x-ms-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp20: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"pagelist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevsnapshot: OperationQueryParameter = {\n parameterPath: [\"options\", \"prevsnapshot\"],\n mapper: {\n serializedName: \"prevsnapshot\",\n xmlName: \"prevsnapshot\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const prevSnapshotUrl: OperationParameter = {\n parameterPath: [\"options\", \"prevSnapshotUrl\"],\n mapper: {\n serializedName: \"x-ms-previous-snapshot-url\",\n xmlName: \"x-ms-previous-snapshot-url\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const sequenceNumberAction: OperationParameter = {\n parameterPath: \"sequenceNumberAction\",\n mapper: {\n serializedName: \"x-ms-sequence-number-action\",\n required: true,\n xmlName: \"x-ms-sequence-number-action\",\n type: {\n name: \"Enum\",\n allowedValues: [\"max\", \"update\", \"increment\"]\n }\n }\n};\n\nexport const comp21: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"incrementalcopy\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType1: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"AppendBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp22: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"appendblock\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const maxSize: OperationParameter = {\n parameterPath: [\"options\", \"appendPositionAccessConditions\", \"maxSize\"],\n mapper: {\n serializedName: \"x-ms-blob-condition-maxsize\",\n xmlName: \"x-ms-blob-condition-maxsize\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const appendPosition: OperationParameter = {\n parameterPath: [\n \"options\",\n \"appendPositionAccessConditions\",\n 
\"appendPosition\"\n ],\n mapper: {\n serializedName: \"x-ms-blob-condition-appendpos\",\n xmlName: \"x-ms-blob-condition-appendpos\",\n type: {\n name: \"Number\"\n }\n }\n};\n\nexport const sourceRange1: OperationParameter = {\n parameterPath: [\"options\", \"sourceRange\"],\n mapper: {\n serializedName: \"x-ms-source-range\",\n xmlName: \"x-ms-source-range\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const comp23: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"seal\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blobType2: OperationParameter = {\n parameterPath: \"blobType\",\n mapper: {\n defaultValue: \"BlockBlob\",\n isConstant: true,\n serializedName: \"x-ms-blob-type\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const copySourceBlobProperties: OperationParameter = {\n parameterPath: [\"options\", \"copySourceBlobProperties\"],\n mapper: {\n serializedName: \"x-ms-copy-source-blob-properties\",\n xmlName: \"x-ms-copy-source-blob-properties\",\n type: {\n name: \"Boolean\"\n }\n }\n};\n\nexport const comp24: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"block\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blockId: OperationQueryParameter = {\n parameterPath: \"blockId\",\n mapper: {\n serializedName: \"blockid\",\n required: true,\n xmlName: \"blockid\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const blocks: OperationParameter = {\n parameterPath: \"blocks\",\n mapper: BlockLookupListMapper\n};\n\nexport const comp25: OperationQueryParameter = {\n parameterPath: \"comp\",\n mapper: {\n defaultValue: \"blocklist\",\n isConstant: true,\n serializedName: \"comp\",\n type: {\n name: \"String\"\n }\n }\n};\n\nexport const listType: OperationQueryParameter = {\n parameterPath: \"listType\",\n mapper: {\n defaultValue: \"committed\",\n serializedName: \"blocklisttype\",\n required: true,\n xmlName: \"blocklisttype\",\n type: {\n name: \"Enum\",\n allowedValues: [\"committed\", \"uncommitted\", \"all\"]\n }\n }\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobServiceProperties,\n ServiceSetPropertiesOptionalParams,\n ServiceSetPropertiesResponse,\n ServiceGetPropertiesOptionalParams,\n ServiceGetPropertiesResponse,\n ServiceGetStatisticsOptionalParams,\n ServiceGetStatisticsResponse,\n ServiceListContainersSegmentOptionalParams,\n ServiceListContainersSegmentResponse,\n KeyInfo,\n ServiceGetUserDelegationKeyOptionalParams,\n ServiceGetUserDelegationKeyResponse,\n ServiceGetAccountInfoResponse,\n ServiceSubmitBatchOptionalParams,\n ServiceSubmitBatchResponse,\n ServiceFilterBlobsOptionalParams,\n ServiceFilterBlobsResponse\n} from \"../models\";\n\n/** Class representing a Service. 
*/\nexport class Service {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Service class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * Sets properties for a storage account's Blob service endpoint, including properties for Storage\n * Analytics and CORS (Cross-Origin Resource Sharing) rules\n * @param blobServiceProperties The StorageService properties.\n * @param options The options parameters.\n */\n setProperties(\n blobServiceProperties: BlobServiceProperties,\n options?: ServiceSetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobServiceProperties,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the properties of a storage account's Blob service, including properties for Storage Analytics\n * and CORS (Cross-Origin Resource Sharing) rules.\n * @param options The options parameters.\n */\n getProperties(\n options?: ServiceGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only available on the\n * secondary location endpoint when read-access geo-redundant replication is enabled for the storage\n * account.\n * @param options The options parameters.\n */\n getStatistics(\n options?: ServiceGetStatisticsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getStatisticsOperationSpec\n ) as Promise;\n }\n\n /**\n * The List Containers Segment operation returns a list of the containers under the specified account\n * @param options The options parameters.\n */\n listContainersSegment(\n options?: ServiceListContainersSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listContainersSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Retrieves a user delegation key for the Blob service. 
This is only a valid operation when using\n * bearer token authentication.\n * @param keyInfo Key information\n * @param options The options parameters.\n */\n getUserDelegationKey(\n keyInfo: KeyInfo,\n options?: ServiceGetUserDelegationKeyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n keyInfo,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getUserDelegationKeyOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ServiceSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a\n * given search expression. 
Filter blobs searches across all containers within a storage account but\n * can be scoped within the expression to a single container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ServiceFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst setPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ServiceSetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders\n }\n },\n requestBody: Parameters.blobServiceProperties,\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceProperties,\n headersMapper: Mappers.ServiceGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.comp,\n Parameters.timeoutInSeconds\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getStatisticsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobServiceStatistics,\n headersMapper: Mappers.ServiceGetStatisticsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listContainersSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListContainersSegmentResponse,\n headersMapper: Mappers.ServiceListContainersSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.include\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getUserDelegationKeyOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: Mappers.UserDelegationKey,\n headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders\n }\n },\n requestBody: Parameters.keyInfo,\n queryParameters: [\n Parameters.restype,\n Parameters.timeoutInSeconds,\n Parameters.comp3\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ServiceGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ServiceSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp4],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ServiceFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n ContainerCreateOptionalParams,\n ContainerCreateResponse,\n ContainerGetPropertiesOptionalParams,\n ContainerGetPropertiesResponse,\n ContainerDeleteOptionalParams,\n ContainerDeleteResponse,\n ContainerSetMetadataOptionalParams,\n ContainerSetMetadataResponse,\n ContainerGetAccessPolicyOptionalParams,\n ContainerGetAccessPolicyResponse,\n ContainerSetAccessPolicyOptionalParams,\n ContainerSetAccessPolicyResponse,\n ContainerRestoreOptionalParams,\n ContainerRestoreResponse,\n ContainerRenameOptionalParams,\n ContainerRenameResponse,\n 
ContainerSubmitBatchOptionalParams,\n ContainerSubmitBatchResponse,\n ContainerFilterBlobsOptionalParams,\n ContainerFilterBlobsResponse,\n ContainerAcquireLeaseOptionalParams,\n ContainerAcquireLeaseResponse,\n ContainerReleaseLeaseOptionalParams,\n ContainerReleaseLeaseResponse,\n ContainerRenewLeaseOptionalParams,\n ContainerRenewLeaseResponse,\n ContainerBreakLeaseOptionalParams,\n ContainerBreakLeaseResponse,\n ContainerChangeLeaseOptionalParams,\n ContainerChangeLeaseResponse,\n ContainerListBlobFlatSegmentOptionalParams,\n ContainerListBlobFlatSegmentResponse,\n ContainerListBlobHierarchySegmentOptionalParams,\n ContainerListBlobHierarchySegmentResponse,\n ContainerGetAccountInfoResponse\n} from \"../models\";\n\n/** Class representing a Container. */\nexport class Container {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Container class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * creates a new container under the specified account. If the container with the same name already\n * exists, the operation fails\n * @param options The options parameters.\n */\n create(\n options?: ContainerCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * returns all user-defined metadata and system properties for the specified container. The data\n * returned does not include the container's list of blobs\n * @param options The options parameters.\n */\n getProperties(\n options?: ContainerGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * operation marks the specified container for deletion. The container and any blobs contained within\n * it are later deleted during garbage collection\n * @param options The options parameters.\n */\n delete(\n options?: ContainerDeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * operation sets one or more user-defined name-value pairs for the specified container.\n * @param options The options parameters.\n */\n setMetadata(\n options?: ContainerSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * gets the permissions for the specified container. 
The permissions indicate whether container data\n * may be accessed publicly.\n * @param options The options parameters.\n */\n getAccessPolicy(\n options?: ContainerGetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * sets the permissions for the specified container. The permissions indicate whether blobs in a\n * container may be accessed publicly.\n * @param options The options parameters.\n */\n setAccessPolicy(\n options?: ContainerSetAccessPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setAccessPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * Restores a previously-deleted container.\n * @param options The options parameters.\n */\n restore(\n options?: ContainerRestoreOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n restoreOperationSpec\n ) as Promise;\n }\n\n /**\n * Renames an existing container.\n * @param sourceContainerName Required. Specifies the name of the container to rename.\n * @param options The options parameters.\n */\n rename(\n sourceContainerName: string,\n options?: ContainerRenameOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceContainerName,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renameOperationSpec\n ) as Promise;\n }\n\n /**\n * The Batch operation allows multiple API calls to be embedded into a single HTTP request.\n * @param contentLength The length of the request.\n * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch\n * boundary. Example header value: multipart/mixed; boundary=batch_\n * @param body Initial data\n * @param options The options parameters.\n */\n submitBatch(\n contentLength: number,\n multipartContentType: string,\n body: coreHttp.HttpRequestBody,\n options?: ContainerSubmitBatchOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n multipartContentType,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n submitBatchOperationSpec\n ) as Promise;\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given\n * search expression. Filter blobs searches within the given container.\n * @param options The options parameters.\n */\n filterBlobs(\n options?: ContainerFilterBlobsOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n filterBlobsOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n acquireLease(\n options?: ContainerAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: ContainerReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: ContainerRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param options The options parameters.\n */\n breakLease(\n options?: ContainerBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] establishes and manages a lock on a container for delete operations. The lock duration can\n * be 15 to 60 seconds, or can be infinite\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. 
See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: ContainerChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param options The options parameters.\n */\n listBlobFlatSegment(\n options?: ContainerListBlobFlatSegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobFlatSegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The List Blobs operation returns a list of the blobs under the specified container\n * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix\n * element in the response body that acts as a placeholder for all blobs whose names begin with the\n * same substring up to the appearance of the delimiter character. The delimiter may be a single\n * character or a string.\n * @param options The options parameters.\n */\n listBlobHierarchySegment(\n delimiter: string,\n options?: ContainerListBlobHierarchySegmentOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n delimiter,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n listBlobHierarchySegmentOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.access,\n Parameters.defaultEncryptionScope,\n Parameters.preventEncryptionScopeOverride\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: 
[\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerDeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetMetadataExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp6\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: {\n name: \"Sequence\",\n element: {\n type: { name: \"Composite\", className: \"SignedIdentifier\" }\n }\n },\n serializedName: \"SignedIdentifiers\",\n xmlName: \"SignedIdentifiers\",\n xmlIsWrapped: true,\n xmlElementName: \"SignedIdentifier\"\n },\n headersMapper: Mappers.ContainerGetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setAccessPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerSetAccessPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders\n }\n },\n requestBody: Parameters.containerAcl,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp7\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.access,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst restoreOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerRestoreHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRestoreExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n 
Parameters.restype2,\n Parameters.comp8\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.deletedContainerName,\n Parameters.deletedContainerVersion\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renameOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenameHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenameExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp9\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.sourceContainerName,\n Parameters.sourceLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst submitBatchOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"POST\",\n responses: {\n 202: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.ContainerSubmitBatchHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders\n }\n },\n requestBody: Parameters.body,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp4,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.multipartContentType\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst filterBlobsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.FilterBlobSegment,\n headersMapper: Mappers.ContainerFilterBlobsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.comp5,\n Parameters.where,\n Parameters.restype2\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.ContainerAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders\n }\n },\n 
queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.ContainerBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.restype2,\n Parameters.comp10\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobFlatSegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsFlatSegmentResponse,\n headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst listBlobHierarchySegmentOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.ListBlobsHierarchySegmentResponse,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders\n },\n default: {\n 
bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp2,\n Parameters.prefix,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.restype2,\n Parameters.include1,\n Parameters.delimiter\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.ContainerGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlobDownloadOptionalParams,\n BlobDownloadResponse,\n BlobGetPropertiesOptionalParams,\n BlobGetPropertiesResponse,\n BlobDeleteOptionalParams,\n BlobDeleteResponse,\n BlobUndeleteOptionalParams,\n BlobUndeleteResponse,\n BlobExpiryOptions,\n BlobSetExpiryOptionalParams,\n BlobSetExpiryResponse,\n BlobSetHttpHeadersOptionalParams,\n BlobSetHttpHeadersResponse,\n BlobSetImmutabilityPolicyOptionalParams,\n BlobSetImmutabilityPolicyResponse,\n BlobDeleteImmutabilityPolicyOptionalParams,\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetLegalHoldOptionalParams,\n BlobSetLegalHoldResponse,\n BlobSetMetadataOptionalParams,\n BlobSetMetadataResponse,\n BlobAcquireLeaseOptionalParams,\n BlobAcquireLeaseResponse,\n BlobReleaseLeaseOptionalParams,\n BlobReleaseLeaseResponse,\n BlobRenewLeaseOptionalParams,\n BlobRenewLeaseResponse,\n BlobChangeLeaseOptionalParams,\n BlobChangeLeaseResponse,\n BlobBreakLeaseOptionalParams,\n BlobBreakLeaseResponse,\n BlobCreateSnapshotOptionalParams,\n BlobCreateSnapshotResponse,\n BlobStartCopyFromURLOptionalParams,\n BlobStartCopyFromURLResponse,\n BlobCopyFromURLOptionalParams,\n BlobCopyFromURLResponse,\n BlobAbortCopyFromURLOptionalParams,\n BlobAbortCopyFromURLResponse,\n AccessTier,\n BlobSetTierOptionalParams,\n BlobSetTierResponse,\n BlobGetAccountInfoResponse,\n BlobQueryOptionalParams,\n BlobQueryResponse,\n BlobGetTagsOptionalParams,\n BlobGetTagsResponse,\n BlobSetTagsOptionalParams,\n BlobSetTagsResponse\n} from \"../models\";\n\n/** Class representing a Blob. */\nexport class Blob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class Blob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Download operation reads or downloads a blob from the system, including its metadata and\n * properties. 
You can also call Download to read a snapshot.\n * @param options The options parameters.\n */\n download(\n options?: BlobDownloadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n downloadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system\n * properties for the blob. It does not return the content of the blob.\n * @param options The options parameters.\n */\n getProperties(\n options?: BlobGetPropertiesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPropertiesOperationSpec\n ) as Promise;\n }\n\n /**\n * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is\n * permanently removed from the storage account. If the storage account's soft delete feature is\n * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible\n * immediately. However, the blob service retains the blob or snapshot for the number of days specified\n * by the DeleteRetentionPolicy section of [Storage service properties]\n * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is\n * permanently removed from the storage account. Note that you continue to be charged for the\n * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the\n * \"include=deleted\" query parameter to discover which blobs and snapshots have been soft deleted. You\n * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a\n * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404\n * (ResourceNotFound).\n * @param options The options parameters.\n */\n delete(options?: BlobDeleteOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Undelete a blob that was previously soft deleted\n * @param options The options parameters.\n */\n undelete(\n options?: BlobUndeleteOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n undeleteOperationSpec\n ) as Promise;\n }\n\n /**\n * Sets the time a blob will expire and be deleted.\n * @param expiryOptions Required. 
Indicates mode of the expiry time\n * @param options The options parameters.\n */\n setExpiry(\n expiryOptions: BlobExpiryOptions,\n options?: BlobSetExpiryOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n expiryOptions,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setExpiryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set HTTP Headers operation sets system properties on the blob\n * @param options The options parameters.\n */\n setHttpHeaders(\n options?: BlobSetHttpHeadersOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setHttpHeadersOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Immutability Policy operation sets the immutability policy on the blob\n * @param options The options parameters.\n */\n setImmutabilityPolicy(\n options?: BlobSetImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Delete Immutability Policy operation deletes the immutability policy on the blob\n * @param options The options parameters.\n */\n deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n deleteImmutabilityPolicyOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Legal Hold operation sets a legal hold on the blob.\n * @param legalHold Specified if a legal hold should be set on the blob.\n * @param options The options parameters.\n */\n setLegalHold(\n legalHold: boolean,\n options?: BlobSetLegalHoldOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n legalHold,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setLegalHoldOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more\n * name-value pairs\n * @param options The options parameters.\n */\n setMetadata(\n options?: BlobSetMetadataOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setMetadataOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n acquireLease(\n options?: BlobAcquireLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n acquireLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and 
delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n releaseLease(\n leaseId: string,\n options?: BlobReleaseLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n releaseLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param options The options parameters.\n */\n renewLease(\n leaseId: string,\n options?: BlobRenewLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n renewLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param leaseId Specifies the current lease ID on the resource.\n * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400\n * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor\n * (String) for a list of valid GUID string formats.\n * @param options The options parameters.\n */\n changeLease(\n leaseId: string,\n proposedLeaseId: string,\n options?: BlobChangeLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n leaseId,\n proposedLeaseId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n changeLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete\n * operations\n * @param options The options parameters.\n */\n breakLease(\n options?: BlobBreakLeaseOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n breakLeaseOperationSpec\n ) as Promise;\n }\n\n /**\n * The Create Snapshot operation creates a read-only snapshot of a blob\n * @param options The options parameters.\n */\n createSnapshot(\n options?: BlobCreateSnapshotOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createSnapshotOperationSpec\n ) as Promise;\n }\n\n /**\n * The Start Copy From URL operation copies a blob or an internet resource to a new blob.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. 
The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n startCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return\n * a response until the copy is complete.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyFromURL(\n copySource: string,\n options?: BlobCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination\n * blob with zero length and full metadata.\n * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob\n * operation.\n * @param options The options parameters.\n */\n abortCopyFromURL(\n copyId: string,\n options?: BlobAbortCopyFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copyId,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n abortCopyFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant storage only). A\n * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block\n * blob's tier determines Hot/Cool/Archive storage type. 
This operation does not update the blob's\n * ETag.\n * @param tier Indicates the tier to be set on the blob.\n * @param options The options parameters.\n */\n setTier(\n tier: AccessTier,\n options?: BlobSetTierOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n tier,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTierOperationSpec\n ) as Promise;\n }\n\n /**\n * Returns the sku name and account kind\n * @param options The options parameters.\n */\n getAccountInfo(\n options?: coreHttp.OperationOptions\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getAccountInfoOperationSpec\n ) as Promise;\n }\n\n /**\n * The Query operation enables users to select/project on blob data by providing simple query\n * expressions.\n * @param options The options parameters.\n */\n query(options?: BlobQueryOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n queryOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Tags operation enables users to get the tags associated with a blob.\n * @param options The options parameters.\n */\n getTags(options?: BlobGetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getTagsOperationSpec\n ) as Promise;\n }\n\n /**\n * The Set Tags operation enables users to set tags on a blob.\n * @param options The options parameters.\n */\n setTags(options?: BlobSetTagsOptionalParams): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n setTagsOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\n\nconst downloadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobDownloadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDownloadExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.rangeGetContentMD5,\n Parameters.rangeGetContentCRC64,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPropertiesOperationSpec: 
coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"HEAD\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetPropertiesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetPropertiesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobDeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.blobDeleteType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.deleteSnapshots\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst undeleteOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobUndeleteHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobUndeleteExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setExpiryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetExpiryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetExpiryExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.expiryOptions,\n Parameters.expiresOn\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setHttpHeadersOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetHttpHeadersHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n 
Parameters.blobContentLanguage,\n Parameters.blobContentDisposition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifUnmodifiedSince,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst deleteImmutabilityPolicyOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"DELETE\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setLegalHoldOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetLegalHoldHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.legalHold\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setMetadataOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetMetadataHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetMetadataExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst acquireLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobAcquireLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action,\n Parameters.duration,\n Parameters.proposedLeaseId,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n 
Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst releaseLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobReleaseLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action1,\n Parameters.leaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst renewLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobRenewLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobRenewLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action2,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst changeLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobChangeLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobChangeLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.leaseId1,\n Parameters.action4,\n Parameters.proposedLeaseId1,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst breakLeaseOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobBreakLeaseHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobBreakLeaseExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.action3,\n Parameters.breakPeriod,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst createSnapshotOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlobCreateSnapshotHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n 
Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst startCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobStartCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.tier,\n Parameters.rehydratePriority,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sealBlob,\n Parameters.legalHold1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.BlobCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.xMsRequiresSync,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst abortCopyFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobAbortCopyFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp15,\n Parameters.copyId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.copyActionAbortConstant\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTierOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobSetTierHeaders\n },\n 202: {\n headersMapper: Mappers.BlobSetTierHeaders\n 
},\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTierExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp16\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags,\n Parameters.rehydratePriority,\n Parameters.tier1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getAccountInfoOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n headersMapper: Mappers.BlobGetAccountInfoHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.restype1],\n urlParameters: [Parameters.url],\n headerParameters: [Parameters.version, Parameters.accept1],\n isXML: true,\n serializer: xmlSerializer\n};\nconst queryOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"POST\",\n responses: {\n 200: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n 206: {\n bodyMapper: {\n type: { name: \"Stream\" },\n serializedName: \"parsedResponse\"\n },\n headersMapper: Mappers.BlobQueryHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobQueryExceptionHeaders\n }\n },\n requestBody: Parameters.queryRequest,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp17\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlobTags,\n headersMapper: Mappers.BlobGetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobGetTagsExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst setTagsOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 204: {\n headersMapper: Mappers.BlobSetTagsHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlobSetTagsExceptionHeaders\n }\n },\n requestBody: Parameters.tags,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.versionId,\n Parameters.comp18\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.leaseId,\n Parameters.ifTags,\n 
Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n PageBlobCreateOptionalParams,\n PageBlobCreateResponse,\n PageBlobUploadPagesOptionalParams,\n PageBlobUploadPagesResponse,\n PageBlobClearPagesOptionalParams,\n PageBlobClearPagesResponse,\n PageBlobUploadPagesFromURLOptionalParams,\n PageBlobUploadPagesFromURLResponse,\n PageBlobGetPageRangesOptionalParams,\n PageBlobGetPageRangesResponse,\n PageBlobGetPageRangesDiffOptionalParams,\n PageBlobGetPageRangesDiffResponse,\n PageBlobResizeOptionalParams,\n PageBlobResizeResponse,\n SequenceNumberActionType,\n PageBlobUpdateSequenceNumberOptionalParams,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobCopyIncrementalOptionalParams,\n PageBlobCopyIncrementalResponse\n} from \"../models\";\n\n/** Class representing a PageBlob. */\nexport class PageBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class PageBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create operation creates a new page blob.\n * @param contentLength The length of the request.\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. 
The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n blobContentLength: number,\n options?: PageBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n uploadPages(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: PageBlobUploadPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Clear Pages operation clears a set of pages from a page blob\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n clearPages(\n contentLength: number,\n options?: PageBlobClearPagesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n clearPagesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a\n * URL\n * @param sourceUrl Specify a URL to the copy source.\n * @param sourceRange Bytes of source data in the specified range. The length of this range should\n * match the ContentLength header and x-ms-range/Range destination range header.\n * @param contentLength The length of the request.\n * @param range The range of bytes to which the source range would be written. 
The range should be 512\n * aligned and range-end is required.\n * @param options The options parameters.\n */\n uploadPagesFromURL(\n sourceUrl: string,\n sourceRange: string,\n contentLength: number,\n range: string,\n options?: PageBlobUploadPagesFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n sourceRange,\n contentLength,\n range,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadPagesFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a\n * page blob\n * @param options The options parameters.\n */\n getPageRanges(\n options?: PageBlobGetPageRangesOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were\n * changed between target blob and previous snapshot.\n * @param options The options parameters.\n */\n getPageRangesDiff(\n options?: PageBlobGetPageRangesDiffOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getPageRangesDiffOperationSpec\n ) as Promise;\n }\n\n /**\n * Resize the Blob\n * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The\n * page blob size must be aligned to a 512-byte boundary.\n * @param options The options parameters.\n */\n resize(\n blobContentLength: number,\n options?: PageBlobResizeOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blobContentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n resizeOperationSpec\n ) as Promise;\n }\n\n /**\n * Update the sequence number of the blob\n * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request.\n * This property applies to page blobs only. This property indicates how the service should modify the\n * blob's sequence number\n * @param options The options parameters.\n */\n updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n options?: PageBlobUpdateSequenceNumberOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sequenceNumberAction,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n updateSequenceNumberOperationSpec\n ) as Promise;\n }\n\n /**\n * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob.\n * The snapshot is copied such that only the differential changes between the previously copied\n * snapshot are transferred to the destination. The copied snapshots are complete copies of the\n * original snapshot and can be read or copied from as usual. This API is supported since REST version\n * 2016-05-31.\n * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n copyIncremental(\n copySource: string,\n options?: PageBlobCopyIncrementalOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n copyIncrementalOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType,\n Parameters.blobContentLength,\n Parameters.blobSequenceNumber\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo\n ],\n mediaType: \"binary\",\n serializer\n};\nconst clearPagesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobClearPagesHeaders\n },\n default: {\n bodyMapper: 
Mappers.StorageError,\n headersMapper: Mappers.PageBlobClearPagesExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.pageWrite1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst uploadPagesFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.pageWrite,\n Parameters.ifSequenceNumberLessThanOrEqualTo,\n Parameters.ifSequenceNumberLessThan,\n Parameters.ifSequenceNumberEqualTo,\n Parameters.sourceUrl,\n Parameters.sourceRange,\n Parameters.sourceContentCrc64,\n Parameters.range1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst getPageRangesDiffOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.PageList,\n headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.marker,\n Parameters.maxPageSize,\n Parameters.snapshot,\n Parameters.comp20,\n 
Parameters.prevsnapshot\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.range,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.prevSnapshotUrl\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst resizeOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobResizeHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobResizeExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.blobContentLength\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst updateSequenceNumberOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders\n }\n },\n queryParameters: [Parameters.comp, Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobSequenceNumber,\n Parameters.sequenceNumberAction\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst copyIncrementalOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 202: {\n headersMapper: Mappers.PageBlobCopyIncrementalHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.copySource\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n AppendBlobCreateOptionalParams,\n AppendBlobCreateResponse,\n AppendBlobAppendBlockOptionalParams,\n AppendBlobAppendBlockResponse,\n AppendBlobAppendBlockFromUrlOptionalParams,\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobSealOptionalParams,\n 
AppendBlobSealResponse\n} from \"../models\";\n\n/** Class representing a AppendBlob. */\nexport class AppendBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class AppendBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Create Append Blob operation creates a new append blob.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n create(\n contentLength: number,\n options?: AppendBlobCreateOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n createOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob. The\n * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to\n * AppendBlob. Append Block is supported only on version 2015-02-21 version or later.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n appendBlock(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: AppendBlobAppendBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob where\n * the contents are read from a source url. The Append Block operation is permitted only if the blob\n * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version\n * 2015-02-21 version or later.\n * @param sourceUrl Specify a URL to the copy source.\n * @param contentLength The length of the request.\n * @param options The options parameters.\n */\n appendBlockFromUrl(\n sourceUrl: string,\n contentLength: number,\n options?: AppendBlobAppendBlockFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n sourceUrl,\n contentLength,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n appendBlockFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Seal operation seals the Append Blob to make it read-only. 
Seal is supported only on version\n * 2019-12-12 version or later.\n * @param options The options parameters.\n */\n seal(\n options?: AppendBlobSealOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n sealOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst createOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobCreateHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobCreateExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.blobType1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst appendBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.maxSize,\n Parameters.appendPosition\n ],\n mediaType: \"binary\",\n serializer\n};\nconst appendBlockFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n 
Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.transactionalContentMD5,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.maxSize,\n Parameters.appendPosition,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst sealOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 200: {\n headersMapper: Mappers.AppendBlobSealHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.AppendBlobSealExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.appendPosition\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport * as Mappers from \"../models/mappers\";\nimport * as Parameters from \"../models/parameters\";\nimport { StorageClientContext } from \"../storageClientContext\";\nimport {\n BlockBlobUploadOptionalParams,\n BlockBlobUploadResponse,\n BlockBlobPutBlobFromUrlOptionalParams,\n BlockBlobPutBlobFromUrlResponse,\n BlockBlobStageBlockOptionalParams,\n BlockBlobStageBlockResponse,\n BlockBlobStageBlockFromURLOptionalParams,\n BlockBlobStageBlockFromURLResponse,\n BlockLookupList,\n BlockBlobCommitBlockListOptionalParams,\n BlockBlobCommitBlockListResponse,\n BlockListType,\n BlockBlobGetBlockListOptionalParams,\n BlockBlobGetBlockListResponse\n} from \"../models\";\n\n/** Class representing a BlockBlob. */\nexport class BlockBlob {\n private readonly client: StorageClientContext;\n\n /**\n * Initialize a new instance of the class BlockBlob class.\n * @param client Reference to the service client\n */\n constructor(client: StorageClientContext) {\n this.client = client;\n }\n\n /**\n * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing\n * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put\n * Blob; the content of the existing blob is overwritten with the content of the new blob. 
To perform a\n * partial update of the content of a block blob, use the Put Block List operation.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n upload(\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobUploadOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n uploadOperationSpec\n ) as Promise;\n }\n\n /**\n * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read\n * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are\n * not supported with Put Blob from URL; the content of an existing blob is overwritten with the\n * content of the new blob. To perform partial updates to a block blob’s contents using a source URL,\n * use the Put Block from URL API in conjunction with Put Block List.\n * @param contentLength The length of the request.\n * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to\n * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would\n * appear in a request URI. The source blob must either be public or must be authenticated via a shared\n * access signature.\n * @param options The options parameters.\n */\n putBlobFromUrl(\n contentLength: number,\n copySource: string,\n options?: BlockBlobPutBlobFromUrlOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n contentLength,\n copySource,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n putBlobFromUrlOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param body Initial data\n * @param options The options parameters.\n */\n stageBlock(\n blockId: string,\n contentLength: number,\n body: coreHttp.HttpRequestBody,\n options?: BlockBlobStageBlockOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n body,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockOperationSpec\n ) as Promise;\n }\n\n /**\n * The Stage Block operation creates a new block to be committed as part of a blob where the contents\n * are read from a URL.\n * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string\n * must be less than or equal to 64 bytes in size. 
For a given blob, the length of the value specified\n * for the blockid parameter must be the same size for each block.\n * @param contentLength The length of the request.\n * @param sourceUrl Specify a URL to the copy source.\n * @param options The options parameters.\n */\n stageBlockFromURL(\n blockId: string,\n contentLength: number,\n sourceUrl: string,\n options?: BlockBlobStageBlockFromURLOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blockId,\n contentLength,\n sourceUrl,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n stageBlockFromURLOperationSpec\n ) as Promise;\n }\n\n /**\n * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the\n * blob. In order to be written as part of a blob, a block must have been successfully written to the\n * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading\n * only those blocks that have changed, then committing the new and existing blocks together. You can\n * do this by specifying whether to commit a block from the committed block list or from the\n * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list\n * it may belong to.\n * @param blocks Blob Blocks.\n * @param options The options parameters.\n */\n commitBlockList(\n blocks: BlockLookupList,\n options?: BlockBlobCommitBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n blocks,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n commitBlockListOperationSpec\n ) as Promise;\n }\n\n /**\n * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block\n * blob\n * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted\n * blocks, or both lists together.\n * @param options The options parameters.\n */\n getBlockList(\n listType: BlockListType,\n options?: BlockBlobGetBlockListOptionalParams\n ): Promise {\n const operationArguments: coreHttp.OperationArguments = {\n listType,\n options: coreHttp.operationOptionsToRequestOptionsBase(options || {})\n };\n return this.client.sendOperationRequest(\n operationArguments,\n getBlockListOperationSpec\n ) as Promise;\n }\n}\n// Operation Specifications\nconst xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true);\nconst serializer = new coreHttp.Serializer(Mappers, /* isXml */ false);\n\nconst uploadOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobUploadHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobUploadExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n 
Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.contentType1,\n Parameters.accept2,\n Parameters.blobType2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst putBlobFromUrlOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders\n }\n },\n queryParameters: [Parameters.timeoutInSeconds],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceIfTags,\n Parameters.copySource,\n Parameters.blobTagsString,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.copySourceTags,\n Parameters.transactionalContentMD5,\n Parameters.blobType2,\n Parameters.copySourceBlobProperties\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst stageBlockOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders\n }\n },\n requestBody: Parameters.body1,\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64,\n Parameters.contentType1,\n Parameters.accept2\n ],\n mediaType: \"binary\",\n serializer\n};\nconst stageBlockFromURLOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.comp24,\n Parameters.blockId\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.contentLength,\n Parameters.leaseId,\n Parameters.encryptionKey,\n 
Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.encryptionScope,\n Parameters.sourceIfModifiedSince,\n Parameters.sourceIfUnmodifiedSince,\n Parameters.sourceIfMatch,\n Parameters.sourceIfNoneMatch,\n Parameters.sourceContentMD5,\n Parameters.copySourceAuthorization,\n Parameters.sourceUrl,\n Parameters.sourceContentCrc64,\n Parameters.sourceRange1\n ],\n isXML: true,\n serializer: xmlSerializer\n};\nconst commitBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"PUT\",\n responses: {\n 201: {\n headersMapper: Mappers.BlockBlobCommitBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders\n }\n },\n requestBody: Parameters.blocks,\n queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.contentType,\n Parameters.accept,\n Parameters.version,\n Parameters.requestId,\n Parameters.metadata,\n Parameters.leaseId,\n Parameters.ifModifiedSince,\n Parameters.ifUnmodifiedSince,\n Parameters.encryptionKey,\n Parameters.encryptionKeySha256,\n Parameters.encryptionAlgorithm,\n Parameters.ifMatch,\n Parameters.ifNoneMatch,\n Parameters.ifTags,\n Parameters.blobCacheControl,\n Parameters.blobContentType,\n Parameters.blobContentMD5,\n Parameters.blobContentEncoding,\n Parameters.blobContentLanguage,\n Parameters.blobContentDisposition,\n Parameters.immutabilityPolicyExpiry,\n Parameters.immutabilityPolicyMode,\n Parameters.encryptionScope,\n Parameters.tier,\n Parameters.blobTagsString,\n Parameters.legalHold1,\n Parameters.transactionalContentMD5,\n Parameters.transactionalContentCrc64\n ],\n isXML: true,\n contentType: \"application/xml; charset=utf-8\",\n mediaType: \"xml\",\n serializer: xmlSerializer\n};\nconst getBlockListOperationSpec: coreHttp.OperationSpec = {\n path: \"/{containerName}/{blob}\",\n httpMethod: \"GET\",\n responses: {\n 200: {\n bodyMapper: Mappers.BlockList,\n headersMapper: Mappers.BlockBlobGetBlockListHeaders\n },\n default: {\n bodyMapper: Mappers.StorageError,\n headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders\n }\n },\n queryParameters: [\n Parameters.timeoutInSeconds,\n Parameters.snapshot,\n Parameters.comp25,\n Parameters.listType\n ],\n urlParameters: [Parameters.url],\n headerParameters: [\n Parameters.version,\n Parameters.requestId,\n Parameters.accept1,\n Parameters.leaseId,\n Parameters.ifTags\n ],\n isXML: true,\n serializer: xmlSerializer\n};\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createClientLogger } from \"@azure/logger\";\n\n/**\n * The `@azure/logger` configuration for this package.\n */\nexport const logger = createClientLogger(\"storage-blob\");\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const SDK_VERSION: string = \"12.11.0\";\nexport const SERVICE_VERSION: string = \"2021-08-06\";\n\nexport const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES: number = 256 * 1024 * 1024; // 256MB\nexport const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES: number = 4000 * 1024 * 1024; // 4000MB\nexport const BLOCK_BLOB_MAX_BLOCKS: number = 50000;\nexport const DEFAULT_BLOCK_BUFFER_SIZE_BYTES: number = 8 * 1024 * 1024; // 8MB\nexport const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES: number = 4 * 1024 * 1024; // 4MB\nexport const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS: number = 5;\n/**\n * The OAuth scope to use with Azure Storage.\n */\nexport const 
StorageOAuthScopes: string | string[] = \"https://storage.azure.com/.default\";\n\nexport const URLConstants = {\n Parameters: {\n FORCE_BROWSER_NO_CACHE: \"_\",\n SIGNATURE: \"sig\",\n SNAPSHOT: \"snapshot\",\n VERSIONID: \"versionid\",\n TIMEOUT: \"timeout\",\n },\n};\n\nexport const HTTPURLConnection = {\n HTTP_ACCEPTED: 202,\n HTTP_CONFLICT: 409,\n HTTP_NOT_FOUND: 404,\n HTTP_PRECON_FAILED: 412,\n HTTP_RANGE_NOT_SATISFIABLE: 416,\n};\n\nexport const HeaderConstants = {\n AUTHORIZATION: \"Authorization\",\n AUTHORIZATION_SCHEME: \"Bearer\",\n CONTENT_ENCODING: \"Content-Encoding\",\n CONTENT_ID: \"Content-ID\",\n CONTENT_LANGUAGE: \"Content-Language\",\n CONTENT_LENGTH: \"Content-Length\",\n CONTENT_MD5: \"Content-Md5\",\n CONTENT_TRANSFER_ENCODING: \"Content-Transfer-Encoding\",\n CONTENT_TYPE: \"Content-Type\",\n COOKIE: \"Cookie\",\n DATE: \"date\",\n IF_MATCH: \"if-match\",\n IF_MODIFIED_SINCE: \"if-modified-since\",\n IF_NONE_MATCH: \"if-none-match\",\n IF_UNMODIFIED_SINCE: \"if-unmodified-since\",\n PREFIX_FOR_STORAGE: \"x-ms-\",\n RANGE: \"Range\",\n USER_AGENT: \"User-Agent\",\n X_MS_CLIENT_REQUEST_ID: \"x-ms-client-request-id\",\n X_MS_COPY_SOURCE: \"x-ms-copy-source\",\n X_MS_DATE: \"x-ms-date\",\n X_MS_ERROR_CODE: \"x-ms-error-code\",\n X_MS_VERSION: \"x-ms-version\",\n};\n\nexport const ETagNone = \"\";\nexport const ETagAny = \"*\";\n\nexport const SIZE_1_MB = 1 * 1024 * 1024;\nexport const BATCH_MAX_REQUEST = 256;\nexport const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB;\nexport const HTTP_LINE_ENDING = \"\\r\\n\";\nexport const HTTP_VERSION_1_1 = \"HTTP/1.1\";\n\nexport const EncryptionAlgorithmAES25 = \"AES256\";\n\nexport const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`;\n\nexport const StorageBlobLoggingAllowedHeaderNames = [\n \"Access-Control-Allow-Origin\",\n \"Cache-Control\",\n \"Content-Length\",\n \"Content-Type\",\n \"Date\",\n \"Request-Id\",\n \"traceparent\",\n \"Transfer-Encoding\",\n \"User-Agent\",\n \"x-ms-client-request-id\",\n \"x-ms-date\",\n \"x-ms-error-code\",\n \"x-ms-request-id\",\n \"x-ms-return-client-request-id\",\n \"x-ms-version\",\n \"Accept-Ranges\",\n \"Content-Disposition\",\n \"Content-Encoding\",\n \"Content-Language\",\n \"Content-MD5\",\n \"Content-Range\",\n \"ETag\",\n \"Last-Modified\",\n \"Server\",\n \"Vary\",\n \"x-ms-content-crc64\",\n \"x-ms-copy-action\",\n \"x-ms-copy-completion-time\",\n \"x-ms-copy-id\",\n \"x-ms-copy-progress\",\n \"x-ms-copy-status\",\n \"x-ms-has-immutability-policy\",\n \"x-ms-has-legal-hold\",\n \"x-ms-lease-state\",\n \"x-ms-lease-status\",\n \"x-ms-range\",\n \"x-ms-request-server-encrypted\",\n \"x-ms-server-encrypted\",\n \"x-ms-snapshot\",\n \"x-ms-source-range\",\n \"If-Match\",\n \"If-Modified-Since\",\n \"If-None-Match\",\n \"If-Unmodified-Since\",\n \"x-ms-access-tier\",\n \"x-ms-access-tier-change-time\",\n \"x-ms-access-tier-inferred\",\n \"x-ms-account-kind\",\n \"x-ms-archive-status\",\n \"x-ms-blob-append-offset\",\n \"x-ms-blob-cache-control\",\n \"x-ms-blob-committed-block-count\",\n \"x-ms-blob-condition-appendpos\",\n \"x-ms-blob-condition-maxsize\",\n \"x-ms-blob-content-disposition\",\n \"x-ms-blob-content-encoding\",\n \"x-ms-blob-content-language\",\n \"x-ms-blob-content-length\",\n \"x-ms-blob-content-md5\",\n \"x-ms-blob-content-type\",\n \"x-ms-blob-public-access\",\n 
\"x-ms-blob-sequence-number\",\n \"x-ms-blob-type\",\n \"x-ms-copy-destination-snapshot\",\n \"x-ms-creation-time\",\n \"x-ms-default-encryption-scope\",\n \"x-ms-delete-snapshots\",\n \"x-ms-delete-type-permanent\",\n \"x-ms-deny-encryption-scope-override\",\n \"x-ms-encryption-algorithm\",\n \"x-ms-if-sequence-number-eq\",\n \"x-ms-if-sequence-number-le\",\n \"x-ms-if-sequence-number-lt\",\n \"x-ms-incremental-copy\",\n \"x-ms-lease-action\",\n \"x-ms-lease-break-period\",\n \"x-ms-lease-duration\",\n \"x-ms-lease-id\",\n \"x-ms-lease-time\",\n \"x-ms-page-write\",\n \"x-ms-proposed-lease-id\",\n \"x-ms-range-get-content-md5\",\n \"x-ms-rehydrate-priority\",\n \"x-ms-sequence-number-action\",\n \"x-ms-sku-name\",\n \"x-ms-source-content-md5\",\n \"x-ms-source-if-match\",\n \"x-ms-source-if-modified-since\",\n \"x-ms-source-if-none-match\",\n \"x-ms-source-if-unmodified-since\",\n \"x-ms-tag-count\",\n \"x-ms-encryption-key-sha256\",\n \"x-ms-if-tags\",\n \"x-ms-source-if-tags\",\n];\n\nexport const StorageBlobLoggingAllowedQueryParameters = [\n \"comp\",\n \"maxresults\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"se\",\n \"si\",\n \"sip\",\n \"sp\",\n \"spr\",\n \"sr\",\n \"srt\",\n \"ss\",\n \"st\",\n \"sv\",\n \"include\",\n \"marker\",\n \"prefix\",\n \"copyid\",\n \"restype\",\n \"blockid\",\n \"blocklisttype\",\n \"delimiter\",\n \"prevsnapshot\",\n \"ske\",\n \"skoid\",\n \"sks\",\n \"skt\",\n \"sktid\",\n \"skv\",\n \"snapshot\",\n];\n\nexport const BlobUsesCustomerSpecifiedEncryptionMsg = \"BlobUsesCustomerSpecifiedEncryption\";\nexport const BlobDoesNotUseCustomerSpecifiedEncryption =\n \"BlobDoesNotUseCustomerSpecifiedEncryption\";\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { HttpHeaders, isNode, URLBuilder, TokenCredential } from \"@azure/core-http\";\n\nimport {\n BlobQueryArrowConfiguration,\n BlobQueryCsvTextConfiguration,\n BlobQueryJsonTextConfiguration,\n BlobQueryParquetConfiguration,\n} from \"../Clients\";\nimport {\n QuerySerialization,\n BlobTags,\n BlobName,\n ListBlobsFlatSegmentResponse,\n ListBlobsHierarchySegmentResponse,\n BlobItemInternal,\n BlobPrefix,\n BlobType,\n LeaseStatusType,\n LeaseStateType,\n LeaseDurationType,\n CopyStatusType,\n AccessTier,\n ArchiveStatus,\n RehydratePriority,\n BlobImmutabilityPolicyMode,\n BlobTag,\n PageRange,\n ClearRange,\n BlobPropertiesInternal,\n} from \"../generated/src/models\";\nimport { DevelopmentConnectionString, HeaderConstants, URLConstants } from \"./constants\";\nimport {\n Tags,\n ObjectReplicationPolicy,\n ObjectReplicationRule,\n ObjectReplicationStatus,\n HttpAuthorization,\n} from \"../models\";\nimport {\n ListBlobsFlatSegmentResponseModel,\n BlobItemInternal as BlobItemInternalModel,\n ListBlobsHierarchySegmentResponseModel,\n BlobPrefix as BlobPrefixModel,\n PageBlobGetPageRangesDiffResponseModel,\n PageRangeInfo,\n} from \"../generatedModels\";\n\n/**\n * Reserved URL characters must be properly escaped for Storage services like Blob or File.\n *\n * ## URL encode and escape strategy for JS SDKs\n *\n * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not.\n * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL\n * string has been encoded or not. 
We have 2 potential strategies, and chose strategy two for the XxxClient constructors.\n *\n * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK.\n *\n * This is what legacy V2 SDK does, simple and works for most of the cases.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * SDK will encode it to \"http://account.blob.core.windows.net/con/b%253A\" and send to server. A blob named \"b%3A\" will be created.\n *\n * But this strategy will make it not possible to create a blob with \"?\" in it's name. Because when customer URL string is\n * \"http://account.blob.core.windows.net/con/blob?name\", the \"?name\" will be treated as URL paramter instead of blob name.\n * If customer URL string is \"http://account.blob.core.windows.net/con/blob%3Fname\", a blob named \"blob%3Fname\" will be created.\n * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it.\n * We cannot accept a SDK cannot create a blob name with \"?\". So we implement strategy two:\n *\n * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.\n *\n * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b:\",\n * SDK will escape \":\" like \"http://account.blob.core.windows.net/con/b%3A\" and send to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%3A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%3A\" to server. A blob named \"b:\" will be created.\n * - When customer URL string is \"http://account.blob.core.windows.net/con/b%253A\",\n * There is no special characters, so send \"http://account.blob.core.windows.net/con/b%253A\" to server. A blob named \"b%3A\" will be created.\n *\n * This strategy gives us flexibility to create with any special characters. But \"%\" will be treated as a special characters, if the URL string\n * is not encoded, there shouldn't a \"%\" in the URL string, otherwise the URL is not a valid URL.\n * If customer needs to create a blob with \"%\" in it's blob name, use \"%25\" instead of \"%\". Just like above 3rd sample.\n * And following URL strings are invalid:\n * - \"http://account.blob.core.windows.net/con/b%\"\n * - \"http://account.blob.core.windows.net/con/b%2\"\n * - \"http://account.blob.core.windows.net/con/b%G\"\n *\n * Another special character is \"?\", use \"%2F\" to represent a blob name with \"?\" in a URL string.\n *\n * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`\n *\n * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. 
Because what customers passes in is a plain name instead of a URL.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata\n *\n * @param url -\n */\nexport function escapeURLPath(url: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path || \"/\";\n\n path = escape(path);\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\nexport interface ConnectionString {\n kind: \"AccountConnString\" | \"SASConnString\";\n url: string;\n accountName: string;\n accountKey?: any;\n accountSas?: string;\n proxyUri?: string; // Development Connection String may contain proxyUri\n}\n\nfunction getProxyUriFromDevConnString(connectionString: string): string {\n // Development Connection String\n // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key\n let proxyUri = \"\";\n if (connectionString.search(\"DevelopmentStorageProxyUri=\") !== -1) {\n // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri\n const matchCredentials = connectionString.split(\";\");\n for (const element of matchCredentials) {\n if (element.trim().startsWith(\"DevelopmentStorageProxyUri=\")) {\n proxyUri = element.trim().match(\"DevelopmentStorageProxyUri=(.*)\")![1];\n }\n }\n }\n return proxyUri;\n}\n\nexport function getValueInConnString(\n connectionString: string,\n argument:\n | \"BlobEndpoint\"\n | \"AccountName\"\n | \"AccountKey\"\n | \"DefaultEndpointsProtocol\"\n | \"EndpointSuffix\"\n | \"SharedAccessSignature\"\n): string {\n const elements = connectionString.split(\";\");\n for (const element of elements) {\n if (element.trim().startsWith(argument)) {\n return element.trim().match(argument + \"=(.*)\")![1];\n }\n }\n return \"\";\n}\n\n/**\n * Extracts the parts of an Azure Storage account connection string.\n *\n * @param connectionString - Connection string.\n * @returns String key value pairs of the storage account's url and credentials.\n */\nexport function extractConnectionStringParts(connectionString: string): ConnectionString {\n let proxyUri = \"\";\n\n if (connectionString.startsWith(\"UseDevelopmentStorage=true\")) {\n // Development connection string\n proxyUri = getProxyUriFromDevConnString(connectionString);\n connectionString = DevelopmentConnectionString;\n }\n\n // Matching BlobEndpoint in the Account connection string\n let blobEndpoint = getValueInConnString(connectionString, \"BlobEndpoint\");\n // Slicing off '/' at the end if exists\n // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end)\n blobEndpoint = blobEndpoint.endsWith(\"/\") ? 
blobEndpoint.slice(0, -1) : blobEndpoint;\n\n if (\n connectionString.search(\"DefaultEndpointsProtocol=\") !== -1 &&\n connectionString.search(\"AccountKey=\") !== -1\n ) {\n // Account connection string\n\n let defaultEndpointsProtocol = \"\";\n let accountName = \"\";\n let accountKey = Buffer.from(\"accountKey\", \"base64\");\n let endpointSuffix = \"\";\n\n // Get account name and key\n accountName = getValueInConnString(connectionString, \"AccountName\");\n accountKey = Buffer.from(getValueInConnString(connectionString, \"AccountKey\"), \"base64\");\n\n if (!blobEndpoint) {\n // BlobEndpoint is not present in the Account connection string\n // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`\n\n defaultEndpointsProtocol = getValueInConnString(connectionString, \"DefaultEndpointsProtocol\");\n const protocol = defaultEndpointsProtocol!.toLowerCase();\n if (protocol !== \"https\" && protocol !== \"http\") {\n throw new Error(\n \"Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'\"\n );\n }\n\n endpointSuffix = getValueInConnString(connectionString, \"EndpointSuffix\");\n if (!endpointSuffix) {\n throw new Error(\"Invalid EndpointSuffix in the provided Connection String\");\n }\n blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n }\n\n if (!accountName) {\n throw new Error(\"Invalid AccountName in the provided Connection String\");\n } else if (accountKey.length === 0) {\n throw new Error(\"Invalid AccountKey in the provided Connection String\");\n }\n\n return {\n kind: \"AccountConnString\",\n url: blobEndpoint,\n accountName,\n accountKey,\n proxyUri,\n };\n } else {\n // SAS connection string\n\n const accountSas = getValueInConnString(connectionString, \"SharedAccessSignature\");\n const accountName = getAccountNameFromUrl(blobEndpoint);\n if (!blobEndpoint) {\n throw new Error(\"Invalid BlobEndpoint in the provided SAS Connection String\");\n } else if (!accountSas) {\n throw new Error(\"Invalid SharedAccessSignature in the provided SAS Connection String\");\n }\n\n return { kind: \"SASConnString\", url: blobEndpoint, accountName, accountSas };\n }\n}\n\n/**\n * Internal escape method implemented Strategy Two mentioned in escapeURL() description.\n *\n * @param text -\n */\nfunction escape(text: string): string {\n return encodeURIComponent(text)\n .replace(/%2F/g, \"/\") // Don't escape for \"/\"\n .replace(/'/g, \"%27\") // Escape for \"'\"\n .replace(/\\+/g, \"%20\")\n .replace(/%25/g, \"%\"); // Revert encoded \"%\"\n}\n\n/**\n * Append a string to URL path. Will remove duplicated \"/\" in front of the string\n * when URL path ends with a \"/\".\n *\n * @param url - Source URL string\n * @param name - String to be appended to URL\n * @returns An updated URL string\n */\nexport function appendToURLPath(url: string, name: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let path = urlParsed.getPath();\n path = path ? (path.endsWith(\"/\") ? `${path}${name}` : `${path}/${name}`) : name;\n urlParsed.setPath(path);\n\n return urlParsed.toString();\n}\n\n/**\n * Set URL parameter name and value. If name exists in URL parameters, old value\n * will be replaced by name key. 
If not provide value, the parameter will be deleted.\n *\n * @param url - Source URL string\n * @param name - Parameter name\n * @param value - Parameter value\n * @returns An updated URL string\n */\nexport function setURLParameter(url: string, name: string, value?: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setQueryParameter(name, value);\n return urlParsed.toString();\n}\n\n/**\n * Get URL parameter by name.\n *\n * @param url -\n * @param name -\n */\nexport function getURLParameter(url: string, name: string): string | string[] | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getQueryParameterValue(name);\n}\n\n/**\n * Set URL host.\n *\n * @param url - Source URL string\n * @param host - New host string\n * @returns An updated URL string\n */\nexport function setURLHost(url: string, host: string): string {\n const urlParsed = URLBuilder.parse(url);\n urlParsed.setHost(host);\n return urlParsed.toString();\n}\n\n/**\n * Get URL path from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPath(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getPath();\n}\n\n/**\n * Get URL scheme from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLScheme(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n return urlParsed.getScheme();\n}\n\n/**\n * Get URL path and query from an URL string.\n *\n * @param url - Source URL string\n */\nexport function getURLPathAndQuery(url: string): string | undefined {\n const urlParsed = URLBuilder.parse(url);\n const pathString = urlParsed.getPath();\n if (!pathString) {\n throw new RangeError(\"Invalid url without valid path.\");\n }\n\n let queryString = urlParsed.getQuery() || \"\";\n queryString = queryString.trim();\n if (queryString !== \"\") {\n queryString = queryString.startsWith(\"?\") ? queryString : `?${queryString}`; // Ensure query string start with '?'\n }\n\n return `${pathString}${queryString}`;\n}\n\n/**\n * Get URL query key value pairs from an URL string.\n *\n * @param url -\n */\nexport function getURLQueries(url: string): { [key: string]: string } {\n let queryString = URLBuilder.parse(url).getQuery();\n if (!queryString) {\n return {};\n }\n\n queryString = queryString.trim();\n queryString = queryString.startsWith(\"?\") ? 
queryString.substr(1) : queryString;\n\n let querySubStrings: string[] = queryString.split(\"&\");\n querySubStrings = querySubStrings.filter((value: string) => {\n const indexOfEqual = value.indexOf(\"=\");\n const lastIndexOfEqual = value.lastIndexOf(\"=\");\n return (\n indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1\n );\n });\n\n const queries: { [key: string]: string } = {};\n for (const querySubString of querySubStrings) {\n const splitResults = querySubString.split(\"=\");\n const key: string = splitResults[0];\n const value: string = splitResults[1];\n queries[key] = value;\n }\n\n return queries;\n}\n\n/**\n * Append a string to URL query.\n *\n * @param url - Source URL string.\n * @param queryParts - String to be appended to the URL query.\n * @returns An updated URL string.\n */\nexport function appendToURLQuery(url: string, queryParts: string): string {\n const urlParsed = URLBuilder.parse(url);\n\n let query = urlParsed.getQuery();\n if (query) {\n query += \"&\" + queryParts;\n } else {\n query = queryParts;\n }\n\n urlParsed.setQuery(query);\n return urlParsed.toString();\n}\n\n/**\n * Rounds a date off to seconds.\n *\n * @param date -\n * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned;\n * If false, YYYY-MM-DDThh:mm:ssZ will be returned.\n * @returns Date string in ISO8061 format, with or without 7 milliseconds component\n */\nexport function truncatedISO8061Date(date: Date, withMilliseconds: boolean = true): string {\n // Date.toISOString() will return like \"2018-10-29T06:34:36.139Z\"\n const dateString = date.toISOString();\n\n return withMilliseconds\n ? dateString.substring(0, dateString.length - 1) + \"0000\" + \"Z\"\n : dateString.substring(0, dateString.length - 5) + \"Z\";\n}\n\n/**\n * Base64 encode.\n *\n * @param content -\n */\nexport function base64encode(content: string): string {\n return !isNode ? btoa(content) : Buffer.from(content).toString(\"base64\");\n}\n\n/**\n * Base64 decode.\n *\n * @param encodedString -\n */\nexport function base64decode(encodedString: string): string {\n return !isNode ? 
atob(encodedString) : Buffer.from(encodedString, \"base64\").toString();\n}\n\n/**\n * Generate a 64 bytes base64 block ID string.\n *\n * @param blockIndex -\n */\nexport function generateBlockID(blockIDPrefix: string, blockIndex: number): string {\n // To generate a 64 bytes base64 string, source string should be 48\n const maxSourceStringLength = 48;\n\n // A blob can have a maximum of 100,000 uncommitted blocks at any given time\n const maxBlockIndexLength = 6;\n\n const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength;\n\n if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) {\n blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength);\n }\n const res =\n blockIDPrefix +\n padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, \"0\");\n return base64encode(res);\n}\n\n/**\n * Delay specified time interval.\n *\n * @param timeInMs -\n * @param aborter -\n * @param abortError -\n */\nexport async function delay(\n timeInMs: number,\n aborter?: AbortSignalLike,\n abortError?: Error\n): Promise {\n return new Promise((resolve, reject) => {\n /* eslint-disable-next-line prefer-const */\n let timeout: any;\n\n const abortHandler = () => {\n if (timeout !== undefined) {\n clearTimeout(timeout);\n }\n reject(abortError);\n };\n\n const resolveHandler = () => {\n if (aborter !== undefined) {\n aborter.removeEventListener(\"abort\", abortHandler);\n }\n resolve();\n };\n\n timeout = setTimeout(resolveHandler, timeInMs);\n\n if (aborter !== undefined) {\n aborter.addEventListener(\"abort\", abortHandler);\n }\n });\n}\n\n/**\n * String.prototype.padStart()\n *\n * @param currentString -\n * @param targetLength -\n * @param padString -\n */\nexport function padStart(\n currentString: string,\n targetLength: number,\n padString: string = \" \"\n): string {\n // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes\n if (String.prototype.padStart) {\n return currentString.padStart(targetLength, padString);\n }\n\n padString = padString || \" \";\n if (currentString.length > targetLength) {\n return currentString;\n } else {\n targetLength = targetLength - currentString.length;\n if (targetLength > padString.length) {\n padString += padString.repeat(targetLength / padString.length);\n }\n return padString.slice(0, targetLength) + currentString;\n }\n}\n\nexport function sanitizeURL(url: string): string {\n let safeURL: string = url;\n if (getURLParameter(safeURL, URLConstants.Parameters.SIGNATURE)) {\n safeURL = setURLParameter(safeURL, URLConstants.Parameters.SIGNATURE, \"*****\");\n }\n\n return safeURL;\n}\n\nexport function sanitizeHeaders(originalHeader: HttpHeaders): HttpHeaders {\n const headers: HttpHeaders = new HttpHeaders();\n for (const header of originalHeader.headersArray()) {\n if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) {\n headers.set(header.name, \"*****\");\n } else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) {\n headers.set(header.name, sanitizeURL(header.value));\n } else {\n headers.set(header.name, header.value);\n }\n }\n\n return headers;\n}\n/**\n * If two strings are equal when compared case insensitive.\n *\n * @param str1 -\n * @param str2 -\n */\nexport function iEqual(str1: string, str2: string): boolean {\n return str1.toLocaleLowerCase() === str2.toLocaleLowerCase();\n}\n\n/**\n * Extracts account name from the url\n * @param url - url to extract the account name from\n * @returns with the account name\n */\nexport 
function getAccountNameFromUrl(url: string): string {\n const parsedUrl: URLBuilder = URLBuilder.parse(url);\n let accountName;\n try {\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`;\n accountName = parsedUrl.getHost()!.split(\".\")[0];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/\n // .getPath() -> /devstoreaccount1/\n accountName = parsedUrl.getPath()!.split(\"/\")[1];\n } else {\n // Custom domain case: \"https://customdomain.com/containername/blob\".\n accountName = \"\";\n }\n return accountName;\n } catch (error: any) {\n throw new Error(\"Unable to extract accountName with provided information.\");\n }\n}\n\nexport function isIpEndpointStyle(parsedUrl: URLBuilder): boolean {\n if (parsedUrl.getHost() === undefined) {\n return false;\n }\n\n const host =\n parsedUrl.getHost()! + (parsedUrl.getPort() === undefined ? \"\" : \":\" + parsedUrl.getPort());\n\n // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'.\n // Case 2: localhost(:port), use broad regex to match port part.\n // Case 3: Ipv4, use broad regex which just check if host contains Ipv4.\n // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html.\n return /^.*:.*:.*$|^localhost(:[0-9]+)?$|^(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])(\\.(\\d|[1-9]\\d|1\\d\\d|2[0-4]\\d|25[0-5])){3}(:[0-9]+)?$/.test(\n host\n );\n}\n\n/**\n * Convert Tags to encoded string.\n *\n * @param tags -\n */\nexport function toBlobTagsString(tags?: Tags): string | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const tagPairs = [];\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`);\n }\n }\n\n return tagPairs.join(\"&\");\n}\n\n/**\n * Convert Tags type to BlobTags.\n *\n * @param tags -\n */\nexport function toBlobTags(tags?: Tags): BlobTags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: BlobTags = {\n blobTagSet: [],\n };\n\n for (const key in tags) {\n if (Object.prototype.hasOwnProperty.call(tags, key)) {\n const value = tags[key];\n res.blobTagSet.push({\n key,\n value,\n });\n }\n }\n return res;\n}\n\n/**\n * Covert BlobTags to Tags type.\n *\n * @param tags -\n */\nexport function toTags(tags?: BlobTags): Tags | undefined {\n if (tags === undefined) {\n return undefined;\n }\n\n const res: Tags = {};\n for (const blobTag of tags.blobTagSet) {\n res[blobTag.key] = blobTag.value;\n }\n return res;\n}\n\n/**\n * Convert BlobQueryTextConfiguration to QuerySerialization type.\n *\n * @param textConfiguration -\n */\nexport function toQuerySerialization(\n textConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration\n | BlobQueryParquetConfiguration\n): QuerySerialization | undefined {\n if (textConfiguration === undefined) {\n return undefined;\n }\n\n switch (textConfiguration.kind) {\n case \"csv\":\n return {\n format: {\n type: \"delimited\",\n delimitedTextConfiguration: {\n columnSeparator: textConfiguration.columnSeparator || \",\",\n fieldQuote: textConfiguration.fieldQuote || \"\",\n recordSeparator: textConfiguration.recordSeparator,\n escapeChar: 
textConfiguration.escapeCharacter || \"\",\n headersPresent: textConfiguration.hasHeaders || false,\n },\n },\n };\n case \"json\":\n return {\n format: {\n type: \"json\",\n jsonTextConfiguration: {\n recordSeparator: textConfiguration.recordSeparator,\n },\n },\n };\n case \"arrow\":\n return {\n format: {\n type: \"arrow\",\n arrowConfiguration: {\n schema: textConfiguration.schema,\n },\n },\n };\n case \"parquet\":\n return {\n format: {\n type: \"parquet\",\n },\n };\n\n default:\n throw Error(\"Invalid BlobQueryTextConfiguration.\");\n }\n}\n\nexport function parseObjectReplicationRecord(\n objectReplicationRecord?: Record\n): ObjectReplicationPolicy[] | undefined {\n if (!objectReplicationRecord) {\n return undefined;\n }\n\n if (\"policy-id\" in objectReplicationRecord) {\n // If the dictionary contains a key with policy id, we are not required to do any parsing since\n // the policy id should already be stored in the ObjectReplicationDestinationPolicyId.\n return undefined;\n }\n\n const orProperties: ObjectReplicationPolicy[] = [];\n for (const key in objectReplicationRecord) {\n const ids = key.split(\"_\");\n const policyPrefix = \"or-\";\n if (ids[0].startsWith(policyPrefix)) {\n ids[0] = ids[0].substring(policyPrefix.length);\n }\n const rule: ObjectReplicationRule = {\n ruleId: ids[1],\n replicationStatus: objectReplicationRecord[key] as ObjectReplicationStatus,\n };\n const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]);\n if (policyIndex > -1) {\n orProperties[policyIndex].rules.push(rule);\n } else {\n orProperties.push({\n policyId: ids[0],\n rules: [rule],\n });\n }\n }\n return orProperties;\n}\n\n/**\n * Attach a TokenCredential to an object.\n *\n * @param thing -\n * @param credential -\n */\nexport function attachCredential(thing: T, credential: TokenCredential): T {\n (thing as any).credential = credential;\n return thing;\n}\n\nexport function httpAuthorizationToString(\n httpAuthorization?: HttpAuthorization\n): string | undefined {\n return httpAuthorization ? 
httpAuthorization.scheme + \" \" + httpAuthorization.value : undefined;\n}\n\nexport function BlobNameToString(name: BlobName): string {\n if (name.encoded) {\n return decodeURIComponent(name.content!);\n } else {\n return name.content!;\n }\n}\n\nexport function ConvertInternalResponseOfListBlobFlat(\n internalResponse: ListBlobsFlatSegmentResponse\n): ListBlobsFlatSegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nexport function ConvertInternalResponseOfListBlobHierarchy(\n internalResponse: ListBlobsHierarchySegmentResponse\n): ListBlobsHierarchySegmentResponseModel {\n return {\n ...internalResponse,\n segment: {\n blobPrefixes: internalResponse.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefixModel = {\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItemInternalModel = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n };\n return blobItem;\n }),\n },\n };\n}\n\nfunction decodeBase64String(value: string): Uint8Array {\n if (isNode) {\n return Buffer.from(value, \"base64\");\n } else {\n const byteString = atob(value);\n const arr = new Uint8Array(byteString.length);\n for (let i = 0; i < byteString.length; i++) {\n arr[i] = byteString.charCodeAt(i);\n }\n return arr;\n }\n}\n\nfunction ParseBoolean(content: any) {\n if (content === undefined) return undefined;\n if (content === \"true\") return true;\n if (content === \"false\") return false;\n return undefined;\n}\n\nfunction ParseBlobName(blobNameInXML: any): BlobName {\n if (blobNameInXML[\"$\"] !== undefined && blobNameInXML[\"#\"] !== undefined) {\n return {\n encoded: ParseBoolean(blobNameInXML[\"$\"][\"Encoded\"]),\n content: blobNameInXML[\"#\"] as string,\n };\n } else {\n return {\n encoded: false,\n content: blobNameInXML as string,\n };\n }\n}\n\nfunction ParseBlobProperties(blobPropertiesInXML: any): BlobPropertiesInternal {\n const blobProperties = blobPropertiesInXML;\n if (blobPropertiesInXML[\"Creation-Time\"]) {\n blobProperties.createdOn = new Date(blobPropertiesInXML[\"Creation-Time\"] as string);\n delete blobProperties[\"Creation-Time\"];\n }\n\n if (blobPropertiesInXML[\"Last-Modified\"]) {\n blobProperties.lastModified = new Date(blobPropertiesInXML[\"Last-Modified\"] as string);\n delete blobProperties[\"Last-Modified\"];\n }\n\n if (blobPropertiesInXML[\"Etag\"]) {\n blobProperties.etag = blobPropertiesInXML[\"Etag\"] as string;\n delete blobProperties[\"Etag\"];\n }\n\n if (blobPropertiesInXML[\"Content-Length\"]) {\n blobProperties.contentLength = parseFloat(blobPropertiesInXML[\"Content-Length\"] as string);\n delete blobProperties[\"Content-Length\"];\n }\n\n if (blobPropertiesInXML[\"Content-Type\"]) {\n blobProperties.contentType = blobPropertiesInXML[\"Content-Type\"] as string;\n delete blobProperties[\"Content-Type\"];\n }\n\n if (blobPropertiesInXML[\"Content-Encoding\"]) {\n blobProperties.contentEncoding = blobPropertiesInXML[\"Content-Encoding\"] as string;\n delete blobProperties[\"Content-Encoding\"];\n }\n\n if (blobPropertiesInXML[\"Content-Language\"]) {\n blobProperties.contentLanguage = blobPropertiesInXML[\"Content-Language\"] as string;\n delete 
blobProperties[\"Content-Language\"];\n }\n\n if (blobPropertiesInXML[\"Content-MD5\"]) {\n blobProperties.contentMD5 = decodeBase64String(blobPropertiesInXML[\"Content-MD5\"] as string);\n delete blobProperties[\"Content-MD5\"];\n }\n\n if (blobPropertiesInXML[\"Content-Disposition\"]) {\n blobProperties.contentDisposition = blobPropertiesInXML[\"Content-Disposition\"] as string;\n delete blobProperties[\"Content-Disposition\"];\n }\n\n if (blobPropertiesInXML[\"Cache-Control\"]) {\n blobProperties.cacheControl = blobPropertiesInXML[\"Cache-Control\"] as string;\n delete blobProperties[\"Cache-Control\"];\n }\n\n if (blobPropertiesInXML[\"x-ms-blob-sequence-number\"]) {\n blobProperties.blobSequenceNumber = parseFloat(\n blobPropertiesInXML[\"x-ms-blob-sequence-number\"] as string\n );\n delete blobProperties[\"x-ms-blob-sequence-number\"];\n }\n\n if (blobPropertiesInXML[\"BlobType\"]) {\n blobProperties.blobType = blobPropertiesInXML[\"BlobType\"] as BlobType;\n delete blobProperties[\"BlobType\"];\n }\n\n if (blobPropertiesInXML[\"LeaseStatus\"]) {\n blobProperties.leaseStatus = blobPropertiesInXML[\"LeaseStatus\"] as LeaseStatusType;\n delete blobProperties[\"LeaseStatus\"];\n }\n\n if (blobPropertiesInXML[\"LeaseState\"]) {\n blobProperties.leaseState = blobPropertiesInXML[\"LeaseState\"] as LeaseStateType;\n delete blobProperties[\"LeaseState\"];\n }\n\n if (blobPropertiesInXML[\"LeaseDuration\"]) {\n blobProperties.leaseDuration = blobPropertiesInXML[\"LeaseDuration\"] as LeaseDurationType;\n delete blobProperties[\"LeaseDuration\"];\n }\n\n if (blobPropertiesInXML[\"CopyId\"]) {\n blobProperties.copyId = blobPropertiesInXML[\"CopyId\"] as string;\n delete blobProperties[\"CopyId\"];\n }\n\n if (blobPropertiesInXML[\"CopyStatus\"]) {\n blobProperties.copyStatus = blobPropertiesInXML[\"CopyStatus\"] as CopyStatusType;\n delete blobProperties[\"CopyStatus\"];\n }\n\n if (blobPropertiesInXML[\"CopySource\"]) {\n blobProperties.copySource = blobPropertiesInXML[\"CopySource\"] as string;\n delete blobProperties[\"CopySource\"];\n }\n\n if (blobPropertiesInXML[\"CopyProgress\"]) {\n blobProperties.copyProgress = blobPropertiesInXML[\"CopyProgress\"] as string;\n delete blobProperties[\"CopyProgress\"];\n }\n\n if (blobPropertiesInXML[\"CopyCompletionTime\"]) {\n blobProperties.copyCompletedOn = new Date(blobPropertiesInXML[\"CopyCompletionTime\"] as string);\n delete blobProperties[\"CopyCompletionTime\"];\n }\n\n if (blobPropertiesInXML[\"CopyStatusDescription\"]) {\n blobProperties.copyStatusDescription = blobPropertiesInXML[\"CopyStatusDescription\"] as string;\n delete blobProperties[\"CopyStatusDescription\"];\n }\n\n if (blobPropertiesInXML[\"ServerEncrypted\"]) {\n blobProperties.serverEncrypted = ParseBoolean(blobPropertiesInXML[\"ServerEncrypted\"]);\n delete blobProperties[\"ServerEncrypted\"];\n }\n\n if (blobPropertiesInXML[\"IncrementalCopy\"]) {\n blobProperties.incrementalCopy = ParseBoolean(blobPropertiesInXML[\"IncrementalCopy\"]);\n delete blobProperties[\"IncrementalCopy\"];\n }\n\n if (blobPropertiesInXML[\"DestinationSnapshot\"]) {\n blobProperties.destinationSnapshot = blobPropertiesInXML[\"DestinationSnapshot\"] as string;\n delete blobProperties[\"DestinationSnapshot\"];\n }\n\n if (blobPropertiesInXML[\"DeletedTime\"]) {\n blobProperties.deletedOn = new Date(blobPropertiesInXML[\"DeletedTime\"] as string);\n delete blobProperties[\"DeletedTime\"];\n }\n\n if (blobPropertiesInXML[\"RemainingRetentionDays\"]) {\n blobProperties.remainingRetentionDays = 
parseFloat(\n blobPropertiesInXML[\"RemainingRetentionDays\"] as string\n );\n delete blobProperties[\"RemainingRetentionDays\"];\n }\n\n if (blobPropertiesInXML[\"AccessTier\"]) {\n blobProperties.accessTier = blobPropertiesInXML[\"AccessTier\"] as AccessTier;\n delete blobProperties[\"AccessTier\"];\n }\n\n if (blobPropertiesInXML[\"AccessTierInferred\"]) {\n blobProperties.accessTierInferred = ParseBoolean(blobPropertiesInXML[\"AccessTierInferred\"]);\n delete blobProperties[\"AccessTierInferred\"];\n }\n\n if (blobPropertiesInXML[\"ArchiveStatus\"]) {\n blobProperties.archiveStatus = blobPropertiesInXML[\"ArchiveStatus\"] as ArchiveStatus;\n delete blobProperties[\"ArchiveStatus\"];\n }\n\n if (blobPropertiesInXML[\"CustomerProvidedKeySha256\"]) {\n blobProperties.customerProvidedKeySha256 = blobPropertiesInXML[\n \"CustomerProvidedKeySha256\"\n ] as string;\n delete blobProperties[\"CustomerProvidedKeySha256\"];\n }\n\n if (blobPropertiesInXML[\"EncryptionScope\"]) {\n blobProperties.encryptionScope = blobPropertiesInXML[\"EncryptionScope\"] as string;\n delete blobProperties[\"EncryptionScope\"];\n }\n\n if (blobPropertiesInXML[\"AccessTierChangeTime\"]) {\n blobProperties.accessTierChangedOn = new Date(\n blobPropertiesInXML[\"AccessTierChangeTime\"] as string\n );\n delete blobProperties[\"AccessTierChangeTime\"];\n }\n\n if (blobPropertiesInXML[\"TagCount\"]) {\n blobProperties.tagCount = parseFloat(blobPropertiesInXML[\"TagCount\"] as string);\n delete blobProperties[\"TagCount\"];\n }\n\n if (blobPropertiesInXML[\"Expiry-Time\"]) {\n blobProperties.expiresOn = new Date(blobPropertiesInXML[\"Expiry-Time\"] as string);\n delete blobProperties[\"Expiry-Time\"];\n }\n\n if (blobPropertiesInXML[\"Sealed\"]) {\n blobProperties.isSealed = ParseBoolean(blobPropertiesInXML[\"Sealed\"]);\n delete blobProperties[\"Sealed\"];\n }\n\n if (blobPropertiesInXML[\"RehydratePriority\"]) {\n blobProperties.rehydratePriority = blobPropertiesInXML[\n \"RehydratePriority\"\n ] as RehydratePriority;\n delete blobProperties[\"RehydratePriority\"];\n }\n\n if (blobPropertiesInXML[\"LastAccessTime\"]) {\n blobProperties.lastAccessedOn = new Date(blobPropertiesInXML[\"LastAccessTime\"] as string);\n delete blobProperties[\"LastAccessTime\"];\n }\n\n if (blobPropertiesInXML[\"ImmutabilityPolicyUntilDate\"]) {\n blobProperties.immutabilityPolicyExpiresOn = new Date(\n blobPropertiesInXML[\"ImmutabilityPolicyUntilDate\"] as string\n );\n delete blobProperties[\"ImmutabilityPolicyUntilDate\"];\n }\n\n if (blobPropertiesInXML[\"ImmutabilityPolicyMode\"]) {\n blobProperties.immutabilityPolicyMode = blobPropertiesInXML[\n \"ImmutabilityPolicyMode\"\n ] as BlobImmutabilityPolicyMode;\n delete blobProperties[\"ImmutabilityPolicyMode\"];\n }\n\n if (blobPropertiesInXML[\"LegalHold\"]) {\n blobProperties.legalHold = ParseBoolean(blobPropertiesInXML[\"LegalHold\"]);\n delete blobProperties[\"LegalHold\"];\n }\n\n return blobProperties;\n}\n\nfunction ParseBlobItem(blobInXML: any): BlobItemInternal {\n const blobItem = blobInXML;\n blobItem.properties = ParseBlobProperties(blobInXML[\"Properties\"]);\n delete blobItem[\"Properties\"];\n\n blobItem.name = ParseBlobName(blobInXML[\"Name\"]);\n delete blobItem[\"Name\"];\n blobItem.deleted = ParseBoolean(blobInXML[\"Deleted\"])!;\n delete blobItem[\"Deleted\"];\n\n if (blobInXML[\"Snapshot\"]) {\n blobItem.snapshot = blobInXML[\"Snapshot\"] as string;\n delete blobItem[\"Snapshot\"];\n }\n\n if (blobInXML[\"VersionId\"]) {\n blobItem.versionId = blobInXML[\"VersionId\"] 
as string;\n delete blobItem[\"VersionId\"];\n }\n\n if (blobInXML[\"IsCurrentVersion\"]) {\n blobItem.isCurrentVersion = ParseBoolean(blobInXML[\"IsCurrentVersion\"]);\n delete blobItem[\"IsCurrentVersion\"];\n }\n\n if (blobInXML[\"Metadata\"]) {\n blobItem.metadata = blobInXML[\"Metadata\"];\n delete blobItem[\"Metadata\"];\n }\n\n if (blobInXML[\"Tags\"]) {\n blobItem.blobTags = ParseBlobTags(blobInXML[\"Tags\"]);\n delete blobItem[\"Tags\"];\n }\n\n if (blobInXML[\"OrMetadata\"]) {\n blobItem.objectReplicationMetadata = blobInXML[\"OrMetadata\"];\n delete blobItem[\"OrMetadata\"];\n }\n\n if (blobInXML[\"HasVersionsOnly\"]) {\n blobItem.hasVersionsOnly = ParseBoolean(blobInXML[\"HasVersionsOnly\"]);\n delete blobItem[\"HasVersionsOnly\"];\n }\n return blobItem;\n}\n\nfunction ParseBlobPrefix(blobPrefixInXML: any): BlobPrefix {\n return {\n name: ParseBlobName(blobPrefixInXML[\"Name\"]),\n };\n}\n\nfunction ParseBlobTag(blobTagInXML: any): BlobTag {\n return {\n key: blobTagInXML[\"Key\"],\n value: blobTagInXML[\"Value\"],\n };\n}\n\nfunction ParseBlobTags(blobTagsInXML: any): BlobTags | undefined {\n if (\n blobTagsInXML === undefined ||\n blobTagsInXML[\"TagSet\"] === undefined ||\n blobTagsInXML[\"TagSet\"][\"Tag\"] === undefined\n ) {\n return undefined;\n }\n\n const blobTagSet = [];\n if (blobTagsInXML[\"TagSet\"][\"Tag\"] instanceof Array) {\n blobTagsInXML[\"TagSet\"][\"Tag\"].forEach((blobTagInXML: any) => {\n blobTagSet.push(ParseBlobTag(blobTagInXML));\n });\n } else {\n blobTagSet.push(ParseBlobTag(blobTagsInXML[\"TagSet\"][\"Tag\"]));\n }\n\n return { blobTagSet: blobTagSet };\n}\n\nexport function ProcessBlobItems(blobArrayInXML: any[]): BlobItemInternal[] {\n const blobItems = [];\n\n if (blobArrayInXML instanceof Array) {\n blobArrayInXML.forEach((blobInXML: any) => {\n blobItems.push(ParseBlobItem(blobInXML));\n });\n } else {\n blobItems.push(ParseBlobItem(blobArrayInXML));\n }\n\n return blobItems;\n}\n\nexport function ProcessBlobPrefixes(blobPrefixesInXML: any[]): BlobPrefix[] {\n const blobPrefixes = [];\n\n if (blobPrefixesInXML instanceof Array) {\n blobPrefixesInXML.forEach((blobPrefixInXML: any) => {\n blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML));\n });\n } else {\n blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML));\n }\n\n return blobPrefixes;\n}\n\nexport function* ExtractPageRangeInfoItems(\n getPageRangesSegment: PageBlobGetPageRangesDiffResponseModel\n): IterableIterator {\n let pageRange: PageRange[] = [];\n let clearRange: ClearRange[] = [];\n\n if (getPageRangesSegment.pageRange) pageRange = getPageRangesSegment.pageRange;\n if (getPageRangesSegment.clearRange) clearRange = getPageRangesSegment.clearRange;\n\n let pageRangeIndex = 0;\n let clearRangeIndex = 0;\n\n while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) {\n if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n ++pageRangeIndex;\n } else {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: clearRange[clearRangeIndex].end,\n isClear: true,\n };\n ++clearRangeIndex;\n }\n }\n\n for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) {\n yield {\n start: pageRange[pageRangeIndex].start,\n end: pageRange[pageRangeIndex].end,\n isClear: false,\n };\n }\n\n for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) {\n yield {\n start: clearRange[clearRangeIndex].start,\n end: 
clearRange[clearRangeIndex].end,\n isClear: true,\n };\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants, URLConstants } from \"../utils/constants\";\nimport { setURLParameter } from \"../utils/utils.common\";\n\n/**\n * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including:\n *\n * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'.\n * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL\n * thus avoid the browser cache.\n *\n * 2. Remove cookie header for security\n *\n * 3. Remove content-length header to avoid browsers warning\n */\nexport class StorageBrowserPolicy extends BaseRequestPolicy {\n /**\n * Creates an instance of StorageBrowserPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n return this._nextPolicy.sendRequest(request);\n }\n\n if (request.method.toUpperCase() === \"GET\" || request.method.toUpperCase() === \"HEAD\") {\n request.url = setURLParameter(\n request.url,\n URLConstants.Parameters.FORCE_BROWSER_NO_CACHE,\n new Date().getTime().toString()\n );\n }\n\n request.headers.remove(HeaderConstants.COOKIE);\n\n // According to XHR standards, content-length should be fully controlled by browsers\n request.headers.remove(HeaderConstants.CONTENT_LENGTH);\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageBrowserPolicy } from \"./policies/StorageBrowserPolicy\";\nexport { StorageBrowserPolicy };\n\n/**\n * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects.\n */\nexport class StorageBrowserPolicyFactory implements RequestPolicyFactory {\n /**\n * Creates a StorageBrowserPolicyFactory object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy {\n return new StorageBrowserPolicy(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortError } from \"@azure/abort-controller\";\n\nimport {\n AbortSignalLike,\n BaseRequestPolicy,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n RestError,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { StorageRetryOptions } from \"../StorageRetryPolicyFactory\";\nimport { URLConstants } from \"../utils/constants\";\nimport { delay, setURLHost, setURLParameter } from \"../utils/utils.common\";\nimport { logger } from \"../log\";\n\n/**\n * A factory method used to generated a RetryPolicy factory.\n *\n * @param retryOptions -\n */\nexport function NewRetryPolicyFactory(retryOptions?: StorageRetryOptions): RequestPolicyFactory {\n 
return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy => {\n return new StorageRetryPolicy(nextPolicy, options, retryOptions);\n },\n };\n}\n\n/**\n * RetryPolicy types.\n */\nexport enum StorageRetryPolicyType {\n /**\n * Exponential retry. Retry time delay grows exponentially.\n */\n EXPONENTIAL,\n /**\n * Linear retry. Retry time delay grows linearly.\n */\n FIXED,\n}\n\n// Default values of StorageRetryOptions\nconst DEFAULT_RETRY_OPTIONS: StorageRetryOptions = {\n maxRetryDelayInMs: 120 * 1000,\n maxTries: 4,\n retryDelayInMs: 4 * 1000,\n retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,\n secondaryHost: \"\",\n tryTimeoutInMs: undefined, // Use server side default timeout strategy\n};\n\nconst RETRY_ABORT_ERROR = new AbortError(\"The operation was aborted.\");\n\n/**\n * Retry policy with exponential retry and linear retry implemented.\n */\nexport class StorageRetryPolicy extends BaseRequestPolicy {\n /**\n * RetryOptions.\n */\n private readonly retryOptions: StorageRetryOptions;\n\n /**\n * Creates an instance of RetryPolicy.\n *\n * @param nextPolicy -\n * @param options -\n * @param retryOptions -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n retryOptions: StorageRetryOptions = DEFAULT_RETRY_OPTIONS\n ) {\n super(nextPolicy, options);\n\n // Initialize retry options\n this.retryOptions = {\n retryPolicyType: retryOptions.retryPolicyType\n ? retryOptions.retryPolicyType\n : DEFAULT_RETRY_OPTIONS.retryPolicyType,\n\n maxTries:\n retryOptions.maxTries && retryOptions.maxTries >= 1\n ? Math.floor(retryOptions.maxTries)\n : DEFAULT_RETRY_OPTIONS.maxTries,\n\n tryTimeoutInMs:\n retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0\n ? retryOptions.tryTimeoutInMs\n : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs,\n\n retryDelayInMs:\n retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0\n ? Math.min(\n retryOptions.retryDelayInMs,\n retryOptions.maxRetryDelayInMs\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs!\n )\n : DEFAULT_RETRY_OPTIONS.retryDelayInMs,\n\n maxRetryDelayInMs:\n retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0\n ? retryOptions.maxRetryDelayInMs\n : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs,\n\n secondaryHost: retryOptions.secondaryHost\n ? retryOptions.secondaryHost\n : DEFAULT_RETRY_OPTIONS.secondaryHost,\n };\n }\n\n /**\n * Sends request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n return this.attemptSendRequest(request, false, 1);\n }\n\n /**\n * Decide and perform next retry. Won't mutate request parameter.\n *\n * @param request -\n * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then\n * the resource was not found. This may be due to replication delay. 
So, in this\n * case, we'll never try the secondary again for this operation.\n * @param attempt - How many retries has been attempted to performed, starting from 1, which includes\n * the attempt will be performed by this method call.\n */\n protected async attemptSendRequest(\n request: WebResource,\n secondaryHas404: boolean,\n attempt: number\n ): Promise {\n const newRequest: WebResource = request.clone();\n\n const isPrimaryRetry =\n secondaryHas404 ||\n !this.retryOptions.secondaryHost ||\n !(request.method === \"GET\" || request.method === \"HEAD\" || request.method === \"OPTIONS\") ||\n attempt % 2 === 1;\n\n if (!isPrimaryRetry) {\n newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost!);\n }\n\n // Set the server-side timeout query parameter \"timeout=[seconds]\"\n if (this.retryOptions.tryTimeoutInMs) {\n newRequest.url = setURLParameter(\n newRequest.url,\n URLConstants.Parameters.TIMEOUT,\n Math.floor(this.retryOptions.tryTimeoutInMs! / 1000).toString()\n );\n }\n\n let response: HttpOperationResponse | undefined;\n try {\n logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? \"Primary\" : \"Secondary\"}`);\n response = await this._nextPolicy.sendRequest(newRequest);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response)) {\n return response;\n }\n\n secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404);\n } catch (err: any) {\n logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`);\n if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) {\n throw err;\n }\n }\n\n await this.delay(isPrimaryRetry, attempt, request.abortSignal);\n return this.attemptSendRequest(request, secondaryHas404, ++attempt);\n }\n\n /**\n * Decide whether to retry according to last HTTP response and retry counters.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param response -\n * @param err -\n */\n protected shouldRetry(\n isPrimaryRetry: boolean,\n attempt: number,\n response?: HttpOperationResponse,\n err?: RestError\n ): boolean {\n if (attempt >= this.retryOptions.maxTries!) {\n logger.info(\n `RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions\n .maxTries!}, no further try.`\n );\n return false;\n }\n\n // Handle network failures, you may need to customize the list when you implement\n // your own http client\n const retriableErrors = [\n \"ETIMEDOUT\",\n \"ESOCKETTIMEDOUT\",\n \"ECONNREFUSED\",\n \"ECONNRESET\",\n \"ENOENT\",\n \"ENOTFOUND\",\n \"TIMEOUT\",\n \"EPIPE\",\n \"REQUEST_SEND_ERROR\", // For default xhr based http client provided in ms-rest-js\n ];\n if (err) {\n for (const retriableError of retriableErrors) {\n if (\n err.name.toUpperCase().includes(retriableError) ||\n err.message.toUpperCase().includes(retriableError) ||\n (err.code && err.code.toString().toUpperCase() === retriableError)\n ) {\n logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`);\n return true;\n }\n }\n }\n\n // If attempt was against the secondary & it returned a StatusNotFound (404), then\n // the resource was not found. This may be due to replication delay. So, in this\n // case, we'll never try the secondary again for this operation.\n if (response || err) {\n const statusCode = response ? response.status : err ? 
err.statusCode : 0;\n if (!isPrimaryRetry && statusCode === 404) {\n logger.info(`RetryPolicy: Secondary access with 404, will retry.`);\n return true;\n }\n\n // Server internal error or server timeout\n if (statusCode === 503 || statusCode === 500) {\n logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`);\n return true;\n }\n }\n\n if (err?.code === \"PARSE_ERROR\" && err?.message.startsWith(`Error \"Error: Unclosed root tag`)) {\n logger.info(\n \"RetryPolicy: Incomplete XML response likely due to service timeout, will retry.\"\n );\n return true;\n }\n\n return false;\n }\n\n /**\n * Delay a calculated time between retries.\n *\n * @param isPrimaryRetry -\n * @param attempt -\n * @param abortSignal -\n */\n private async delay(isPrimaryRetry: boolean, attempt: number, abortSignal?: AbortSignalLike) {\n let delayTimeInMs: number = 0;\n\n if (isPrimaryRetry) {\n switch (this.retryOptions.retryPolicyType) {\n case StorageRetryPolicyType.EXPONENTIAL:\n delayTimeInMs = Math.min(\n (Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs!,\n this.retryOptions.maxRetryDelayInMs!\n );\n break;\n case StorageRetryPolicyType.FIXED:\n delayTimeInMs = this.retryOptions.retryDelayInMs!;\n break;\n }\n } else {\n delayTimeInMs = Math.random() * 1000;\n }\n\n logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`);\n return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { StorageRetryPolicy, StorageRetryPolicyType } from \"./policies/StorageRetryPolicy\";\n\nexport { StorageRetryPolicyType, StorageRetryPolicy };\n\n/**\n * Storage Blob retry options interface.\n */\nexport interface StorageRetryOptions {\n /**\n * Optional. StorageRetryPolicyType, default is exponential retry policy.\n */\n readonly retryPolicyType?: StorageRetryPolicyType;\n\n /**\n * Optional. Max try number of attempts, default is 4.\n * A value of 1 means 1 try and no retries.\n * A value smaller than 1 means default retry number of attempts.\n */\n readonly maxTries?: number;\n\n /**\n * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request.\n * A value of zero or undefined means no default timeout on SDK client, Azure\n * Storage server's default timeout policy will be used.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations\n */\n readonly tryTimeoutInMs?: number;\n\n /**\n * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms).\n * The delay increases (exponentially or linearly) with each retry up to a maximum specified by\n * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs.\n */\n readonly retryDelayInMs?: number;\n\n /**\n * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms).\n * If you specify 0, then you must also specify 0 for retryDelayInMs.\n */\n readonly maxRetryDelayInMs?: number;\n\n /**\n * If a secondaryHost is specified, retries will be tried against this host. 
If secondaryHost is undefined\n * (the default) then operations are not retried against another host.\n *\n * NOTE: Before setting this field, make sure you understand the issues around\n * reading stale and potentially-inconsistent data at\n * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs}\n */\n readonly secondaryHost?: string;\n}\n\n/**\n * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects.\n */\nexport class StorageRetryPolicyFactory implements RequestPolicyFactory {\n private retryOptions?: StorageRetryOptions;\n\n /**\n * Creates an instance of StorageRetryPolicyFactory.\n * @param retryOptions -\n */\n constructor(retryOptions?: StorageRetryOptions) {\n this.retryOptions = retryOptions;\n }\n\n /**\n * Creates a StorageRetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy {\n return new StorageRetryPolicy(nextPolicy, options, this.retryOptions);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BaseRequestPolicy, HttpOperationResponse, WebResource } from \"@azure/core-http\";\n\n/**\n * Credential policy used to sign HTTP(S) requests before sending. This is an\n * abstract class.\n */\nexport abstract class CredentialPolicy extends BaseRequestPolicy {\n /**\n * Sends out request.\n *\n * @param request -\n */\n public sendRequest(request: WebResource): Promise {\n return this._nextPolicy.sendRequest(this.signRequest(request));\n }\n\n /**\n * Child classes must implement this method with request signing. This method\n * will be executed in {@link sendRequest}.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n // Child classes must override this method with request signing. This method\n // will be executed in sendRequest().\n return request;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources\n * or for use with Shared Access Signatures (SAS).\n */\nexport class AnonymousCredentialPolicy extends CredentialPolicy {\n /**\n * Creates an instance of AnonymousCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n */\n // The base class has a protected constructor. Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyFactory, RequestPolicyOptions } from \"@azure/core-http\";\nimport { CredentialPolicy } from \"../policies/CredentialPolicy\";\n\n/**\n * Credential is an abstract class for Azure Storage HTTP requests signing. 
This\n * class will host an credentialPolicyCreator factory which generates CredentialPolicy.\n */\nexport abstract class Credential implements RequestPolicyFactory {\n /**\n * Creates a RequestPolicy object.\n *\n * @param _nextPolicy -\n * @param _options -\n */\n public create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy {\n throw new Error(\"Method should be implemented in children classes.\");\n }\n}\n\n/**\n * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.\n */\nexport type CredentialPolicyCreator = (\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n) => CredentialPolicy;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { AnonymousCredentialPolicy } from \"../policies/AnonymousCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * AnonymousCredential provides a credentialPolicyCreator member used to create\n * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with\n * HTTP(S) requests that read public resources or for use with Shared Access\n * Signatures (SAS).\n */\nexport class AnonymousCredential extends Credential {\n /**\n * Creates an {@link AnonymousCredentialPolicy} object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): AnonymousCredentialPolicy {\n return new AnonymousCredentialPolicy(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n HttpHeaders,\n HttpOperationResponse,\n isNode,\n RequestPolicy,\n RequestPolicyOptions,\n WebResource,\n} from \"@azure/core-http\";\n\nimport { HeaderConstants } from \"../utils/constants\";\n\n/**\n * TelemetryPolicy is a policy used to tag user-agent header for every requests.\n */\nexport class TelemetryPolicy extends BaseRequestPolicy {\n /**\n * Telemetry string.\n */\n public readonly telemetry: string;\n\n /**\n * Creates an instance of TelemetryPolicy.\n * @param nextPolicy -\n * @param options -\n * @param telemetry -\n */\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, telemetry: string) {\n super(nextPolicy, options);\n this.telemetry = telemetry;\n }\n\n /**\n * Sends out request.\n *\n * @param request -\n */\n public async sendRequest(request: WebResource): Promise {\n if (isNode) {\n if (!request.headers) {\n request.headers = new HttpHeaders();\n }\n if (!request.headers.get(HeaderConstants.USER_AGENT)) {\n request.headers.set(HeaderConstants.USER_AGENT, this.telemetry);\n }\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n isNode,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n UserAgentOptions,\n} from \"@azure/core-http\";\nimport * as os from \"os\";\n\nimport { TelemetryPolicy } from \"./policies/TelemetryPolicy\";\nimport { SDK_VERSION } from \"./utils/constants\";\n\n/**\n * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects.\n */\nexport class TelemetryPolicyFactory implements RequestPolicyFactory {\n /**\n * @internal\n */\n public readonly telemetryString: string;\n\n /**\n * Creates an instance of TelemetryPolicyFactory.\n * @param telemetry -\n */\n constructor(telemetry?: UserAgentOptions) {\n const 
userAgentInfo: string[] = [];\n\n if (isNode) {\n if (telemetry) {\n const telemetryString = telemetry.userAgentPrefix || \"\";\n if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) {\n userAgentInfo.push(telemetryString);\n }\n }\n\n // e.g. azsdk-js-storageblob/10.0.0\n const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`;\n if (userAgentInfo.indexOf(libInfo) === -1) {\n userAgentInfo.push(libInfo);\n }\n\n // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299)\n let runtimeInfo = `(NODE-VERSION ${process.version})`;\n if (os) {\n runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`;\n }\n if (userAgentInfo.indexOf(runtimeInfo) === -1) {\n userAgentInfo.push(runtimeInfo);\n }\n }\n\n this.telemetryString = userAgentInfo.join(\" \");\n }\n\n /**\n * Creates a TelemetryPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): TelemetryPolicy {\n return new TelemetryPolicy(nextPolicy, options, this.telemetryString);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { DefaultHttpClient } from \"@azure/core-http\";\nimport { IHttpClient } from \"../Pipeline\";\n\nconst _defaultHttpClient = new DefaultHttpClient();\n\nexport function getCachedDefaultHttpClient(): IHttpClient {\n return _defaultHttpClient;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccessToken, GetTokenOptions, TokenCredential, URLBuilder } from \"@azure/core-http\";\nimport {\n BaseRequestPolicy,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n} from \"@azure/core-http\";\nimport { HttpOperationResponse } from \"@azure/core-http\";\nimport { WebResourceLike } from \"@azure/core-http\";\nimport { delay } from \"@azure/core-http\";\n\n/**\n * A set of constants used internally when processing requests.\n */\nconst Constants = {\n DefaultScope: \"/.default\",\n /**\n * Defines constants for use with HTTP headers.\n */\n HeaderConstants: {\n /**\n * The Authorization header.\n */\n AUTHORIZATION: \"authorization\",\n },\n};\n\n// #region Access Token Cycler\n\n/**\n * A function that gets a promise of an access token and allows providing\n * options.\n *\n * @param options - the options to pass to the underlying token provider\n */\ntype AccessTokenGetter = (options: GetTokenOptions) => Promise;\n\ninterface TokenCyclerOptions {\n /**\n * The window of time before token expiration during which the token will be\n * considered unusable due to risk of the token expiring before sending the\n * request.\n *\n * This will only become meaningful if the refresh fails for over\n * (refreshWindow - forcedRefreshWindow) milliseconds.\n */\n forcedRefreshWindowInMs: number;\n /**\n * Interval in milliseconds to retry failed token refreshes.\n */\n retryIntervalInMs: number;\n /**\n * The window of time before token expiration during which\n * we will attempt to refresh the token.\n */\n refreshWindowInMs: number;\n}\n\n// Default options for the cycler if none are provided\nconst DEFAULT_CYCLER_OPTIONS: TokenCyclerOptions = {\n forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires\n retryIntervalInMs: 3000, // Allow refresh attempts every 3s\n refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry\n};\n\n/**\n * Converts an an unreliable access token getter (which may resolve with null)\n * into an AccessTokenGetter by retrying the 
unreliable getter in a regular\n * interval.\n *\n * @param getAccessToken - a function that produces a promise of an access\n * token that may fail by returning null\n * @param retryIntervalInMs - the time (in milliseconds) to wait between retry\n * attempts\n * @param timeoutInMs - the timestamp after which the refresh attempt will fail,\n * throwing an exception\n * @returns - a promise that, if it resolves, will resolve with an access token\n */\nasync function beginRefresh(\n getAccessToken: () => Promise,\n retryIntervalInMs: number,\n timeoutInMs: number\n): Promise {\n // This wrapper handles exceptions gracefully as long as we haven't exceeded\n // the timeout.\n async function tryGetAccessToken(): Promise {\n if (Date.now() < timeoutInMs) {\n try {\n return await getAccessToken();\n } catch {\n return null;\n }\n } else {\n const finalToken = await getAccessToken();\n\n // Timeout is up, so throw if it's still null\n if (finalToken === null) {\n throw new Error(\"Failed to refresh access token.\");\n }\n\n return finalToken;\n }\n }\n\n let token: AccessToken | null = await tryGetAccessToken();\n\n while (token === null) {\n await delay(retryIntervalInMs);\n\n token = await tryGetAccessToken();\n }\n\n return token;\n}\n\n/**\n * Creates a token cycler from a credential, scopes, and optional settings.\n *\n * A token cycler represents a way to reliably retrieve a valid access token\n * from a TokenCredential. It will handle initializing the token, refreshing it\n * when it nears expiration, and synchronizes refresh attempts to avoid\n * concurrency hazards.\n *\n * @param credential - the underlying TokenCredential that provides the access\n * token\n * @param scopes - the scopes to request authorization for\n * @param tokenCyclerOptions - optionally override default settings for the cycler\n *\n * @returns - a function that reliably produces a valid access token\n */\nfunction createTokenCycler(\n credential: TokenCredential,\n scopes: string | string[],\n tokenCyclerOptions?: Partial\n): AccessTokenGetter {\n let refreshWorker: Promise | null = null;\n let token: AccessToken | null = null;\n\n const options = {\n ...DEFAULT_CYCLER_OPTIONS,\n ...tokenCyclerOptions,\n };\n\n /**\n * This little holder defines several predicates that we use to construct\n * the rules of refreshing the token.\n */\n const cycler = {\n /**\n * Produces true if a refresh job is currently in progress.\n */\n get isRefreshing(): boolean {\n return refreshWorker !== null;\n },\n /**\n * Produces true if the cycler SHOULD refresh (we are within the refresh\n * window and not already refreshing)\n */\n get shouldRefresh(): boolean {\n return (\n !cycler.isRefreshing &&\n (token?.expiresOnTimestamp ?? 
0) - options.refreshWindowInMs < Date.now()\n );\n },\n /**\n * Produces true if the cycler MUST refresh (null or nearly-expired\n * token).\n */\n get mustRefresh(): boolean {\n return (\n token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()\n );\n },\n };\n\n /**\n * Starts a refresh job or returns the existing job if one is already\n * running.\n */\n function refresh(getTokenOptions: GetTokenOptions): Promise {\n if (!cycler.isRefreshing) {\n // We bind `scopes` here to avoid passing it around a lot\n const tryGetAccessToken = (): Promise =>\n credential.getToken(scopes, getTokenOptions);\n\n // Take advantage of promise chaining to insert an assignment to `token`\n // before the refresh can be considered done.\n refreshWorker = beginRefresh(\n tryGetAccessToken,\n options.retryIntervalInMs,\n // If we don't have a token, then we should timeout immediately\n token?.expiresOnTimestamp ?? Date.now()\n )\n .then((_token) => {\n refreshWorker = null;\n token = _token;\n return token;\n })\n .catch((reason) => {\n // We also should reset the refresher if we enter a failed state. All\n // existing awaiters will throw, but subsequent requests will start a\n // new retry chain.\n refreshWorker = null;\n token = null;\n throw reason;\n });\n }\n\n return refreshWorker as Promise;\n }\n\n return async (tokenOptions: GetTokenOptions): Promise => {\n //\n // Simple rules:\n // - If we MUST refresh, then return the refresh task, blocking\n // the pipeline until a token is available.\n // - If we SHOULD refresh, then run refresh but don't return it\n // (we can still use the cached token).\n // - Return the token, since it's fine if we didn't return in\n // step 1.\n //\n\n if (cycler.mustRefresh) return refresh(tokenOptions);\n\n if (cycler.shouldRefresh) {\n refresh(tokenOptions);\n }\n\n return token as AccessToken;\n };\n}\n/**\n * We will retrieve the challenge only if the response status code was 401,\n * and if the response contained the header \"WWW-Authenticate\" with a non-empty value.\n */\nfunction getChallenge(response: HttpOperationResponse): string | undefined {\n const challenge = response.headers.get(\"WWW-Authenticate\");\n if (response.status === 401 && challenge) {\n return challenge;\n }\n return;\n}\n\n/**\n * Challenge structure\n */\ninterface Challenge {\n authorization_uri: string;\n resource_id: string;\n}\n\n/**\n * Converts: `Bearer a=\"b\" c=\"d\"`.\n * Into: `[ { a: 'b', c: 'd' }]`.\n *\n * @internal\n */\nfunction parseChallenge(challenge: string): any {\n const bearerChallenge = challenge.slice(\"Bearer \".length);\n const challengeParts = `${bearerChallenge.trim()} `.split(\" \").filter((x) => x);\n const keyValuePairs = challengeParts.map((keyValue) =>\n (([key, value]) => ({ [key]: value }))(keyValue.trim().split(\"=\"))\n );\n // Key-value pairs to plain object:\n return keyValuePairs.reduce((a, b) => ({ ...a, ...b }), {});\n}\n\n// #endregion\n\n/**\n * Creates a new factory for a RequestPolicy that applies a bearer token to\n * the requests' `Authorization` headers.\n *\n * @param credential - The TokenCredential implementation that can supply the bearer token.\n * @param scopes - The scopes for which the bearer token applies.\n */\n\nexport function storageBearerTokenChallengeAuthenticationPolicy(\n credential: TokenCredential,\n scopes: string | string[]\n): RequestPolicyFactory {\n // This simple function encapsulates the entire process of reliably retrieving the token\n let getToken = createTokenCycler(credential, 
scopes);\n\n class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy {\n public constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(webResource: WebResourceLike): Promise {\n if (!webResource.url.toLowerCase().startsWith(\"https://\")) {\n throw new Error(\n \"Bearer token authentication is not permitted for non-TLS protected (non-https) URLs.\"\n );\n }\n\n const getTokenInternal = getToken;\n const token = (\n await getTokenInternal({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n })\n ).token;\n webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`);\n\n const response = await this._nextPolicy.sendRequest(webResource);\n\n if (response?.status === 401) {\n const challenge = getChallenge(response);\n if (challenge) {\n const challengeInfo: Challenge = parseChallenge(challenge);\n const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope;\n const parsedAuthUri = URLBuilder.parse(challengeInfo.authorization_uri);\n const pathSegments = parsedAuthUri.getPath()!.split(\"/\");\n const tenantId = pathSegments[1];\n const getTokenForChallenge = createTokenCycler(credential, challengeScopes);\n\n const tokenForChallenge = (\n await getTokenForChallenge({\n abortSignal: webResource.abortSignal,\n tracingOptions: {\n tracingContext: webResource.tracingContext,\n },\n tenantId: tenantId,\n })\n ).token;\n\n getToken = getTokenForChallenge;\n webResource.headers.set(\n Constants.HeaderConstants.AUTHORIZATION,\n `Bearer ${tokenForChallenge}`\n );\n return this._nextPolicy.sendRequest(webResource);\n }\n }\n\n return response;\n }\n }\n\n return {\n create: (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => {\n return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options);\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n disableResponseDecompressionPolicy,\n HttpClient as IHttpClient,\n HttpHeaders,\n HttpOperationResponse,\n HttpRequestBody,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n ServiceClientOptions,\n WebResource,\n proxyPolicy,\n isNode,\n TokenCredential,\n isTokenCredential,\n tracingPolicy,\n logPolicy,\n ProxyOptions,\n keepAlivePolicy,\n KeepAliveOptions,\n generateClientRequestIdPolicy,\n UserAgentOptions,\n} from \"@azure/core-http\";\n\nimport { logger } from \"./log\";\nimport { StorageBrowserPolicyFactory } from \"./StorageBrowserPolicyFactory\";\nimport { StorageRetryOptions, StorageRetryPolicyFactory } from \"./StorageRetryPolicyFactory\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport {\n StorageOAuthScopes,\n StorageBlobLoggingAllowedHeaderNames,\n StorageBlobLoggingAllowedQueryParameters,\n} from \"./utils/constants\";\nimport { TelemetryPolicyFactory } from \"./TelemetryPolicyFactory\";\nimport { getCachedDefaultHttpClient } from \"./utils/cache\";\nimport { attachCredential } from \"./utils/utils.common\";\nimport { storageBearerTokenChallengeAuthenticationPolicy } from \"./policies/StorageBearerTokenChallengeAuthenticationPolicy\";\n\n// Export following interfaces and types for customers who want to implement their\n// own RequestPolicy or HTTPClient\nexport {\n 
BaseRequestPolicy,\n StorageOAuthScopes,\n deserializationPolicy,\n IHttpClient,\n HttpHeaders,\n HttpRequestBody,\n HttpOperationResponse,\n WebResource,\n RequestPolicyFactory,\n RequestPolicy,\n RequestPolicyOptions,\n};\n\n/**\n * Option interface for Pipeline constructor.\n */\nexport interface PipelineOptions {\n /**\n * Optional. Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n}\n\n/**\n * An interface for the {@link Pipeline} class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport interface PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger and HTTP client.\n */\n readonly options: PipelineOptions;\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n toServiceClientOptions(): ServiceClientOptions;\n}\n\n/**\n * A helper to decide if a given argument satisfies the Pipeline contract\n * @param pipeline - An argument that may be a Pipeline\n * @returns true when the argument satisfies the Pipeline contract\n */\nexport function isPipelineLike(pipeline: unknown): pipeline is PipelineLike {\n if (!pipeline || typeof pipeline !== \"object\") {\n return false;\n }\n\n const castPipeline = pipeline as PipelineLike;\n\n return (\n Array.isArray(castPipeline.factories) &&\n typeof castPipeline.options === \"object\" &&\n typeof castPipeline.toServiceClientOptions === \"function\"\n );\n}\n\n/**\n * A Pipeline class containing HTTP request policies.\n * You can create a default Pipeline by calling {@link newPipeline}.\n * Or you can create a Pipeline with your own policies by the constructor of Pipeline.\n *\n * Refer to {@link newPipeline} and provided policies before implementing your\n * customized Pipeline.\n */\nexport class Pipeline implements PipelineLike {\n /**\n * A list of chained request policy factories.\n */\n public readonly factories: RequestPolicyFactory[];\n /**\n * Configures pipeline logger and HTTP client.\n */\n public readonly options: PipelineOptions;\n\n /**\n * Creates an instance of Pipeline. 
Customize HTTPClient by implementing IHttpClient interface.\n *\n * @param factories -\n * @param options -\n */\n constructor(factories: RequestPolicyFactory[], options: PipelineOptions = {}) {\n this.factories = factories;\n // when options.httpClient is not specified, passing in a DefaultHttpClient instance to\n // avoid each client creating its own http client.\n this.options = {\n ...options,\n httpClient: options.httpClient || getCachedDefaultHttpClient(),\n };\n }\n\n /**\n * Transfer Pipeline object to ServiceClientOptions object which is required by\n * ServiceClient constructor.\n *\n * @returns The ServiceClientOptions object from this Pipeline.\n */\n public toServiceClientOptions(): ServiceClientOptions {\n return {\n httpClient: this.options.httpClient,\n requestPolicyFactories: this.factories,\n };\n }\n}\n\n/**\n * Options interface for the {@link newPipeline} function.\n */\nexport interface StoragePipelineOptions {\n /**\n * Options to configure a proxy for outgoing requests.\n */\n proxyOptions?: ProxyOptions;\n /**\n * Options for adding user agent details to outgoing requests.\n */\n userAgentOptions?: UserAgentOptions;\n /**\n * Configures the built-in retry policy behavior.\n */\n retryOptions?: StorageRetryOptions;\n /**\n * Keep alive configurations. Default keep-alive is enabled.\n */\n keepAliveOptions?: KeepAliveOptions;\n /**\n * Configures the HTTP client to send requests and receive responses.\n */\n httpClient?: IHttpClient;\n /**\n * The audience used to retrieve an AAD token.\n */\n audience?: string | string[];\n}\n\n/**\n * Creates a new Pipeline object with Credential provided.\n *\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param pipelineOptions - Optional. Options.\n * @returns A new Pipeline object.\n */\nexport function newPipeline(\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n pipelineOptions: StoragePipelineOptions = {}\n): Pipeline {\n if (credential === undefined) {\n credential = new AnonymousCredential();\n }\n\n // Order is important. 
Closer to the API at the top & closer to the network at the bottom.\n // The credential's policy factory must appear close to the wire so it can sign any\n // changes made by other factories (like UniqueRequestIDPolicyFactory)\n\n const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions);\n const factories: RequestPolicyFactory[] = [\n tracingPolicy({ userAgent: telemetryPolicy.telemetryString }),\n keepAlivePolicy(pipelineOptions.keepAliveOptions),\n telemetryPolicy,\n generateClientRequestIdPolicy(),\n new StorageBrowserPolicyFactory(),\n new StorageRetryPolicyFactory(pipelineOptions.retryOptions), // Retry policy should be above any policy that throws retryable errors\n // Default deserializationPolicy is provided by protocol layer\n // Use customized XML char key of \"#\" so we could deserialize metadata\n // with \"_\" key\n deserializationPolicy(undefined, { xmlCharKey: \"#\" }),\n logPolicy({\n logger: logger.info,\n allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames,\n allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters,\n }),\n ];\n\n if (isNode) {\n // policies only available in Node.js runtime, not in browsers\n factories.push(proxyPolicy(pipelineOptions.proxyOptions));\n factories.push(disableResponseDecompressionPolicy());\n }\n factories.push(\n isTokenCredential(credential)\n ? attachCredential(\n storageBearerTokenChallengeAuthenticationPolicy(\n credential,\n pipelineOptions.audience ?? StorageOAuthScopes\n ),\n credential\n )\n : credential\n );\n\n return new Pipeline(factories, pipelineOptions);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { RequestPolicy, RequestPolicyOptions, WebResource } from \"@azure/core-http\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { HeaderConstants } from \"../utils/constants\";\nimport { getURLPath, getURLQueries } from \"../utils/utils.common\";\nimport { CredentialPolicy } from \"./CredentialPolicy\";\n\n/**\n * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key.\n */\nexport class StorageSharedKeyCredentialPolicy extends CredentialPolicy {\n /**\n * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy\n */\n private readonly factory: StorageSharedKeyCredential;\n\n /**\n * Creates an instance of StorageSharedKeyCredentialPolicy.\n * @param nextPolicy -\n * @param options -\n * @param factory -\n */\n constructor(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions,\n factory: StorageSharedKeyCredential\n ) {\n super(nextPolicy, options);\n this.factory = factory;\n }\n\n /**\n * Signs request.\n *\n * @param request -\n */\n protected signRequest(request: WebResource): WebResource {\n request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString());\n\n if (\n request.body &&\n (typeof request.body === \"string\" || (request.body as Buffer) !== undefined) &&\n request.body.length > 0\n ) {\n request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body));\n }\n\n const stringToSign: string =\n [\n request.method.toUpperCase(),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5),\n this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE),\n 
this.getHeaderValueToSign(request, HeaderConstants.DATE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH),\n this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE),\n this.getHeaderValueToSign(request, HeaderConstants.RANGE),\n ].join(\"\\n\") +\n \"\\n\" +\n this.getCanonicalizedHeadersString(request) +\n this.getCanonicalizedResourceString(request);\n\n const signature: string = this.factory.computeHMACSHA256(stringToSign);\n request.headers.set(\n HeaderConstants.AUTHORIZATION,\n `SharedKey ${this.factory.accountName}:${signature}`\n );\n\n // console.log(`[URL]:${request.url}`);\n // console.log(`[HEADERS]:${request.headers.toString()}`);\n // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`);\n // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`);\n return request;\n }\n\n /**\n * Retrieve header value according to shared key sign rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n *\n * @param request -\n * @param headerName -\n */\n private getHeaderValueToSign(request: WebResource, headerName: string): string {\n const value = request.headers.get(headerName);\n\n if (!value) {\n return \"\";\n }\n\n // When using version 2015-02-21 or later, if Content-Length is zero, then\n // set the Content-Length part of the StringToSign to an empty string.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key\n if (headerName === HeaderConstants.CONTENT_LENGTH && value === \"0\") {\n return \"\";\n }\n\n return value;\n }\n\n /**\n * To construct the CanonicalizedHeaders portion of the signature string, follow these steps:\n * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header.\n * 2. Convert each HTTP header name to lowercase.\n * 3. Sort the headers lexicographically by header name, in ascending order.\n * Each header may appear only once in the string.\n * 4. Replace any linear whitespace in the header value with a single space.\n * 5. Trim any whitespace around the colon in the header.\n * 6. 
Finally, append a new-line character to each canonicalized header in the resulting list.\n * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string.\n *\n * @param request -\n */\n private getCanonicalizedHeadersString(request: WebResource): string {\n let headersArray = request.headers.headersArray().filter((value) => {\n return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE);\n });\n\n headersArray.sort((a, b): number => {\n return a.name.toLowerCase().localeCompare(b.name.toLowerCase());\n });\n\n // Remove duplicate headers\n headersArray = headersArray.filter((value, index, array) => {\n if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) {\n return false;\n }\n return true;\n });\n\n let canonicalizedHeadersStringToSign: string = \"\";\n headersArray.forEach((header) => {\n canonicalizedHeadersStringToSign += `${header.name\n .toLowerCase()\n .trimRight()}:${header.value.trimLeft()}\\n`;\n });\n\n return canonicalizedHeadersStringToSign;\n }\n\n /**\n * Retrieves the webResource canonicalized resource string.\n *\n * @param request -\n */\n private getCanonicalizedResourceString(request: WebResource): string {\n const path = getURLPath(request.url) || \"/\";\n\n let canonicalizedResourceString: string = \"\";\n canonicalizedResourceString += `/${this.factory.accountName}${path}`;\n\n const queries = getURLQueries(request.url);\n const lowercaseQueries: { [key: string]: string } = {};\n if (queries) {\n const queryKeys: string[] = [];\n for (const key in queries) {\n if (Object.prototype.hasOwnProperty.call(queries, key)) {\n const lowercaseKey = key.toLowerCase();\n lowercaseQueries[lowercaseKey] = queries[key];\n queryKeys.push(lowercaseKey);\n }\n }\n\n queryKeys.sort();\n for (const key of queryKeys) {\n canonicalizedResourceString += `\\n${key}:${decodeURIComponent(lowercaseQueries[key])}`;\n }\n }\n\n return canonicalizedResourceString;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { RequestPolicy, RequestPolicyOptions } from \"@azure/core-http\";\n\nimport { StorageSharedKeyCredentialPolicy } from \"../policies/StorageSharedKeyCredentialPolicy\";\nimport { Credential } from \"./Credential\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * StorageSharedKeyCredential for account key authorization of Azure Storage service.\n */\nexport class StorageSharedKeyCredential extends Credential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage account key; readonly.\n */\n private readonly accountKey: Buffer;\n\n /**\n * Creates an instance of StorageSharedKeyCredential.\n * @param accountName -\n * @param accountKey -\n */\n constructor(accountName: string, accountKey: string) {\n super();\n this.accountName = accountName;\n this.accountKey = Buffer.from(accountKey, \"base64\");\n }\n\n /**\n * Creates a StorageSharedKeyCredentialPolicy object.\n *\n * @param nextPolicy -\n * @param options -\n */\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): StorageSharedKeyCredentialPolicy {\n return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this);\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n return createHmac(\"sha256\", 
this.accountKey).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n","/*\n * Copyright (c) Microsoft Corporation.\n * Licensed under the MIT License.\n *\n * Code generated by Microsoft (R) AutoRest Code Generator.\n * Changes may cause incorrect behavior and will be lost if the code is regenerated.\n */\n\nimport * as coreHttp from \"@azure/core-http\";\nimport { StorageClientOptionalParams } from \"./models\";\n\nconst packageName = \"azure-storage-blob\";\nconst packageVersion = \"12.11.0\";\n\nexport class StorageClientContext extends coreHttp.ServiceClient {\n url: string;\n version: string;\n\n /**\n * Initializes a new instance of the StorageClientContext class.\n * @param url The URL of the service account, container, or blob that is the target of the desired\n * operation.\n * @param options The parameter options\n */\n constructor(url: string, options?: StorageClientOptionalParams) {\n if (url === undefined) {\n throw new Error(\"'url' cannot be null\");\n }\n\n // Initializing default values for options\n if (!options) {\n options = {};\n }\n\n if (!options.userAgent) {\n const defaultUserAgent = coreHttp.getDefaultUserAgentValue();\n options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`;\n }\n\n super(undefined, options);\n\n this.requestContentType = \"application/json; charset=utf-8\";\n\n this.baseUri = options.endpoint || \"{url}\";\n\n // Parameter assignments\n this.url = url;\n\n // Assigning values to Constant parameters\n this.version = options.version || \"2021-08-06\";\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike } from \"./Pipeline\";\nimport { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from \"./utils/utils.common\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { TokenCredential, isTokenCredential, isNode } from \"@azure/core-http\";\nimport { OperationTracingOptions } from \"@azure/core-tracing\";\n\n/**\n * An interface for options common to every remote operation.\n */\nexport interface CommonOptions {\n /**\n * Options to configure spans created when tracing is enabled.\n */\n tracingOptions?: OperationTracingOptions;\n}\n\n/**\n * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient}\n * and etc.\n */\nexport abstract class StorageClient {\n /**\n * Encoded URL string value.\n */\n public readonly url: string;\n public readonly accountName: string;\n /**\n * Request policy pipeline.\n *\n * @internal\n */\n protected readonly pipeline: PipelineLike;\n /**\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n public readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n /**\n * StorageClient is a reference to protocol layer operations entry, which is\n * generated by AutoRest generator.\n */\n protected readonly storageClientContext: StorageClientContext;\n /**\n */\n protected readonly isHttps: boolean;\n\n /**\n * Creates an instance of StorageClient.\n * @param url - url to resource\n * @param pipeline - request policy pipeline.\n */\n protected constructor(url: string, pipeline: PipelineLike) {\n // URL should be encoded and only once, protocol layer shouldn't encode URL again\n this.url = escapeURLPath(url);\n this.accountName = getAccountNameFromUrl(url);\n this.pipeline = pipeline;\n this.storageClientContext = new StorageClientContext(\n this.url,\n pipeline.toServiceClientOptions()\n );\n\n this.isHttps = iEqual(getURLScheme(this.url) || \"\", \"https\");\n\n this.credential = new AnonymousCredential();\n for (const factory of this.pipeline.factories) {\n if (\n (isNode && factory instanceof StorageSharedKeyCredential) ||\n factory instanceof AnonymousCredential\n ) {\n this.credential = factory;\n } else if (isTokenCredential((factory as any).credential)) {\n // Only works if the factory has been attached a \"credential\" property.\n // We do that in newPipeline() when using TokenCredential.\n this.credential = (factory as any).credential;\n }\n }\n\n // Override protocol layer's default content-type\n const storageClientContext = this.storageClientContext as any;\n storageClientContext.requestContentType = undefined;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { OperationOptions, RequestOptionsBase } from \"@azure/core-http\";\nimport { createSpanFunction } from \"@azure/core-tracing\";\n\n/**\n * Creates a span using the global tracer.\n * @internal\n */\nexport const createSpan = createSpanFunction({\n packagePrefix: \"Azure.Storage.Blob\",\n namespace: \"Microsoft.Storage\",\n});\n\n/**\n * @internal\n *\n * Adapt the tracing options from OperationOptions to what they need to be for\n * RequestOptionsBase (when we update to later OpenTelemetry versions this is now\n * two separate fields, not just one).\n */\nexport function convertTracingToRequestOptionsBase(\n options?: OperationOptions\n): Pick {\n return {\n // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier.\n spanOptions: (options?.tracingOptions as any)?.spanOptions,\n tracingContext: options?.tracingOptions?.tracingContext,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting\n * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all\n * the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class BlobSASPermissions {\n /**\n * Creates a {@link BlobSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n blobSASPermissions.read = true;\n break;\n case \"a\":\n blobSASPermissions.add = true;\n break;\n case \"c\":\n blobSASPermissions.create = true;\n break;\n case \"w\":\n blobSASPermissions.write = true;\n break;\n case \"d\":\n blobSASPermissions.delete = true;\n break;\n case \"x\":\n blobSASPermissions.deleteVersion = true;\n break;\n case \"t\":\n blobSASPermissions.tag = true;\n break;\n case \"m\":\n blobSASPermissions.move = true;\n break;\n case \"e\":\n blobSASPermissions.execute = true;\n break;\n case \"i\":\n blobSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n blobSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission: ${char}`);\n }\n }\n\n return blobSASPermissions;\n }\n\n /**\n * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions {\n const blobSASPermissions = new BlobSASPermissions();\n if (permissionLike.read) {\n blobSASPermissions.read = true;\n }\n if (permissionLike.add) {\n blobSASPermissions.add = true;\n }\n if (permissionLike.create) {\n blobSASPermissions.create = true;\n }\n if (permissionLike.write) {\n blobSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n blobSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n blobSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n blobSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n blobSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n blobSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n blobSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n blobSASPermissions.permanentDelete = true;\n }\n return blobSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Converts the given permissions to a string. 
Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * @returns A string which represents the BlobSASPermissions\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Blob SAS permission.\n * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface BlobSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container.\n * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation.\n * Once all the values are set, this should be serialized with toString and set as the permissions field on a\n * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class ContainerSASPermissions {\n /**\n * Creates an {@link ContainerSASPermissions} from the specified permissions string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid permission.\n *\n * @param permissions -\n */\n public static parse(permissions: string): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n\n for (const char of permissions) {\n switch (char) {\n case \"r\":\n containerSASPermissions.read = true;\n break;\n case \"a\":\n containerSASPermissions.add = true;\n break;\n case \"c\":\n containerSASPermissions.create = true;\n break;\n case \"w\":\n containerSASPermissions.write = true;\n break;\n case \"d\":\n containerSASPermissions.delete = true;\n break;\n case \"l\":\n containerSASPermissions.list = true;\n break;\n case \"t\":\n containerSASPermissions.tag = true;\n break;\n case \"x\":\n containerSASPermissions.deleteVersion = true;\n break;\n case \"m\":\n containerSASPermissions.move = true;\n break;\n case \"e\":\n containerSASPermissions.execute = true;\n break;\n case \"i\":\n containerSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n containerSASPermissions.permanentDelete = true;\n break;\n case \"f\":\n containerSASPermissions.filterByTags = true;\n break;\n default:\n throw new RangeError(`Invalid permission ${char}`);\n }\n }\n\n return containerSASPermissions;\n }\n\n /**\n * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions {\n const containerSASPermissions = new ContainerSASPermissions();\n if (permissionLike.read) {\n containerSASPermissions.read = true;\n }\n if (permissionLike.add) {\n containerSASPermissions.add = true;\n }\n if (permissionLike.create) {\n containerSASPermissions.create = true;\n }\n if (permissionLike.write) {\n containerSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n containerSASPermissions.delete = true;\n }\n if (permissionLike.list) {\n containerSASPermissions.list = true;\n }\n if (permissionLike.deleteVersion) {\n containerSASPermissions.deleteVersion = true;\n }\n if (permissionLike.tag) {\n containerSASPermissions.tag = true;\n }\n if (permissionLike.move) {\n containerSASPermissions.move = true;\n }\n if (permissionLike.execute) {\n containerSASPermissions.execute = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n containerSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n containerSASPermissions.permanentDelete = true;\n }\n if (permissionLike.filterByTags) {\n containerSASPermissions.filterByTags = true;\n }\n return containerSASPermissions;\n }\n\n /**\n * Specifies Read access granted.\n */\n public read: boolean = false;\n\n /**\n * Specifies Add access granted.\n */\n public add: boolean = false;\n\n /**\n * Specifies Create access granted.\n */\n public create: boolean = false;\n\n /**\n * Specifies Write access granted.\n */\n public write: boolean = false;\n\n /**\n * Specifies Delete access granted.\n */\n public delete: boolean = false;\n\n /**\n * Specifies Delete version access granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Specifies List access granted.\n */\n public list: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Specifies Move access granted.\n */\n public move: boolean = false;\n\n /**\n * Specifies Execute access granted.\n */\n public execute: boolean = false;\n\n /**\n 
* Specifies SetImmutabilityPolicy access granted.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n public filterByTags: boolean = false;\n\n /**\n * Converts the given permissions to a string. Using this method will guarantee the permissions are in an\n * order accepted by the service.\n *\n * The order of the characters should be as specified here to ensure correctness.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n */\n public toString(): string {\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.move) {\n permissions.push(\"m\");\n }\n if (this.execute) {\n permissions.push(\"e\");\n }\n if (this.setImmutabilityPolicy) {\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n if (this.filterByTags) {\n permissions.push(\"f\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like a Container SAS permission.\n * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface ContainerSASPermissionsLike {\n /**\n * Specifies Read access granted.\n */\n read?: boolean;\n\n /**\n * Specifies Add access granted.\n */\n add?: boolean;\n\n /**\n * Specifies Create access granted.\n */\n create?: boolean;\n\n /**\n * Specifies Write access granted.\n */\n write?: boolean;\n\n /**\n * Specifies Delete access granted.\n */\n delete?: boolean;\n\n /**\n * Specifies Delete version access granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Specifies List access granted.\n */\n list?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Specifies Move access granted.\n */\n move?: boolean;\n\n /**\n * Specifies Execute access granted.\n */\n execute?: boolean;\n\n /**\n * Specifies SetImmutabilityPolicy access granted.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n\n /**\n * Specifies that Filter Blobs by Tags is permitted.\n */\n filterByTags?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { createHmac } from \"crypto\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * UserDelegationKeyCredential is only used for generation of user delegation SAS.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas\n */\nexport class UserDelegationKeyCredential {\n /**\n * Azure Storage account name; readonly.\n */\n public readonly accountName: string;\n\n /**\n * Azure Storage user delegation key; readonly.\n */\n public readonly userDelegationKey: UserDelegationKey;\n\n /**\n * Key value in Buffer type.\n */\n private readonly key: Buffer;\n\n /**\n * Creates an instance of UserDelegationKeyCredential.\n * @param accountName -\n * @param userDelegationKey -\n */\n constructor(accountName: string, 
userDelegationKey: UserDelegationKey) {\n this.accountName = accountName;\n this.userDelegationKey = userDelegationKey;\n this.key = Buffer.from(userDelegationKey.value, \"base64\");\n }\n\n /**\n * Generates a hash signature for an HTTP request or for a SAS.\n *\n * @param stringToSign -\n */\n public computeHMACSHA256(stringToSign: string): string {\n // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`);\n\n return createHmac(\"sha256\", this.key).update(stringToSign, \"utf8\").digest(\"base64\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Allowed IP range for a SAS.\n */\nexport interface SasIPRange {\n /**\n * Starting IP address in the IP range.\n * If end IP doesn't provide, start IP will the only IP allowed.\n */\n start: string;\n /**\n * Optional. IP address that ends the IP range.\n * If not provided, start IP will the only IP allowed.\n */\n end?: string;\n}\n\n/**\n * Generate SasIPRange format string. For example:\n *\n * \"8.8.8.8\" or \"1.1.1.1-255.255.255.255\"\n *\n * @param ipRange -\n */\nexport function ipRangeToString(ipRange: SasIPRange): string {\n return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\n\n/**\n * Protocols for generated SAS.\n */\nexport enum SASProtocol {\n /**\n * Protocol that allows HTTPS only\n */\n Https = \"https\",\n\n /**\n * Protocol that allows both HTTPS and HTTP\n */\n HttpsAndHttp = \"https,http\",\n}\n\n/**\n * Options to construct {@link SASQueryParameters}.\n */\nexport interface SASQueryParametersOptions {\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n permissions?: string;\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n services?: string;\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n resourceTypes?: string;\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n protocol?: SASProtocol;\n /**\n * Optional. The start time for this SAS token.\n */\n startsOn?: Date;\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n expiresOn?: Date;\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n /**\n * Optional. 
Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n resource?: string;\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n cacheControl?: string;\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n contentDisposition?: string;\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n contentEncoding?: string;\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n contentLanguage?: string;\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n contentType?: string;\n /**\n * User delegation key properties.\n */\n userDelegationKey?: UserDelegationKey;\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}.\n * This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly\n * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues}\n * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should\n * be taken here in case there are existing query parameters, which might affect the appropriate means of appending\n * these query parameters).\n *\n * NOTE: Instances of this class are immutable.\n */\nexport class SASQueryParameters {\n /**\n * The storage API version.\n */\n public readonly version: string;\n\n /**\n * Optional. The allowed HTTP protocol(s).\n */\n public readonly protocol?: SASProtocol;\n\n /**\n * Optional. The start time for this SAS token.\n */\n public readonly startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The expiry time for this SAS token.\n */\n public readonly expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for\n * more details.\n */\n public readonly permissions?: string;\n\n /**\n * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices}\n * for more details.\n */\n public readonly services?: string;\n\n /**\n * Optional. The storage resource types being accessed (only for Account SAS). Please refer to\n * {@link AccountSASResourceTypes} for more details.\n */\n public readonly resourceTypes?: string;\n\n /**\n * Optional. The signed identifier (only for {@link BlobSASSignatureValues}).\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n public readonly identifier?: string;\n\n /**\n * Optional. 
Encryption scope to use when sending requests authorized with this SAS URI.\n */\n public readonly encryptionScope?: string;\n\n /**\n * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}).\n * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only\n */\n public readonly resource?: string;\n\n /**\n * The signature for the SAS token.\n */\n public readonly signature: string;\n\n /**\n * Value for cache-control header in Blob/File Service SAS.\n */\n public readonly cacheControl?: string;\n\n /**\n * Value for content-disposition header in Blob/File Service SAS.\n */\n public readonly contentDisposition?: string;\n\n /**\n * Value for content-encoding header in Blob/File Service SAS.\n */\n public readonly contentEncoding?: string;\n\n /**\n * Value for content-length header in Blob/File Service SAS.\n */\n public readonly contentLanguage?: string;\n\n /**\n * Value for content-type header in Blob/File Service SAS.\n */\n public readonly contentType?: string;\n\n /**\n * Inner value of getter ipRange.\n */\n private readonly ipRangeInner?: SasIPRange;\n\n /**\n * The Azure Active Directory object ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedOid?: string;\n\n /**\n * The Azure Active Directory tenant ID in GUID format.\n * Property of user delegation key.\n */\n private readonly signedTenantId?: string;\n\n /**\n * The date-time the key is active.\n * Property of user delegation key.\n */\n private readonly signedStartsOn?: Date;\n\n /**\n * The date-time the key expires.\n * Property of user delegation key.\n */\n private readonly signedExpiresOn?: Date;\n\n /**\n * Abbreviation of the Azure Storage service that accepts the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedService?: string;\n\n /**\n * The service version that created the user delegation key.\n * Property of user delegation key.\n */\n private readonly signedVersion?: string;\n\n /**\n * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key\n * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key\n * has the required permissions before granting access but no additional permission check for the user specified in\n * this value will be performed. This is only used for User Delegation SAS.\n */\n public readonly preauthorizedAgentObjectId?: string;\n\n /**\n * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access.\n * This is only used for User Delegation SAS.\n */\n public readonly correlationId?: string;\n\n /**\n * Optional. 
IP range allowed for this SAS.\n *\n * @readonly\n */\n public get ipRange(): SasIPRange | undefined {\n if (this.ipRangeInner) {\n return {\n end: this.ipRangeInner.end,\n start: this.ipRangeInner.start,\n };\n }\n return undefined;\n }\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param permissions - Representing the storage permissions\n * @param services - Representing the storage services being accessed (only for Account SAS)\n * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS)\n * @param protocol - Representing the allowed HTTP protocol(s)\n * @param startsOn - Representing the start time for this SAS token\n * @param expiresOn - Representing the expiry time for this SAS token\n * @param ipRange - Representing the range of valid IP addresses for this SAS token\n * @param identifier - Representing the signed identifier (only for Service SAS)\n * @param resource - Representing the storage container or blob (only for Service SAS)\n * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS)\n * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS)\n * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS)\n * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS)\n * @param contentType - Representing the content-type header (only for Blob/File Service SAS)\n * @param userDelegationKey - Representing the user delegation key properties\n * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS)\n * @param correlationId - Representing the correlation ID (only for User Delegation SAS)\n * @param encryptionScope -\n */\n constructor(\n version: string,\n signature: string,\n permissions?: string,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n );\n\n /**\n * Creates an instance of SASQueryParameters.\n *\n * @param version - Representing the storage version\n * @param signature - Representing the signature for the SAS token\n * @param options - Optional. 
Options to construct the SASQueryParameters.\n */\n constructor(version: string, signature: string, options?: SASQueryParametersOptions);\n\n constructor(\n version: string,\n signature: string,\n permissionsOrOptions?: string | SASQueryParametersOptions,\n services?: string,\n resourceTypes?: string,\n protocol?: SASProtocol,\n startsOn?: Date,\n expiresOn?: Date,\n ipRange?: SasIPRange,\n identifier?: string,\n resource?: string,\n cacheControl?: string,\n contentDisposition?: string,\n contentEncoding?: string,\n contentLanguage?: string,\n contentType?: string,\n userDelegationKey?: UserDelegationKey,\n preauthorizedAgentObjectId?: string,\n correlationId?: string,\n encryptionScope?: string\n ) {\n this.version = version;\n this.signature = signature;\n\n if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== \"string\") {\n // SASQueryParametersOptions\n this.permissions = permissionsOrOptions.permissions;\n this.services = permissionsOrOptions.services;\n this.resourceTypes = permissionsOrOptions.resourceTypes;\n this.protocol = permissionsOrOptions.protocol;\n this.startsOn = permissionsOrOptions.startsOn;\n this.expiresOn = permissionsOrOptions.expiresOn;\n this.ipRangeInner = permissionsOrOptions.ipRange;\n this.identifier = permissionsOrOptions.identifier;\n this.encryptionScope = permissionsOrOptions.encryptionScope;\n this.resource = permissionsOrOptions.resource;\n this.cacheControl = permissionsOrOptions.cacheControl;\n this.contentDisposition = permissionsOrOptions.contentDisposition;\n this.contentEncoding = permissionsOrOptions.contentEncoding;\n this.contentLanguage = permissionsOrOptions.contentLanguage;\n this.contentType = permissionsOrOptions.contentType;\n\n if (permissionsOrOptions.userDelegationKey) {\n this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId;\n this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId;\n this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn;\n this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn;\n this.signedService = permissionsOrOptions.userDelegationKey.signedService;\n this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId;\n this.correlationId = permissionsOrOptions.correlationId;\n }\n } else {\n this.services = services;\n this.resourceTypes = resourceTypes;\n this.expiresOn = expiresOn;\n this.permissions = permissionsOrOptions;\n this.protocol = protocol;\n this.startsOn = startsOn;\n this.ipRangeInner = ipRange;\n this.encryptionScope = encryptionScope;\n this.identifier = identifier;\n this.resource = resource;\n this.cacheControl = cacheControl;\n this.contentDisposition = contentDisposition;\n this.contentEncoding = contentEncoding;\n this.contentLanguage = contentLanguage;\n this.contentType = contentType;\n\n if (userDelegationKey) {\n this.signedOid = userDelegationKey.signedObjectId;\n this.signedTenantId = userDelegationKey.signedTenantId;\n this.signedStartsOn = userDelegationKey.signedStartsOn;\n this.signedExpiresOn = userDelegationKey.signedExpiresOn;\n this.signedService = userDelegationKey.signedService;\n this.signedVersion = userDelegationKey.signedVersion;\n\n this.preauthorizedAgentObjectId = preauthorizedAgentObjectId;\n this.correlationId = correlationId;\n }\n }\n }\n\n /**\n * Encodes all SAS query parameters into a string that can be appended to a URL.\n *\n */\n public toString(): string {\n 
const params: string[] = [\n \"sv\",\n \"ss\",\n \"srt\",\n \"spr\",\n \"st\",\n \"se\",\n \"sip\",\n \"si\",\n \"ses\",\n \"skoid\", // Signed object ID\n \"sktid\", // Signed tenant ID\n \"skt\", // Signed key start time\n \"ske\", // Signed key expiry time\n \"sks\", // Signed key service\n \"skv\", // Signed key version\n \"sr\",\n \"sp\",\n \"sig\",\n \"rscc\",\n \"rscd\",\n \"rsce\",\n \"rscl\",\n \"rsct\",\n \"saoid\",\n \"scid\",\n ];\n const queries: string[] = [];\n\n for (const param of params) {\n switch (param) {\n case \"sv\":\n this.tryAppendQueryParameter(queries, param, this.version);\n break;\n case \"ss\":\n this.tryAppendQueryParameter(queries, param, this.services);\n break;\n case \"srt\":\n this.tryAppendQueryParameter(queries, param, this.resourceTypes);\n break;\n case \"spr\":\n this.tryAppendQueryParameter(queries, param, this.protocol);\n break;\n case \"st\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined\n );\n break;\n case \"se\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined\n );\n break;\n case \"sip\":\n this.tryAppendQueryParameter(\n queries,\n param,\n this.ipRange ? ipRangeToString(this.ipRange) : undefined\n );\n break;\n case \"si\":\n this.tryAppendQueryParameter(queries, param, this.identifier);\n break;\n case \"ses\":\n this.tryAppendQueryParameter(queries, param, this.encryptionScope);\n break;\n case \"skoid\": // Signed object ID\n this.tryAppendQueryParameter(queries, param, this.signedOid);\n break;\n case \"sktid\": // Signed tenant ID\n this.tryAppendQueryParameter(queries, param, this.signedTenantId);\n break;\n case \"skt\": // Signed key start time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined\n );\n break;\n case \"ske\": // Signed key expiry time\n this.tryAppendQueryParameter(\n queries,\n param,\n this.signedExpiresOn ? 
truncatedISO8061Date(this.signedExpiresOn, false) : undefined\n );\n break;\n case \"sks\": // Signed key service\n this.tryAppendQueryParameter(queries, param, this.signedService);\n break;\n case \"skv\": // Signed key version\n this.tryAppendQueryParameter(queries, param, this.signedVersion);\n break;\n case \"sr\":\n this.tryAppendQueryParameter(queries, param, this.resource);\n break;\n case \"sp\":\n this.tryAppendQueryParameter(queries, param, this.permissions);\n break;\n case \"sig\":\n this.tryAppendQueryParameter(queries, param, this.signature);\n break;\n case \"rscc\":\n this.tryAppendQueryParameter(queries, param, this.cacheControl);\n break;\n case \"rscd\":\n this.tryAppendQueryParameter(queries, param, this.contentDisposition);\n break;\n case \"rsce\":\n this.tryAppendQueryParameter(queries, param, this.contentEncoding);\n break;\n case \"rscl\":\n this.tryAppendQueryParameter(queries, param, this.contentLanguage);\n break;\n case \"rsct\":\n this.tryAppendQueryParameter(queries, param, this.contentType);\n break;\n case \"saoid\":\n this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId);\n break;\n case \"scid\":\n this.tryAppendQueryParameter(queries, param, this.correlationId);\n break;\n }\n }\n return queries.join(\"&\");\n }\n\n /**\n * A private helper method used to filter and append query key/value pairs into an array.\n *\n * @param queries -\n * @param key -\n * @param value -\n */\n private tryAppendQueryParameter(queries: string[], key: string, value?: string): void {\n if (!value) {\n return;\n }\n\n key = encodeURIComponent(key);\n value = encodeURIComponent(value);\n if (key.length > 0 && value.length > 0) {\n queries.push(`${key}=${value}`);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { BlobSASPermissions } from \"./BlobSASPermissions\";\nimport { UserDelegationKey } from \"../BlobServiceClient\";\nimport { ContainerSASPermissions } from \"./ContainerSASPermissions\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { UserDelegationKeyCredential } from \"../credentials/UserDelegationKeyCredential\";\nimport { ipRangeToString, SasIPRange } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs.\n */\nexport interface BlobSASSignatureValues {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional only when identifier is provided.\n * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource\n * being accessed for help constructing the permissions string.\n */\n permissions?: BlobSASPermissions | ContainerSASPermissions;\n\n /**\n * Optional. 
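A minimal sketch, assuming placeholder `blobUrl` and `sas` values, of the append step that the SASQueryParameters docs above caution about (a URL that already carries query parameters needs `&` rather than `?`):

```js
// Sketch only: join a blob URL and a SAS token produced by
// generateBlobSASQueryParameters(...).toString() / sasQueryParams.toString().
// Uses "?" when the URL has no query string yet, "&" otherwise.
function appendSasToUrl(blobUrl, sas) {
  const separator = blobUrl.includes("?") ? "&" : "?";
  return `${blobUrl}${separator}${sas}`;
}

// e.g. appendSasToUrl("https://myaccount.blob.core.windows.net/container/blob", containerSAS);
```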
IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * The name of the container the SAS user may access.\n */\n containerName: string;\n\n /**\n * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided.\n */\n blobName?: string;\n\n /**\n * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09.\n */\n snapshotTime?: string;\n\n /**\n * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10.\n */\n versionId?: string;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user\n * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will\n * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission\n * check for the user specified in this value will be performed. This is only used for User Delegation SAS.\n */\n preauthorizedAgentObjectId?: string;\n\n /**\n * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to\n * correlate SAS generation with storage resource access. This is only used for User Delegation SAS.\n */\n correlationId?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * Fill in the required details before running the following snippets.\n *\n * Example usage:\n *\n * ```js\n * // Generate service level SAS for a container\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using an identifier:\n *\n * ```js\n * // Generate service level SAS for a container with identifier\n * // startsOn & permissions are optional when identifier is provided\n * const identifier = \"unique-id\";\n * await containerClient.setAccessPolicy(undefined, [\n * {\n * accessPolicy: {\n * expiresOn: new Date(new Date().valueOf() + 86400), // Date type\n * permissions: ContainerSASPermissions.parse(\"racwdl\").toString(),\n * startsOn: new Date() // Date type\n * },\n * id: identifier\n * }\n * ]);\n *\n * const containerSAS = generateBlobSASQueryParameters(\n * {\n * containerName, // Required\n * identifier // Required\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * Example using a blob name:\n *\n * ```js\n * // Generate service level SAS for a blob\n * const blobSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * blobName, // Required\n * permissions: BlobSASPermissions.parse(\"racwd\"), // Required\n * startsOn: new Date(), // Optional\n * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type\n * cacheControl: \"cache-control-override\", // Optional\n * contentDisposition: \"content-disposition-override\", // Optional\n * contentEncoding: \"content-encoding-override\", // Optional\n * contentLanguage: \"content-language-override\", // Optional\n * contentType: \"content-type-override\", // Optional\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2016-05-31\" // Optional\n * },\n * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)`\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters;\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Creates an instance of SASQueryParameters.\n * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required.\n *\n * Example usage:\n *\n * ```js\n * // Generate user delegation SAS for a container\n * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn);\n * const containerSAS = generateBlobSASQueryParameters({\n * containerName, // Required\n * permissions: ContainerSASPermissions.parse(\"racwdl\"), // Required\n * startsOn, // Optional. Date type\n * expiresOn, // Required. 
Date type\n * ipRange: { start: \"0.0.0.0\", end: \"255.255.255.255\" }, // Optional\n * protocol: SASProtocol.HttpsAndHttp, // Optional\n * version: \"2018-11-09\" // Must greater than or equal to 2018-11-09 to generate user delegation SAS\n * },\n * userDelegationKey, // UserDelegationKey\n * accountName\n * ).toString();\n * ```\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()`\n * @param accountName -\n */\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKey: UserDelegationKey,\n accountName: string\n): SASQueryParameters;\n\nexport function generateBlobSASQueryParameters(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredentialOrUserDelegationKey: StorageSharedKeyCredential | UserDelegationKey,\n accountName?: string\n): SASQueryParameters {\n const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION;\n\n const sharedKeyCredential =\n sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential\n ? sharedKeyCredentialOrUserDelegationKey\n : undefined;\n let userDelegationKeyCredential: UserDelegationKeyCredential | undefined;\n\n if (sharedKeyCredential === undefined && accountName !== undefined) {\n userDelegationKeyCredential = new UserDelegationKeyCredential(\n accountName,\n sharedKeyCredentialOrUserDelegationKey as UserDelegationKey\n );\n }\n\n if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) {\n throw TypeError(\"Invalid sharedKeyCredential, userDelegationKey or accountName.\");\n }\n\n // Version 2020-12-06 adds support for encryptionscope in SAS.\n if (version >= \"2020-12-06\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential);\n } else {\n return generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n\n // Version 2019-12-12 adds support for the blob tags permission.\n // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields.\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string\n if (version >= \"2018-11-09\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential);\n } else {\n // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId.\n if (version >= \"2020-02-10\") {\n return generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n } else {\n return generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues,\n userDelegationKeyCredential!\n );\n }\n }\n }\n\n if (version >= \"2015-04-05\") {\n if (sharedKeyCredential !== undefined) {\n return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential);\n } else {\n throw new RangeError(\n \"'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key.\"\n );\n }\n }\n\n throw new RangeError(\"'version' must be >= '2015-04-05'.\");\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a 
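A minimal sketch of requesting an encryption-scope SAS through the dispatcher above, assuming placeholder `containerName` and `sharedKeyCredential` values and a hypothetical scope name; the sanity checks further down require `version >= "2020-12-06"` whenever `encryptionScope` is set:

```js
// Sketch only: encryptionScope is handled by the 2020-12-06 code path above,
// so the target version must be at least "2020-12-06" or the autofill check throws.
const sasWithScope = generateBlobSASQueryParameters(
  {
    containerName,                                        // placeholder
    permissions: ContainerSASPermissions.parse("racwdl"),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),     // one hour from now
    encryptionScope: "my-encryption-scope",               // hypothetical scope name
    version: "2020-12-06",
  },
  sharedKeyCredential                                     // placeholder StorageSharedKeyCredential
).toString();
```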
SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20150405(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? 
blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn and identifier.\n *\n * WARNING: When identifier is not provided, permissions and expiresOn are required.\n * You MUST assign value to identifier or expiresOn & permissions manually if you initial with\n * this constructor.\n *\n * @param blobSASSignatureValues -\n * @param sharedKeyCredential -\n */\nfunction generateBlobSASQueryParameters20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n if (\n !blobSASSignatureValues.identifier &&\n !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)\n ) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n sharedKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n blobSASSignatureValues.identifier,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl ? 
blobSASSignatureValues.cacheControl : \"\",\n blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : \"\",\n blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : \"\",\n blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : \"\",\n blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : \"\",\n ].join(\"\\n\");\n\n const signature = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n undefined,\n undefined,\n undefined,\n blobSASSignatureValues.encryptionScope\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2018-11-09.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20181109(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-02-10.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. 
For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20200210(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId\n );\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n * IMPLEMENTATION FOR API VERSION FROM 2020-12-06.\n *\n * Creates an instance of SASQueryParameters.\n *\n * Only accepts required settings needed to create a SAS. For optional settings please\n * set corresponding properties directly, such as permissions, startsOn.\n *\n * WARNING: identifier will be ignored, permissions and expiresOn are required.\n *\n * @param blobSASSignatureValues -\n * @param userDelegationKeyCredential -\n */\nfunction generateBlobSASQueryParametersUDK20201206(\n blobSASSignatureValues: BlobSASSignatureValues,\n userDelegationKeyCredential: UserDelegationKeyCredential\n): SASQueryParameters {\n blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues);\n\n // Stored access policies are not supported for a user delegation SAS.\n if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) {\n throw new RangeError(\n \"Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS.\"\n );\n }\n\n let resource: string = \"c\";\n let timestamp = blobSASSignatureValues.snapshotTime;\n if (blobSASSignatureValues.blobName) {\n resource = \"b\";\n if (blobSASSignatureValues.snapshotTime) {\n resource = \"bs\";\n } else if (blobSASSignatureValues.versionId) {\n resource = \"bv\";\n timestamp = blobSASSignatureValues.versionId;\n }\n }\n\n // Calling parse and toString guarantees the proper ordering and throws on invalid characters.\n let verifiedPermissions: string | undefined;\n if (blobSASSignatureValues.permissions) {\n if (blobSASSignatureValues.blobName) {\n verifiedPermissions = BlobSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n } else {\n verifiedPermissions = ContainerSASPermissions.parse(\n blobSASSignatureValues.permissions.toString()\n ).toString();\n }\n }\n\n // Signature is generated on the un-url-encoded values.\n const stringToSign = [\n verifiedPermissions ? verifiedPermissions : \"\",\n blobSASSignatureValues.startsOn\n ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false)\n : \"\",\n blobSASSignatureValues.expiresOn\n ? 
truncatedISO8061Date(blobSASSignatureValues.expiresOn, false)\n : \"\",\n getCanonicalName(\n userDelegationKeyCredential.accountName,\n blobSASSignatureValues.containerName,\n blobSASSignatureValues.blobName\n ),\n userDelegationKeyCredential.userDelegationKey.signedObjectId,\n userDelegationKeyCredential.userDelegationKey.signedTenantId,\n userDelegationKeyCredential.userDelegationKey.signedStartsOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedExpiresOn\n ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false)\n : \"\",\n userDelegationKeyCredential.userDelegationKey.signedService,\n userDelegationKeyCredential.userDelegationKey.signedVersion,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n undefined, // agentObjectId\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : \"\",\n blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : \"\",\n blobSASSignatureValues.version,\n resource,\n timestamp,\n blobSASSignatureValues.encryptionScope,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n ].join(\"\\n\");\n\n const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign);\n return new SASQueryParameters(\n blobSASSignatureValues.version!,\n signature,\n verifiedPermissions,\n undefined,\n undefined,\n blobSASSignatureValues.protocol,\n blobSASSignatureValues.startsOn,\n blobSASSignatureValues.expiresOn,\n blobSASSignatureValues.ipRange,\n blobSASSignatureValues.identifier,\n resource,\n blobSASSignatureValues.cacheControl,\n blobSASSignatureValues.contentDisposition,\n blobSASSignatureValues.contentEncoding,\n blobSASSignatureValues.contentLanguage,\n blobSASSignatureValues.contentType,\n userDelegationKeyCredential.userDelegationKey,\n blobSASSignatureValues.preauthorizedAgentObjectId,\n blobSASSignatureValues.correlationId,\n blobSASSignatureValues.encryptionScope\n );\n}\n\nfunction getCanonicalName(accountName: string, containerName: string, blobName?: string): string {\n // Container: \"/blob/account/containerName\"\n // Blob: \"/blob/account/containerName/blobName\"\n const elements: string[] = [`/blob/${accountName}/${containerName}`];\n if (blobName) {\n elements.push(`/${blobName}`);\n }\n return elements.join(\"\");\n}\n\nfunction SASSignatureValuesSanityCheckAndAutofill(\n blobSASSignatureValues: BlobSASSignatureValues\n): BlobSASSignatureValues {\n const version = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION;\n if (blobSASSignatureValues.snapshotTime && version < \"2018-11-09\") {\n throw RangeError(\"'version' must be >= '2018-11-09' when providing 'snapshotTime'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) {\n throw RangeError(\"Must provide 'blobName' when providing 'snapshotTime'.\");\n }\n\n if (blobSASSignatureValues.versionId && version < \"2019-10-10\") {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'versionId'.\");\n }\n if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) {\n throw RangeError(\"Must provide 'blobName' when providing 'versionId'.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'x' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when providing 'y' permission.\");\n }\n\n if (\n blobSASSignatureValues.permissions &&\n blobSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when providing 't' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)\n ) {\n throw RangeError(\"'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission.\");\n }\n\n if (\n version < \"2021-04-10\" &&\n blobSASSignatureValues.permissions &&\n (blobSASSignatureValues.permissions as ContainerSASPermissions).filterByTags\n ) {\n throw RangeError(\"'version' must be >= '2021-04-10' when providing the 'f' permission.\");\n }\n\n if (\n version < \"2020-02-10\" &&\n (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)\n ) {\n throw RangeError(\n \"'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'.\"\n );\n }\n\n if (blobSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n blobSASSignatureValues.version = version;\n return blobSASSignatureValues;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { generateUuid, HttpResponse } from \"@azure/core-http\";\nimport { StorageClientContext } from \"./generated/src/index\";\nimport { ContainerBreakLeaseOptionalParams } from \"./generatedModels\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Blob as StorageBlob, Container } from \"./generated/src/operations\";\nimport { ModifiedAccessConditions } from \"./models\";\nimport { CommonOptions } from \"./StorageClient\";\nimport { ETagNone } from \"./utils/constants\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobClient } from \"./Clients\";\nimport { ContainerClient } from 
\"./ContainerClient\";\n\n/**\n * The details for a specific lease.\n */\nexport interface Lease {\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally. If the request version is 2011-08-18 or\n * newer, the ETag value will be in quotes.\n */\n etag?: string;\n /**\n * Returns the date and time the container was\n * last modified. Any operation that modifies the blob, including an update\n * of the blob's metadata or properties, changes the last-modified time of\n * the blob.\n */\n lastModified?: Date;\n /**\n * Uniquely identifies a container's lease\n */\n leaseId?: string;\n /**\n * Approximate time remaining in the lease\n * period, in seconds.\n */\n leaseTime?: number;\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n */\n requestId?: string;\n /**\n * Indicates the version of the Blob service used\n * to execute the request. This header is returned for requests made against\n * version 2009-09-19 and above.\n */\n version?: string;\n /**\n * UTC date/time value generated by the service that\n * indicates the time at which the response was initiated\n */\n date?: Date;\n /**\n * Error code if any associated with the response that returned\n * the Lease information.\n */\n errorCode?: string;\n}\n\n/**\n * Contains the response data for operations that create, modify, or delete a lease.\n *\n * See {@link BlobLeaseClient}.\n */\nexport type LeaseOperationResponse = Lease & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: Lease;\n };\n};\n\n/**\n * Configures lease operations.\n */\nexport interface LeaseOperationOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}.\n */\nexport class BlobLeaseClient {\n private _leaseId: string;\n private _url: string;\n private _containerOrBlobOperation: Container | StorageBlob;\n private _isContainer: boolean;\n\n /**\n * Gets the lease Id.\n *\n * @readonly\n */\n public get leaseId(): string {\n return this._leaseId;\n }\n\n /**\n * Gets the url.\n *\n * @readonly\n */\n public get url(): string {\n return this._url;\n }\n\n /**\n * Creates an instance of BlobLeaseClient.\n * @param client - The client to make the lease operation requests.\n * @param leaseId - Initial proposed lease id.\n */\n constructor(client: ContainerClient | BlobClient, leaseId?: string) {\n const clientContext = new StorageClientContext(\n client.url,\n (client as any).pipeline.toServiceClientOptions()\n );\n this._url = client.url;\n\n if ((client as BlobClient).name === undefined) {\n this._isContainer = true;\n this._containerOrBlobOperation = new Container(clientContext);\n } else {\n this._isContainer = false;\n this._containerOrBlobOperation = new StorageBlob(clientContext);\n }\n\n if (!leaseId) {\n leaseId = generateUuid();\n }\n this._leaseId = leaseId;\n }\n\n /**\n * Establishes and manages a lock on a container for delete operations, or on a blob\n * for write and delete operations.\n * The lock duration can be 15 to 60 seconds, or can be infinite.\n * @see 
https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param duration - Must be between 15 to 60 seconds, or infinite (-1)\n * @param options - option to configure lease management operations.\n * @returns Response data for acquire lease operation.\n */\n public async acquireLease(\n duration: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-acquireLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.acquireLease({\n abortSignal: options.abortSignal,\n duration,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n proposedLeaseId: this._leaseId,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To change the ID of the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param proposedLeaseId - the proposed new lease Id.\n * @param options - option to configure lease management operations.\n * @returns Response data for change lease operation.\n */\n public async changeLease(\n proposedLeaseId: string,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-changeLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const response = await this._containerOrBlobOperation.changeLease(\n this._leaseId,\n proposedLeaseId,\n {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n this._leaseId = proposedLeaseId;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To free the lease if it is no longer needed so that another client may\n * immediately acquire a lease against the container or the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - option to configure lease management operations.\n * @returns Response data for release lease operation.\n */\n public async releaseLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-releaseLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.releaseLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To renew the lease.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param options - Optional option to configure lease management operations.\n * @returns Response data for renew lease operation.\n */\n public async renewLease(options: LeaseOperationOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-renewLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n return await this._containerOrBlobOperation.renewLease(this._leaseId, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * To end the lease but ensure that another client cannot acquire a new lease\n * until the current lease period has expired.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container\n * and\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob\n *\n * @param breakPeriod - Break period\n * @param options - Optional options to configure lease management operations.\n * @returns Response data for break lease operation.\n */\n public async breakLease(\n breakPeriod: number,\n options: LeaseOperationOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobLeaseClient-breakLease\", options);\n\n if (\n this._isContainer &&\n ((options.conditions?.ifMatch && options.conditions?.ifMatch !== ETagNone) ||\n (options.conditions?.ifNoneMatch && options.conditions?.ifNoneMatch !== ETagNone) ||\n options.conditions?.tagConditions)\n ) {\n throw new RangeError(\n \"The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable.\"\n );\n }\n\n try {\n const operationOptions: ContainerBreakLeaseOptionalParams = {\n abortSignal: options.abortSignal,\n breakPeriod,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n };\n return await this._containerOrBlobOperation.breakLease(operationOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { TransferProgressEvent } from \"@azure/core-http\";\nimport { Readable } from \"stream\";\n\nexport type ReadableStreamGetter = (offset: number) => Promise;\n\nexport interface RetriableReadableStreamOptions {\n /**\n * Max retry count (greater than or equal to 0), undefined or invalid value means no retry\n */\n maxRetryRequests?: number;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Debug purpose only. Used to inject an unexpected end to existing internal stream,\n * to test stream retry works well or not.\n *\n * When assign it to true, for next incoming \"data\" event of internal stream,\n * RetriableReadableStream will try to emit an \"end\" event to existing internal\n * stream to force it end and start retry from the breaking point.\n * The value will then update to \"undefined\", once the injection works.\n */\n doInjectErrorOnce?: boolean;\n\n /**\n * A threshold, not a limit. 
Dictates the amount of data that a stream buffers before it stops asking for more data.\n */\n highWaterMark?: number;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends.\n */\nexport class RetriableReadableStream extends Readable {\n private start: number;\n private offset: number;\n private end: number;\n private getter: ReadableStreamGetter;\n private source: NodeJS.ReadableStream;\n private retries: number = 0;\n private maxRetryRequests: number;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private options: RetriableReadableStreamOptions;\n\n /**\n * Creates an instance of RetriableReadableStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param getter - A method calling downloading request returning\n * a new ReadableStream from specified offset\n * @param offset - Offset position in original data source to read\n * @param count - How much data in original data source to read\n * @param options -\n */\n public constructor(\n source: NodeJS.ReadableStream,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n super({ highWaterMark: options.highWaterMark });\n this.getter = getter;\n this.source = source;\n this.start = offset;\n this.offset = offset;\n this.end = offset + count - 1;\n this.maxRetryRequests =\n options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0;\n this.onProgress = options.onProgress;\n this.options = options;\n\n this.setSourceEventHandlers();\n }\n\n public _read(): void {\n this.source.resume();\n }\n\n private setSourceEventHandlers() {\n this.source.on(\"data\", this.sourceDataHandler);\n this.source.on(\"end\", this.sourceErrorOrEndHandler);\n this.source.on(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private removeSourceEventHandlers() {\n this.source.removeListener(\"data\", this.sourceDataHandler);\n this.source.removeListener(\"end\", this.sourceErrorOrEndHandler);\n this.source.removeListener(\"error\", this.sourceErrorOrEndHandler);\n }\n\n private sourceDataHandler = (data: Buffer) => {\n if (this.options.doInjectErrorOnce) {\n this.options.doInjectErrorOnce = undefined;\n this.source.pause();\n this.source.removeAllListeners(\"data\");\n this.source.emit(\"end\");\n return;\n }\n\n // console.log(\n // `Offset: ${this.offset}, Received ${data.length} from internal stream`\n // );\n this.offset += data.length;\n if (this.onProgress) {\n this.onProgress({ loadedBytes: this.offset - this.start });\n }\n if (!this.push(data)) {\n this.source.pause();\n }\n };\n\n private sourceErrorOrEndHandler = (err?: Error) => {\n if (err && err.name === \"AbortError\") {\n this.destroy(err);\n return;\n }\n\n // console.log(\n // `Source stream emits end or error, offset: ${\n // this.offset\n // }, dest end : ${this.end}`\n // );\n this.removeSourceEventHandlers();\n if (this.offset - 1 === this.end) {\n this.push(null);\n } else if (this.offset <= this.end) {\n // console.log(\n // `retries: ${this.retries}, max retries: ${this.maxRetries}`\n // );\n if (this.retries < this.maxRetryRequests) {\n this.retries += 1;\n this.getter(this.offset)\n .then((newSource) => {\n this.source = newSource;\n this.setSourceEventHandlers();\n return;\n })\n .catch((error) => {\n this.destroy(error);\n });\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: received less data than required and reached 
maxRetires limitation. Received data offset: ${\n this.offset - 1\n }, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${\n this.maxRetryRequests\n }`\n )\n );\n }\n } else {\n this.destroy(\n new Error(\n `Data corruption failure: Received more data than original request, data needed offset is ${\n this.end\n }, received offset: ${this.offset - 1}`\n )\n );\n }\n };\n\n _destroy(error: Error | null, callback: (error?: Error) => void): void {\n // remove listener from source and release source\n this.removeSourceEventHandlers();\n (this.source as Readable).destroy();\n\n callback(error === null ? undefined : error);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { HttpResponse, isNode } from \"@azure/core-http\";\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\n\nimport {\n BlobDownloadHeaders,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n} from \"./generatedModels\";\nimport { BlobDownloadResponseParsed, Metadata, ObjectReplicationPolicy } from \"./models\";\nimport {\n ReadableStreamGetter,\n RetriableReadableStream,\n RetriableReadableStreamOptions,\n} from \"./utils/RetriableReadableStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will\n * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot\n * trigger retries defined in pipeline retry policy.)\n *\n * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js\n * Readable stream.\n */\nexport class BlobDownloadResponse implements BlobDownloadResponseParsed {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. 
Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return this.originalResponse.copyCompletedOn;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. 
Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The number of tags associated with the blob\n *\n * @readonly\n */\n public get tagCount(): number | undefined {\n return this.originalResponse.tagCount;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. 
Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * Returns the UTC date and time generated by the service that indicates the time at which the blob was\n * last read or written to.\n *\n * @readonly\n */\n public get lastAccessed(): Date | undefined {\n return this.originalResponse.lastAccessed;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the Blob service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * Indicates the versionId of the downloaded blob version.\n *\n * @readonly\n */\n public get versionId(): string | undefined {\n return this.originalResponse.versionId;\n }\n\n /**\n * Indicates whether version of this blob is a current version.\n *\n * @readonly\n */\n public get isCurrentVersion(): boolean | undefined {\n return this.originalResponse.isCurrentVersion;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. 
If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * Object Replication Policy Id of the destination blob.\n *\n * @readonly\n */\n public get objectReplicationDestinationPolicyId(): string | undefined {\n return this.originalResponse.objectReplicationDestinationPolicyId;\n }\n\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n *\n * @readonly\n */\n public get objectReplicationSourceProperties(): ObjectReplicationPolicy[] | undefined {\n return this.originalResponse.objectReplicationSourceProperties;\n }\n\n /**\n * If this blob has been sealed.\n *\n * @readonly\n */\n public get isSealed(): boolean | undefined {\n return this.originalResponse.isSealed;\n }\n\n /**\n * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire.\n *\n * @readonly\n */\n public get immutabilityPolicyExpiresOn(): Date | undefined {\n return this.originalResponse.immutabilityPolicyExpiresOn;\n }\n\n /**\n * Indicates immutability policy mode.\n *\n * @readonly\n */\n public get immutabilityPolicyMode(): BlobImmutabilityPolicyMode | undefined {\n return this.originalResponse.immutabilityPolicyMode;\n }\n\n /**\n * Indicates if a legal hold is present on the blob.\n *\n * @readonly\n */\n public get legalHold(): boolean | undefined {\n return this.originalResponse.legalHold;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get contentAsBlob(): Promise | undefined {\n return this.originalResponse.blobBody;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will automatically retry when internal read stream unexpected ends.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobDownloadResponseParsed;\n private blobDownloadStream?: RetriableReadableStream;\n\n /**\n * Creates an instance of BlobDownloadResponse.\n *\n * @param originalResponse -\n * @param getter -\n * @param offset -\n * @param count -\n * @param options -\n */\n public constructor(\n originalResponse: BlobDownloadResponseParsed,\n getter: ReadableStreamGetter,\n offset: number,\n count: number,\n options: RetriableReadableStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new RetriableReadableStream(\n this.originalResponse.readableStreamBody!,\n getter,\n offset,\n count,\n options\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport const AVRO_SYNC_MARKER_SIZE: number = 16;\nexport const AVRO_INIT_BYTES: Uint8Array = new Uint8Array([79, 98, 106, 1]);\nexport const AVRO_CODEC_KEY: string = \"avro.codec\";\nexport const AVRO_SCHEMA_KEY: string = \"avro.schema\";\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of the Object usage and non-interfaces\n/* eslint-disable @typescript-eslint/ban-types, @azure/azure-sdk/ts-use-interface-parameters */\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { KeyValuePair } from \"./utils/utils.common\";\n\n/**\n * Options to configure the AvroParser read methods.\n * See {@link AvroParser.readFixedBytes}, {@link AvroParser.readMap} and etc.\n */\ninterface AvroParserReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroParser {\n /**\n * Reads a fixed number of bytes from the stream.\n *\n * @param stream -\n * @param length -\n * @param options -\n */\n public static async readFixedBytes(\n stream: AvroReadable,\n length: number,\n options: AvroParserReadOptions = {}\n ): Promise {\n const bytes = await stream.read(length, { abortSignal: options.abortSignal });\n if (bytes.length !== length) {\n throw new Error(\"Hit stream end.\");\n }\n return bytes;\n }\n\n /**\n * Reads a single byte from the stream.\n *\n * @param stream -\n * @param options -\n */\n private static async readByte(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const buf = await AvroParser.readFixedBytes(stream, 1, options);\n return buf[0];\n }\n\n // int and long are stored in variable-length zig-zag coding.\n // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt\n // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types\n private static async readZigZagLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n let zigZagEncoded = 0;\n let significanceInBit = 0;\n let byte, haveMoreByte, significanceInFloat;\n\n do {\n byte = await AvroParser.readByte(stream, options);\n haveMoreByte = byte & 0x80;\n zigZagEncoded |= (byte & 0x7f) << significanceInBit;\n significanceInBit += 7;\n } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers\n\n if (haveMoreByte) {\n // Switch 
to float arithmetic\n // eslint-disable-next-line no-self-assign\n zigZagEncoded = zigZagEncoded;\n significanceInFloat = 268435456; // 2 ** 28.\n do {\n byte = await AvroParser.readByte(stream, options);\n zigZagEncoded += (byte & 0x7f) * significanceInFloat;\n significanceInFloat *= 128; // 2 ** 7\n } while (byte & 0x80);\n\n const res = (zigZagEncoded % 2 ? -(zigZagEncoded + 1) : zigZagEncoded) / 2;\n if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) {\n throw new Error(\"Integer overflow.\");\n }\n return res;\n }\n\n return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1);\n }\n\n public static async readLong(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readInt(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n return AvroParser.readZigZagLong(stream, options);\n }\n\n public static async readNull(): Promise {\n return null;\n }\n\n public static async readBoolean(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const b = await AvroParser.readByte(stream, options);\n if (b === 1) {\n return true;\n } else if (b === 0) {\n return false;\n } else {\n throw new Error(\"Byte was not a boolean.\");\n }\n }\n\n public static async readFloat(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 4, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat32(0, true); // littleEndian = true\n }\n\n public static async readDouble(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readFixedBytes(stream, 8, options);\n const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength);\n return view.getFloat64(0, true); // littleEndian = true\n }\n\n public static async readBytes(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const size = await AvroParser.readLong(stream, options);\n if (size < 0) {\n throw new Error(\"Bytes size was negative.\");\n }\n\n return stream.read(size, { abortSignal: options.abortSignal });\n }\n\n public static async readString(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n const u8arr = await AvroParser.readBytes(stream, options);\n const utf8decoder = new TextDecoder();\n return utf8decoder.decode(u8arr);\n }\n\n private static async readMapPair(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const key = await AvroParser.readString(stream, options);\n // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter.\n const value = await readItemMethod(stream, options);\n return { key, value };\n }\n\n public static async readMap(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise> {\n const readPairMethod = (\n s: AvroReadable,\n opts: AvroParserReadOptions = {}\n ): Promise> => {\n return AvroParser.readMapPair(s, readItemMethod, opts);\n };\n\n const pairs: KeyValuePair[] = await AvroParser.readArray(stream, readPairMethod, options);\n\n const dict: Record = {};\n for (const pair of pairs) {\n dict[pair.key] = pair.value;\n }\n return dict;\n }\n\n private 
static async readArray(\n stream: AvroReadable,\n readItemMethod: (s: AvroReadable, options?: AvroParserReadOptions) => Promise,\n options: AvroParserReadOptions = {}\n ): Promise {\n const items: T[] = [];\n for (\n let count = await AvroParser.readLong(stream, options);\n count !== 0;\n count = await AvroParser.readLong(stream, options)\n ) {\n if (count < 0) {\n // Ignore block sizes\n await AvroParser.readLong(stream, options);\n count = -count;\n }\n\n while (count--) {\n const item: T = await readItemMethod(stream, options);\n items.push(item);\n }\n }\n return items;\n }\n}\n\ninterface RecordField {\n name: string;\n type: string | ObjectSchema | (string | ObjectSchema)[]; // Unions may not immediately contain other unions.\n}\n\nenum AvroComplex {\n RECORD = \"record\",\n ENUM = \"enum\",\n ARRAY = \"array\",\n MAP = \"map\",\n UNION = \"union\",\n FIXED = \"fixed\",\n}\n\ninterface ObjectSchema {\n type: Exclude;\n name?: string;\n aliases?: string;\n fields?: RecordField[];\n symbols?: string[];\n values?: string;\n size?: number;\n}\n\nenum AvroPrimitive {\n NULL = \"null\",\n BOOLEAN = \"boolean\",\n INT = \"int\",\n LONG = \"long\",\n FLOAT = \"float\",\n DOUBLE = \"double\",\n BYTES = \"bytes\",\n STRING = \"string\",\n}\n\nexport abstract class AvroType {\n /**\n * Reads an object from the stream.\n */\n public abstract read(\n stream: AvroReadable,\n options?: AvroParserReadOptions\n ): Promise; // eslint-disable-line @typescript-eslint/ban-types\n\n /**\n * Determines the AvroType from the Avro Schema.\n */\n public static fromSchema(schema: string | Object): AvroType {\n if (typeof schema === \"string\") {\n return AvroType.fromStringSchema(schema);\n } else if (Array.isArray(schema)) {\n return AvroType.fromArraySchema(schema);\n } else {\n return AvroType.fromObjectSchema(schema as ObjectSchema);\n }\n }\n\n private static fromStringSchema(schema: string): AvroType {\n switch (schema) {\n case AvroPrimitive.NULL:\n case AvroPrimitive.BOOLEAN:\n case AvroPrimitive.INT:\n case AvroPrimitive.LONG:\n case AvroPrimitive.FLOAT:\n case AvroPrimitive.DOUBLE:\n case AvroPrimitive.BYTES:\n case AvroPrimitive.STRING:\n return new AvroPrimitiveType(schema as AvroPrimitive);\n default:\n throw new Error(`Unexpected Avro type ${schema}`);\n }\n }\n\n private static fromArraySchema(schema: any[]): AvroType {\n return new AvroUnionType(schema.map(AvroType.fromSchema));\n }\n\n private static fromObjectSchema(schema: ObjectSchema): AvroType {\n const type = schema.type;\n // Primitives can be defined as strings or objects\n try {\n return AvroType.fromStringSchema(type);\n } catch (err: any) {\n // eslint-disable-line no-empty\n }\n\n switch (type) {\n case AvroComplex.RECORD:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.name) {\n throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`);\n }\n\n // eslint-disable-next-line no-case-declarations\n const fields: Record = {};\n if (!schema.fields) {\n throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`);\n }\n for (const field of schema.fields) {\n fields[field.name] = AvroType.fromSchema(field.type);\n }\n return new AvroRecordType(fields, schema.name);\n case AvroComplex.ENUM:\n if (schema.aliases) {\n throw new Error(`aliases currently is not supported, schema: ${schema}`);\n }\n if (!schema.symbols) {\n throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`);\n }\n return new 
AvroEnumType(schema.symbols);\n case AvroComplex.MAP:\n if (!schema.values) {\n throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`);\n }\n return new AvroMapType(AvroType.fromSchema(schema.values));\n case AvroComplex.ARRAY: // Unused today\n case AvroComplex.FIXED: // Unused today\n default:\n throw new Error(`Unexpected Avro type ${type} in ${schema}`);\n }\n }\n}\n\nclass AvroPrimitiveType extends AvroType {\n private _primitive: AvroPrimitive;\n\n constructor(primitive: AvroPrimitive) {\n super();\n this._primitive = primitive;\n }\n\n public read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise {\n switch (this._primitive) {\n case AvroPrimitive.NULL:\n return AvroParser.readNull();\n case AvroPrimitive.BOOLEAN:\n return AvroParser.readBoolean(stream, options);\n case AvroPrimitive.INT:\n return AvroParser.readInt(stream, options);\n case AvroPrimitive.LONG:\n return AvroParser.readLong(stream, options);\n case AvroPrimitive.FLOAT:\n return AvroParser.readFloat(stream, options);\n case AvroPrimitive.DOUBLE:\n return AvroParser.readDouble(stream, options);\n case AvroPrimitive.BYTES:\n return AvroParser.readBytes(stream, options);\n case AvroPrimitive.STRING:\n return AvroParser.readString(stream, options);\n default:\n throw new Error(\"Unknown Avro Primitive\");\n }\n }\n}\n\nclass AvroEnumType extends AvroType {\n private readonly _symbols: string[];\n\n constructor(symbols: string[]) {\n super();\n this._symbols = symbols;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const value = await AvroParser.readInt(stream, options);\n return this._symbols[value];\n }\n}\n\nclass AvroUnionType extends AvroType {\n private readonly _types: AvroType[];\n\n constructor(types: AvroType[]) {\n super();\n this._types = types;\n }\n\n public async read(\n stream: AvroReadable,\n options: AvroParserReadOptions = {}\n ): Promise { // eslint-disable-line @typescript-eslint/ban-types\n const typeIndex = await AvroParser.readInt(stream, options);\n return this._types[typeIndex].read(stream, options);\n }\n}\n\nclass AvroMapType extends AvroType {\n private readonly _itemType: AvroType;\n\n constructor(itemType: AvroType) {\n super();\n this._itemType = itemType;\n }\n\n public read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const readItemMethod = (\n s: AvroReadable,\n opts?: AvroParserReadOptions\n ): Promise => { \n return this._itemType.read(s, opts);\n };\n return AvroParser.readMap(stream, readItemMethod, options);\n }\n}\n\nclass AvroRecordType extends AvroType {\n private readonly _name: string;\n private readonly _fields: Record;\n\n constructor(fields: Record, name: string) {\n super();\n this._fields = fields;\n this._name = name;\n }\n\n public async read(stream: AvroReadable, options: AvroParserReadOptions = {}): Promise {\n const record: Record = {};\n record[\"$schema\"] = this._name;\n for (const key in this._fields) {\n if (Object.prototype.hasOwnProperty.call(this._fields, key)) {\n record[key] = await this._fields[key].read(stream, options);\n }\n }\n return record;\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nexport interface KeyValuePair {\n key: string;\n value: T;\n}\n\nexport function arraysEqual(a: Uint8Array, b: Uint8Array): boolean {\n if (a === b) return true;\n // eslint-disable-next-line eqeqeq\n if (a == null || b == null) return false;\n if (a.length !== b.length) return false;\n\n for (let i 
= 0; i < a.length; ++i) {\n if (a[i] !== b[i]) return false;\n }\n return true;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// TODO: Do a review of non-interfaces\n/* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */\n\nimport \"@azure/core-paging\";\nimport {\n AVRO_CODEC_KEY,\n AVRO_INIT_BYTES,\n AVRO_SCHEMA_KEY,\n AVRO_SYNC_MARKER_SIZE,\n} from \"./AvroConstants\";\nimport { AvroParser, AvroType } from \"./AvroParser\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { AvroReadable } from \"./AvroReadable\";\nimport { arraysEqual } from \"./utils/utils.common\";\n\n/**\n * Options to configure the {@link AvroReader.parseObjects} operation.\n */\nexport interface AvroParseOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport class AvroReader {\n private readonly _dataStream: AvroReadable;\n\n private readonly _headerStream: AvroReadable;\n\n private _syncMarker?: Uint8Array;\n\n private _metadata?: Record;\n\n private _itemType?: AvroType;\n\n private _itemsRemainingInBlock?: number;\n\n // Remembers where we started if partial data stream was provided.\n private readonly _initialBlockOffset: number;\n\n /// The byte offset within the Avro file (both header and data)\n /// of the start of the current block.\n private _blockOffset: number;\n public get blockOffset(): number {\n return this._blockOffset;\n }\n\n private _objectIndex: number;\n public get objectIndex(): number {\n return this._objectIndex;\n }\n\n private _initialized: boolean;\n\n constructor(dataStream: AvroReadable);\n\n constructor(\n dataStream: AvroReadable,\n headerStream: AvroReadable,\n currentBlockOffset: number,\n indexWithinCurrentBlock: number\n );\n\n constructor(\n dataStream: AvroReadable,\n headerStream?: AvroReadable,\n currentBlockOffset?: number,\n indexWithinCurrentBlock?: number\n ) {\n this._dataStream = dataStream;\n this._headerStream = headerStream || dataStream;\n this._initialized = false;\n this._blockOffset = currentBlockOffset || 0;\n this._objectIndex = indexWithinCurrentBlock || 0;\n this._initialBlockOffset = currentBlockOffset || 0;\n }\n\n private async initialize(options: AvroParseOptions = {}): Promise {\n const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, {\n abortSignal: options.abortSignal,\n });\n if (!arraysEqual(header, AVRO_INIT_BYTES)) {\n throw new Error(\"Stream is not an Avro file.\");\n }\n\n // File metadata is written as if defined by the following map schema:\n // { \"type\": \"map\", \"values\": \"bytes\"}\n this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, {\n abortSignal: options.abortSignal,\n });\n\n // Validate codec\n const codec = this._metadata![AVRO_CODEC_KEY];\n if (!(codec === undefined || codec === null || codec === \"null\")) {\n throw new Error(\"Codecs are not supported\");\n }\n\n // The 16-byte, randomly-generated sync marker for this file.\n this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n // Parse the schema\n const schema = JSON.parse(this._metadata![AVRO_SCHEMA_KEY]);\n this._itemType = AvroType.fromSchema(schema);\n\n if (this._blockOffset === 0) {\n this._blockOffset = this._initialBlockOffset + 
this._dataStream.position;\n }\n\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n // skip block length\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n\n this._initialized = true;\n if (this._objectIndex && this._objectIndex > 0) {\n for (let i = 0; i < this._objectIndex; i++) {\n await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal });\n this._itemsRemainingInBlock!--;\n }\n }\n }\n\n public hasNext(): boolean {\n return !this._initialized || this._itemsRemainingInBlock! > 0;\n }\n\n public async *parseObjects(\n options: AvroParseOptions = {}\n ): AsyncIterableIterator | null> {\n if (!this._initialized) {\n await this.initialize(options);\n }\n\n while (this.hasNext()) {\n const result = await this._itemType!.read(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n\n this._itemsRemainingInBlock!--;\n this._objectIndex!++;\n\n if (this._itemsRemainingInBlock === 0) {\n const marker = await AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, {\n abortSignal: options.abortSignal,\n });\n\n this._blockOffset = this._initialBlockOffset + this._dataStream.position;\n this._objectIndex = 0;\n\n if (!arraysEqual(this._syncMarker!, marker)) {\n throw new Error(\"Stream is not a valid Avro file.\");\n }\n\n try {\n this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, {\n abortSignal: options.abortSignal,\n });\n } catch (err: any) {\n // We hit the end of the stream.\n this._itemsRemainingInBlock = 0;\n }\n\n if (this._itemsRemainingInBlock! > 0) {\n // Ignore block size\n await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal });\n }\n }\n yield result;\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\n\n/**\n * Options to configure the {@link AvroReadable.read} operation.\n */\nexport interface AvroReadableReadOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\nexport abstract class AvroReadable {\n public abstract get position(): number;\n public abstract read(size: number, options?: AvroReadableReadOptions): Promise;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AvroReadable, AvroReadableReadOptions } from \"./AvroReadable\";\nimport { AbortError } from \"@azure/abort-controller\";\n\nconst ABORT_ERROR = new AbortError(\"Reading from the avro stream was aborted.\");\n\nexport class AvroReadableFromStream extends AvroReadable {\n private _position: number;\n private _readable: NodeJS.ReadableStream;\n\n private toUint8Array(data: string | Buffer): Uint8Array {\n if (typeof data === \"string\") {\n return Buffer.from(data);\n }\n return data;\n }\n\n constructor(readable: NodeJS.ReadableStream) {\n super();\n this._readable = readable;\n this._position = 0;\n }\n public get position(): number {\n return this._position;\n }\n public async read(size: number, options: AvroReadableReadOptions = {}): Promise {\n if (options.abortSignal?.aborted) {\n throw ABORT_ERROR;\n }\n\n if (size < 0) {\n throw new Error(`size parameter should be positive: ${size}`);\n }\n\n if (size === 0) {\n return new Uint8Array();\n }\n\n if (!this._readable.readable) {\n throw 
new Error(\"Stream no longer readable.\");\n }\n // See if there is already enough data.\n const chunk = this._readable.read(size);\n if (chunk) {\n this._position += chunk.length;\n // chunk.length maybe less than desired size if the stream ends.\n return this.toUint8Array(chunk);\n } else {\n // register callback to wait for enough data to read\n return new Promise((resolve, reject) => {\n /* eslint-disable @typescript-eslint/no-use-before-define */\n const cleanUp: () => void = () => {\n this._readable.removeListener(\"readable\", readableCallback);\n this._readable.removeListener(\"error\", rejectCallback);\n this._readable.removeListener(\"end\", rejectCallback);\n this._readable.removeListener(\"close\", rejectCallback);\n\n if (options.abortSignal) {\n options.abortSignal!.removeEventListener(\"abort\", abortHandler);\n }\n };\n\n const readableCallback: () => void = () => {\n const callbackChunk = this._readable.read(size);\n if (callbackChunk) {\n this._position += callbackChunk.length;\n cleanUp();\n // callbackChunk.length maybe less than desired size if the stream ends.\n resolve(this.toUint8Array(callbackChunk));\n }\n };\n\n const rejectCallback: () => void = () => {\n cleanUp();\n reject();\n };\n\n const abortHandler: () => void = () => {\n cleanUp();\n reject(ABORT_ERROR);\n };\n\n this._readable.on(\"readable\", readableCallback);\n this._readable.once(\"error\", rejectCallback);\n this._readable.once(\"end\", rejectCallback);\n this._readable.once(\"close\", rejectCallback);\n if (options.abortSignal) {\n options.abortSignal!.addEventListener(\"abort\", abortHandler);\n }\n /* eslint-enable @typescript-eslint/no-use-before-define */\n });\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable } from \"stream\";\n\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport { TransferProgressEvent } from \"@azure/core-http\";\n\nimport { AvroReadableFromStream, AvroReader } from \"../../../storage-internal-avro/src\";\nimport { BlobQueryError } from \"../Clients\";\n\nexport interface BlobQuickQueryStreamOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Read progress event handler\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query.\n */\nexport class BlobQuickQueryStream extends Readable {\n private source: NodeJS.ReadableStream;\n private avroReader: AvroReader;\n private avroIter: AsyncIterableIterator;\n private avroPaused: boolean = true;\n private onProgress?: (progress: TransferProgressEvent) => void;\n private onError?: (error: BlobQueryError) => void;\n\n /**\n * Creates an instance of BlobQuickQueryStream.\n *\n * @param source - The current ReadableStream returned from getter\n * @param options -\n */\n public constructor(source: NodeJS.ReadableStream, options: BlobQuickQueryStreamOptions = {}) {\n super();\n this.source = source;\n this.onProgress = options.onProgress;\n this.onError = options.onError;\n this.avroReader = new AvroReader(new AvroReadableFromStream(this.source));\n this.avroIter = this.avroReader.parseObjects({ 
abortSignal: options.abortSignal });\n }\n\n public _read(): void {\n if (this.avroPaused) {\n this.readInternal().catch((err) => {\n this.emit(\"error\", err);\n });\n }\n }\n\n private async readInternal() {\n this.avroPaused = false;\n let avroNext;\n do {\n avroNext = await this.avroIter.next();\n if (avroNext.done) {\n break;\n }\n const obj = avroNext.value;\n const schema = (obj as any).$schema;\n if (typeof schema !== \"string\") {\n throw Error(\"Missing schema in avro record.\");\n }\n\n switch (schema) {\n case \"com.microsoft.azure.storage.queryBlobContents.resultData\":\n {\n const data = (obj as any).data;\n if (data instanceof Uint8Array === false) {\n throw Error(\"Invalid data in avro result record.\");\n }\n if (!this.push(Buffer.from(data))) {\n this.avroPaused = true;\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.progress\":\n {\n const bytesScanned = (obj as any).bytesScanned;\n if (typeof bytesScanned !== \"number\") {\n throw Error(\"Invalid bytesScanned in avro progress record.\");\n }\n if (this.onProgress) {\n this.onProgress({ loadedBytes: bytesScanned });\n }\n }\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.end\":\n if (this.onProgress) {\n const totalBytes = (obj as any).totalBytes;\n if (typeof totalBytes !== \"number\") {\n throw Error(\"Invalid totalBytes in avro end record.\");\n }\n this.onProgress({ loadedBytes: totalBytes });\n }\n this.push(null);\n break;\n case \"com.microsoft.azure.storage.queryBlobContents.error\":\n if (this.onError) {\n const fatal = (obj as any).fatal;\n if (typeof fatal !== \"boolean\") {\n throw Error(\"Invalid fatal in avro error record.\");\n }\n const name = (obj as any).name;\n if (typeof name !== \"string\") {\n throw Error(\"Invalid name in avro error record.\");\n }\n const description = (obj as any).description;\n if (typeof description !== \"string\") {\n throw Error(\"Invalid description in avro error record.\");\n }\n const position = (obj as any).position;\n if (typeof position !== \"number\") {\n throw Error(\"Invalid position in avro error record.\");\n }\n this.onError({\n position,\n name,\n isFatal: fatal,\n description,\n });\n }\n break;\n default:\n throw Error(`Unknown schema ${schema} in avro progress record.`);\n }\n } while (!avroNext.done && !this.avroPaused);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse, isNode } from \"@azure/core-http\";\n\nimport {\n BlobDownloadResponseModel,\n BlobType,\n CopyStatusType,\n LeaseDurationType,\n LeaseStateType,\n LeaseStatusType,\n BlobDownloadHeaders,\n BlobQueryResponseModel,\n} from \"./generatedModels\";\nimport { Metadata } from \"./models\";\nimport { BlobQuickQueryStream, BlobQuickQueryStreamOptions } from \"./utils/BlobQuickQueryStream\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will\n * parse avor data returned by blob query.\n */\nexport class BlobQueryResponse implements BlobDownloadResponseModel {\n /**\n * Indicates that the service supports\n * requests for partial file content.\n *\n * @readonly\n */\n public get acceptRanges(): string | undefined {\n return this.originalResponse.acceptRanges;\n }\n\n /**\n * Returns if it was previously specified\n * for the file.\n *\n * @readonly\n */\n public get cacheControl(): string | undefined {\n return this.originalResponse.cacheControl;\n }\n\n /**\n * Returns the value that was specified\n * 
for the 'x-ms-content-disposition' header and specifies how to process the\n * response.\n *\n * @readonly\n */\n public get contentDisposition(): string | undefined {\n return this.originalResponse.contentDisposition;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Encoding request header.\n *\n * @readonly\n */\n public get contentEncoding(): string | undefined {\n return this.originalResponse.contentEncoding;\n }\n\n /**\n * Returns the value that was specified\n * for the Content-Language request header.\n *\n * @readonly\n */\n public get contentLanguage(): string | undefined {\n return this.originalResponse.contentLanguage;\n }\n\n /**\n * The current sequence number for a\n * page blob. This header is not returned for block blobs or append blobs.\n *\n * @readonly\n */\n public get blobSequenceNumber(): number | undefined {\n return this.originalResponse.blobSequenceNumber;\n }\n\n /**\n * The blob's type. Possible values include:\n * 'BlockBlob', 'PageBlob', 'AppendBlob'.\n *\n * @readonly\n */\n public get blobType(): BlobType | undefined {\n return this.originalResponse.blobType;\n }\n\n /**\n * The number of bytes present in the\n * response body.\n *\n * @readonly\n */\n public get contentLength(): number | undefined {\n return this.originalResponse.contentLength;\n }\n\n /**\n * If the file has an MD5 hash and the\n * request is to read the full file, this response header is returned so that\n * the client can check for message content integrity. If the request is to\n * read a specified range and the 'x-ms-range-get-content-md5' is set to\n * true, then the request returns an MD5 hash for the range, as long as the\n * range size is less than or equal to 4 MB. If neither of these sets of\n * conditions is true, then no value is returned for the 'Content-MD5'\n * header.\n *\n * @readonly\n */\n public get contentMD5(): Uint8Array | undefined {\n return this.originalResponse.contentMD5;\n }\n\n /**\n * Indicates the range of bytes returned if\n * the client requested a subset of the file by setting the Range request\n * header.\n *\n * @readonly\n */\n public get contentRange(): string | undefined {\n return this.originalResponse.contentRange;\n }\n\n /**\n * The content type specified for the file.\n * The default content type is 'application/octet-stream'\n *\n * @readonly\n */\n public get contentType(): string | undefined {\n return this.originalResponse.contentType;\n }\n\n /**\n * Conclusion time of the last attempted\n * Copy File operation where this file was the destination file. This value\n * can specify the time of a completed, aborted, or failed copy attempt.\n *\n * @readonly\n */\n public get copyCompletedOn(): Date | undefined {\n return undefined;\n }\n\n /**\n * String identifier for the last attempted Copy\n * File operation where this file was the destination file.\n *\n * @readonly\n */\n public get copyId(): string | undefined {\n return this.originalResponse.copyId;\n }\n\n /**\n * Contains the number of bytes copied and\n * the total bytes in the source in the last attempted Copy File operation\n * where this file was the destination file. 
Can show between 0 and\n * Content-Length bytes copied.\n *\n * @readonly\n */\n public get copyProgress(): string | undefined {\n return this.originalResponse.copyProgress;\n }\n\n /**\n * URL up to 2KB in length that specifies the\n * source file used in the last attempted Copy File operation where this file\n * was the destination file.\n *\n * @readonly\n */\n public get copySource(): string | undefined {\n return this.originalResponse.copySource;\n }\n\n /**\n * State of the copy operation\n * identified by 'x-ms-copy-id'. Possible values include: 'pending',\n * 'success', 'aborted', 'failed'\n *\n * @readonly\n */\n public get copyStatus(): CopyStatusType | undefined {\n return this.originalResponse.copyStatus;\n }\n\n /**\n * Only appears when\n * x-ms-copy-status is failed or pending. Describes cause of fatal or\n * non-fatal copy operation failure.\n *\n * @readonly\n */\n public get copyStatusDescription(): string | undefined {\n return this.originalResponse.copyStatusDescription;\n }\n\n /**\n * When a blob is leased,\n * specifies whether the lease is of infinite or fixed duration. Possible\n * values include: 'infinite', 'fixed'.\n *\n * @readonly\n */\n public get leaseDuration(): LeaseDurationType | undefined {\n return this.originalResponse.leaseDuration;\n }\n\n /**\n * Lease state of the blob. Possible\n * values include: 'available', 'leased', 'expired', 'breaking', 'broken'.\n *\n * @readonly\n */\n public get leaseState(): LeaseStateType | undefined {\n return this.originalResponse.leaseState;\n }\n\n /**\n * The current lease status of the\n * blob. Possible values include: 'locked', 'unlocked'.\n *\n * @readonly\n */\n public get leaseStatus(): LeaseStatusType | undefined {\n return this.originalResponse.leaseStatus;\n }\n\n /**\n * A UTC date/time value generated by the service that\n * indicates the time at which the response was initiated.\n *\n * @readonly\n */\n public get date(): Date | undefined {\n return this.originalResponse.date;\n }\n\n /**\n * The number of committed blocks\n * present in the blob. This header is returned only for append blobs.\n *\n * @readonly\n */\n public get blobCommittedBlockCount(): number | undefined {\n return this.originalResponse.blobCommittedBlockCount;\n }\n\n /**\n * The ETag contains a value that you can use to\n * perform operations conditionally, in quotes.\n *\n * @readonly\n */\n public get etag(): string | undefined {\n return this.originalResponse.etag;\n }\n\n /**\n * The error code.\n *\n * @readonly\n */\n public get errorCode(): string | undefined {\n return this.originalResponse.errorCode;\n }\n\n /**\n * The value of this header is set to\n * true if the file data and application metadata are completely encrypted\n * using the specified algorithm. Otherwise, the value is set to false (when\n * the file is unencrypted, or if only parts of the file/application metadata\n * are encrypted).\n *\n * @readonly\n */\n public get isServerEncrypted(): boolean | undefined {\n return this.originalResponse.isServerEncrypted;\n }\n\n /**\n * If the blob has a MD5 hash, and if\n * request contains range header (Range or x-ms-range), this response header\n * is returned with the value of the whole blob's MD5 value. 
This value may\n * or may not be equal to the value returned in Content-MD5 header, with the\n * latter calculated from the requested range.\n *\n * @readonly\n */\n public get blobContentMD5(): Uint8Array | undefined {\n return this.originalResponse.blobContentMD5;\n }\n\n /**\n * Returns the date and time the file was last\n * modified. Any operation that modifies the file or its properties updates\n * the last modified time.\n *\n * @readonly\n */\n public get lastModified(): Date | undefined {\n return this.originalResponse.lastModified;\n }\n\n /**\n * A name-value pair\n * to associate with a file storage object.\n *\n * @readonly\n */\n public get metadata(): Metadata | undefined {\n return this.originalResponse.metadata;\n }\n\n /**\n * This header uniquely identifies the request\n * that was made and can be used for troubleshooting the request.\n *\n * @readonly\n */\n public get requestId(): string | undefined {\n return this.originalResponse.requestId;\n }\n\n /**\n * If a client request id header is sent in the request, this header will be present in the\n * response with the same value.\n *\n * @readonly\n */\n public get clientRequestId(): string | undefined {\n return this.originalResponse.clientRequestId;\n }\n\n /**\n * Indicates the version of the File service used\n * to execute the request.\n *\n * @readonly\n */\n public get version(): string | undefined {\n return this.originalResponse.version;\n }\n\n /**\n * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned\n * when the blob was encrypted with a customer-provided key.\n *\n * @readonly\n */\n public get encryptionKeySha256(): string | undefined {\n return this.originalResponse.encryptionKeySha256;\n }\n\n /**\n * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to\n * true, then the request returns a crc64 for the range, as long as the range size is less than\n * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is\n * specified in the same request, it will fail with 400(Bad Request)\n */\n public get contentCrc64(): Uint8Array | undefined {\n return this.originalResponse.contentCrc64;\n }\n\n /**\n * The response body as a browser Blob.\n * Always undefined in node.js.\n *\n * @readonly\n */\n public get blobBody(): Promise | undefined {\n return undefined;\n }\n\n /**\n * The response body as a node.js Readable stream.\n * Always undefined in the browser.\n *\n * It will parse avor data returned by blob query.\n *\n * @readonly\n */\n public get readableStreamBody(): NodeJS.ReadableStream | undefined {\n return isNode ? 
this.blobDownloadStream : undefined;\n }\n\n /**\n * The HTTP response.\n */\n public get _response(): HttpResponse & {\n parsedHeaders: BlobDownloadHeaders;\n } {\n return this.originalResponse._response;\n }\n\n private originalResponse: BlobQueryResponseModel;\n private blobDownloadStream?: BlobQuickQueryStream;\n\n /**\n * Creates an instance of BlobQueryResponse.\n *\n * @param originalResponse -\n * @param options -\n */\n public constructor(\n originalResponse: BlobQueryResponseModel,\n options: BlobQuickQueryStreamOptions = {}\n ) {\n this.originalResponse = originalResponse;\n this.blobDownloadStream = new BlobQuickQueryStream(\n this.originalResponse.readableStreamBody!,\n options\n );\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BlobImmutabilityPolicyMode } from \"./generatedModels\";\nimport {\n LeaseAccessConditions,\n SequenceNumberAccessConditions,\n AppendPositionAccessConditions,\n AccessTier,\n CpkInfo,\n BlobDownloadResponseModel,\n} from \"./generatedModels\";\nimport { EncryptionAlgorithmAES25 } from \"./utils/constants\";\n\n/**\n * Blob tags.\n */\nexport type Tags = Record;\n\n/**\n * A map of name-value pairs to associate with the resource.\n */\nexport interface Metadata {\n /**\n * A name-value pair.\n */\n [propertyName: string]: string;\n}\n\n/**\n * standard HTTP conditional headers and tags condition.\n */\nexport interface ModifiedAccessConditions\n extends MatchConditions,\n ModificationConditions,\n TagConditions {}\n\n/**\n * standard HTTP conditional headers, tags condition and lease condition\n */\nexport interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions {}\n\n/**\n * Conditions to add to the creation of this page blob.\n */\nexport interface PageBlobRequestConditions\n extends BlobRequestConditions,\n SequenceNumberAccessConditions {}\n\n/**\n * Conditions to add to the creation of this append blob.\n */\nexport interface AppendBlobRequestConditions\n extends BlobRequestConditions,\n AppendPositionAccessConditions {}\n\n/**\n * Specifies HTTP options for conditional requests based on modification time.\n */\nexport interface ModificationConditions {\n /**\n * Specify this header value to operate only on a blob if it has been modified since the\n * specified date/time.\n */\n ifModifiedSince?: Date;\n /**\n * Specify this header value to operate only on a blob if it has not been modified since the\n * specified date/time.\n */\n ifUnmodifiedSince?: Date;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on ETag matching.\n */\nexport interface MatchConditions {\n /**\n * Specify an ETag value to operate only on blobs with a matching value.\n */\n ifMatch?: string;\n /**\n * Specify an ETag value to operate only on blobs without a matching value.\n */\n ifNoneMatch?: string;\n}\n\n/**\n * Specifies HTTP options for conditional requests based on blob tags.\n */\nexport interface TagConditions {\n /**\n * Optional SQL statement to apply to the tags of the blob.\n */\n tagConditions?: string;\n}\n\n/**\n * Conditions to meet for the container.\n */\nexport interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions {}\n\n/**\n * Represents the access tier on a blob.\n * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.}\n */\nexport enum BlockBlobTier {\n /**\n * Optimized for storing data 
that is accessed frequently.\n */\n Hot = \"Hot\",\n /**\n * Optimized for storing data that is infrequently accessed and stored for at least 30 days.\n */\n Cool = \"Cool\",\n /**\n * Optimized for storing data that is rarely accessed and stored for at least 180 days\n * with flexible latency requirements (on the order of hours).\n */\n Archive = \"Archive\",\n}\n\n/**\n * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts.\n * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here}\n * for detailed information on the corresponding IOPS and throughput per PageBlobTier.\n */\nexport enum PremiumPageBlobTier {\n /**\n * P4 Tier.\n */\n P4 = \"P4\",\n /**\n * P6 Tier.\n */\n P6 = \"P6\",\n /**\n * P10 Tier.\n */\n P10 = \"P10\",\n /**\n * P15 Tier.\n */\n P15 = \"P15\",\n /**\n * P20 Tier.\n */\n P20 = \"P20\",\n /**\n * P30 Tier.\n */\n P30 = \"P30\",\n /**\n * P40 Tier.\n */\n P40 = \"P40\",\n /**\n * P50 Tier.\n */\n P50 = \"P50\",\n /**\n * P60 Tier.\n */\n P60 = \"P60\",\n /**\n * P70 Tier.\n */\n P70 = \"P70\",\n /**\n * P80 Tier.\n */\n P80 = \"P80\",\n}\n\nexport function toAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string | undefined\n): AccessTier | undefined {\n if (tier === undefined) {\n return undefined;\n }\n\n return tier as AccessTier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service).\n}\n\nexport function ensureCpkIfSpecified(cpk: CpkInfo | undefined, isHttps: boolean): void {\n if (cpk && !isHttps) {\n throw new RangeError(\"Customer-provided encryption key must be used over HTTPS.\");\n }\n\n if (cpk && !cpk.encryptionAlgorithm) {\n cpk.encryptionAlgorithm = EncryptionAlgorithmAES25;\n }\n}\n\n/**\n * Specifies the Replication Status of a blob. This is used when a storage account has\n * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}.\n */\nexport type ObjectReplicationStatus = \"complete\" | \"failed\";\n\n/**\n * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob.\n * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}.\n */\nexport interface ObjectReplicationRule {\n /**\n * The Object Replication Rule ID.\n */\n ruleId: string;\n\n /**\n * The Replication Status\n */\n replicationStatus: ObjectReplicationStatus;\n}\n\n/**\n * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}.\n * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the\n * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses\n * (e.g. 
{@link BlobProperties.ObjectReplicationDestinationPolicyId}.\n */\nexport interface ObjectReplicationPolicy {\n /**\n * The Object Replication Policy ID.\n */\n policyId: string;\n\n /**\n * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID.\n */\n rules: ObjectReplicationRule[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadResponseParsed extends BlobDownloadResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * The type of a {@link BlobQueryArrowField}.\n */\nexport type BlobQueryArrowFieldType =\n | \"int64\"\n | \"bool\"\n | \"timestamp[ms]\"\n | \"string\"\n | \"double\"\n | \"decimal\";\n\n/**\n * Describe a field in {@link BlobQueryArrowConfiguration}.\n */\nexport interface BlobQueryArrowField {\n /**\n * The type of the field.\n */\n type: BlobQueryArrowFieldType;\n\n /**\n * The name of the field.\n */\n name?: string;\n\n /**\n * The precision of the field. Required if type is \"decimal\".\n */\n precision?: number;\n\n /**\n * The scale of the field. Required if type is is \"decimal\".\n */\n scale?: number;\n}\n\n/**\n * Describe immutable policy for blob.\n */\nexport interface BlobImmutabilityPolicy {\n /**\n * Specifies the date time when the blobs immutability policy is set to expire.\n */\n expiriesOn?: Date;\n /**\n * Specifies the immutability policy mode to set on the blob.\n */\n policyMode?: BlobImmutabilityPolicyMode;\n}\n\n/**\n * Represents authentication information in Authorization, ProxyAuthorization,\n * WWW-Authenticate, and Proxy-Authenticate header values.\n */\nexport interface HttpAuthorization {\n /**\n * The scheme to use for authorization.\n */\n scheme: string;\n\n /**\n * the credentials containing the authentication information of the user agent for the resource being requested.\n */\n value: string;\n}\n\n/**\n * Defines the known cloud audiences for Storage.\n */\nexport enum StorageBlobAudience {\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Storage.\n */\n StorageOAuthScopes = \"https://storage.azure.com/.default\",\n /**\n * The OAuth scope to use to retrieve an AAD token for Azure Disk.\n */\n DiskComputeOAuthScopes = \"https://disk.compute.azure.com/.default\",\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpResponse } from \"@azure/core-http\";\nimport {\n PageBlobGetPageRangesHeaders,\n PageBlobGetPageRangesDiffHeaders,\n PageBlobGetPageRangesResponseModel,\n PageBlobGetPageRangesDiffResponseModel,\n} from \"./generatedModels\";\nimport { Range } from \"./Range\";\n\n/**\n * List of page ranges for a blob.\n */\nexport interface PageList {\n /**\n * Valid non-overlapping page ranges.\n */\n pageRange?: Range[];\n /**\n * Present if the prevSnapshot parameter was specified and there were cleared\n * pages between the previous snapshot and the target snapshot.\n */\n clearRange?: Range[];\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: 
PageBlobGetPageRangesHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Contains response data for the {@link BlobClient.getPageRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffResponse\n extends PageList,\n PageBlobGetPageRangesDiffHeaders {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: PageBlobGetPageRangesDiffHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: PageList;\n };\n}\n\n/**\n * Function that converts PageRange and ClearRange to a common Range object.\n * PageRange and ClearRange have start and end while Range offset and count\n * this function normalizes to Range.\n * @param response - Model PageBlob Range response\n */\nexport function rangeResponseFromModel(\n response: PageBlobGetPageRangesResponseModel | PageBlobGetPageRangesDiffResponseModel\n): PageBlobGetPageRangesResponse | PageBlobGetPageRangesDiffResponse {\n const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({\n offset: x.start,\n count: x.end - x.start,\n }));\n\n return {\n ...response,\n pageRange,\n clearRange,\n _response: {\n ...response._response,\n parsedBody: {\n pageRange,\n clearRange,\n },\n },\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { delay } from \"@azure/core-http\";\nimport { PollOperation, PollOperationState, Poller } from \"@azure/core-lro\";\nimport { BlobClient, BlobStartCopyFromURLOptions, BlobBeginCopyFromURLResponse } from \"../Clients\";\n\n/**\n * Defines the operations from a {@link BlobClient} that are needed for the poller\n * returned by {@link BlobClient.beginCopyFromURL} to work.\n */\nexport type CopyPollerBlobClient = Pick & {\n startCopyFromURL(\n copySource: string,\n options?: BlobStartCopyFromURLOptions\n ): Promise;\n};\n\n/**\n * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}.\n *\n * This state is passed into the user-specified `onProgress` callback\n * whenever copy progress is detected.\n */\nexport interface BlobBeginCopyFromUrlPollState\n extends PollOperationState {\n /**\n * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}.\n */\n readonly blobClient: CopyPollerBlobClient;\n /**\n * The copyId that identifies the in-progress blob copy.\n */\n copyId?: string;\n /**\n * the progress of the blob copy as reported by the service.\n */\n copyProgress?: string;\n /**\n * The source URL provided in {@link BlobClient.beginCopyFromURL}.\n */\n copySource: string;\n /**\n * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call.\n * This is exposed for the poller and should not be modified directly.\n */\n readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * The PollOperation responsible for:\n * - performing the initial startCopyFromURL\n * - checking the copy status via getProperties\n * - cancellation via abortCopyFromURL\n * @hidden\n */\nexport interface BlobBeginCopyFromURLPollOperation\n extends PollOperation {}\n\n/**\n * The set of options used to configure the 
poller.\n * This is an internal interface populated by {@link BlobClient.beginCopyFromURL}.\n *\n * @hidden\n */\nexport interface BlobBeginCopyFromUrlPollerOptions {\n blobClient: CopyPollerBlobClient;\n copySource: string;\n intervalInMs?: number;\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n resumeFrom?: string;\n startCopyFromURLOptions?: BlobStartCopyFromURLOptions;\n}\n\n/**\n * This is the poller returned by {@link BlobClient.beginCopyFromURL}.\n * This can not be instantiated directly outside of this package.\n *\n * @hidden\n */\nexport class BlobBeginCopyFromUrlPoller extends Poller<\n BlobBeginCopyFromUrlPollState,\n BlobBeginCopyFromURLResponse\n> {\n public intervalInMs: number;\n\n constructor(options: BlobBeginCopyFromUrlPollerOptions) {\n const {\n blobClient,\n copySource,\n intervalInMs = 15000,\n onProgress,\n resumeFrom,\n startCopyFromURLOptions,\n } = options;\n\n let state: BlobBeginCopyFromUrlPollState | undefined;\n\n if (resumeFrom) {\n state = JSON.parse(resumeFrom).state;\n }\n\n const operation = makeBlobBeginCopyFromURLPollOperation({\n ...state,\n blobClient,\n copySource,\n startCopyFromURLOptions,\n });\n\n super(operation);\n\n if (typeof onProgress === \"function\") {\n this.onProgress(onProgress);\n }\n\n this.intervalInMs = intervalInMs;\n }\n\n public delay(): Promise {\n return delay(this.intervalInMs);\n }\n}\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst cancel: BlobBeginCopyFromURLPollOperation[\"cancel\"] = async function cancel(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n) {\n const state = this.state;\n const { copyId } = state;\n if (state.isCompleted) {\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n if (!copyId) {\n state.isCancelled = true;\n return makeBlobBeginCopyFromURLPollOperation(state);\n }\n\n // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call\n await state.blobClient.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n });\n state.isCancelled = true;\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst update: BlobBeginCopyFromURLPollOperation[\"update\"] = async function update(\n this: BlobBeginCopyFromURLPollOperation,\n options = {}\n): Promise {\n const state = this.state;\n const { blobClient, copySource, startCopyFromURLOptions } = state;\n\n if (!state.isStarted) {\n state.isStarted = true;\n const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions);\n\n // copyId is needed to abort\n state.copyId = result.copyId;\n if (result.copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n }\n } else if (!state.isCompleted) {\n try {\n const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal });\n const { copyStatus, copyProgress } = result;\n const prevCopyProgress = state.copyProgress;\n if (copyProgress) {\n state.copyProgress = copyProgress;\n }\n if (\n copyStatus === \"pending\" &&\n copyProgress !== prevCopyProgress &&\n typeof options.fireProgress === \"function\"\n ) {\n // trigger in setTimeout, or swallow error?\n options.fireProgress(state);\n } 
else if (copyStatus === \"success\") {\n state.result = result;\n state.isCompleted = true;\n } else if (copyStatus === \"failed\") {\n state.error = new Error(\n `Blob copy failed with reason: \"${result.copyStatusDescription || \"unknown\"}\"`\n );\n state.isCompleted = true;\n }\n } catch (err: any) {\n state.error = err;\n state.isCompleted = true;\n }\n }\n\n return makeBlobBeginCopyFromURLPollOperation(state);\n};\n\n/**\n * Note: Intentionally using function expression over arrow function expression\n * so that the function can be invoked with a different context.\n * This affects what `this` refers to.\n * @hidden\n */\nconst toString: BlobBeginCopyFromURLPollOperation[\"toString\"] = function toString(\n this: BlobBeginCopyFromURLPollOperation\n) {\n return JSON.stringify({ state: this.state }, (key, value) => {\n // remove blobClient from serialized state since a client can't be hydrated from this info.\n if (key === \"blobClient\") {\n return undefined;\n }\n return value;\n });\n};\n\n/**\n * Creates a poll operation given the provided state.\n * @hidden\n */\nfunction makeBlobBeginCopyFromURLPollOperation(\n state: BlobBeginCopyFromUrlPollState\n): BlobBeginCopyFromURLPollOperation {\n return {\n state: { ...state },\n cancel,\n toString,\n update,\n };\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * Range for Blob Service Operations.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations\n */\nexport interface Range {\n /**\n * StartByte, larger than or equal 0.\n */\n offset: number;\n /**\n * Optional. Count of bytes, larger than 0.\n * If not provided, will return bytes from offset to the end.\n */\n count?: number;\n}\n\n/**\n * Generate a range string. For example:\n *\n * \"bytes=255-\" or \"bytes=0-511\"\n *\n * @param iRange -\n */\nexport function rangeToString(iRange: Range): string {\n if (iRange.offset < 0) {\n throw new RangeError(`Range.offset cannot be smaller than 0.`);\n }\n if (iRange.count && iRange.count <= 0) {\n throw new RangeError(\n `Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`\n );\n }\n return iRange.count\n ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}`\n : `bytes=${iRange.offset}-`;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n// In browser, during webpack or browserify bundling, this module will be replaced by 'events'\n// https://github.com/Gozala/events\nimport { EventEmitter } from \"events\";\n\n/**\n * Operation is an async function to be executed and managed by Batch.\n */\nexport declare type Operation = () => Promise;\n\n/**\n * States for Batch.\n */\nenum BatchStates {\n Good,\n Error,\n}\n\n/**\n * Batch provides basic parallel execution with concurrency limits.\n * Will stop execute left operations when one of the executed operation throws an error.\n * But Batch cannot cancel ongoing operations, you need to cancel them by yourself.\n */\nexport class Batch {\n /**\n * Concurrency. 
Must be lager than 0.\n */\n private concurrency: number;\n\n /**\n * Number of active operations under execution.\n */\n private actives: number = 0;\n\n /**\n * Number of completed operations under execution.\n */\n private completed: number = 0;\n\n /**\n * Offset of next operation to be executed.\n */\n private offset: number = 0;\n\n /**\n * Operation array to be executed.\n */\n private operations: Operation[] = [];\n\n /**\n * States of Batch. When an error happens, state will turn into error.\n * Batch will stop execute left operations.\n */\n private state: BatchStates = BatchStates.Good;\n\n /**\n * A private emitter used to pass events inside this class.\n */\n private emitter: EventEmitter;\n\n /**\n * Creates an instance of Batch.\n * @param concurrency -\n */\n public constructor(concurrency: number = 5) {\n if (concurrency < 1) {\n throw new RangeError(\"concurrency must be larger than 0\");\n }\n this.concurrency = concurrency;\n this.emitter = new EventEmitter();\n }\n\n /**\n * Add a operation into queue.\n *\n * @param operation -\n */\n public addOperation(operation: Operation): void {\n this.operations.push(async () => {\n try {\n this.actives++;\n await operation();\n this.actives--;\n this.completed++;\n this.parallelExecute();\n } catch (error: any) {\n this.emitter.emit(\"error\", error);\n }\n });\n }\n\n /**\n * Start execute operations in the queue.\n *\n */\n public async do(): Promise {\n if (this.operations.length === 0) {\n return Promise.resolve();\n }\n\n this.parallelExecute();\n\n return new Promise((resolve, reject) => {\n this.emitter.on(\"finish\", resolve);\n\n this.emitter.on(\"error\", (error) => {\n this.state = BatchStates.Error;\n reject(error);\n });\n });\n }\n\n /**\n * Get next operation to be executed. Return null when reaching ends.\n *\n */\n private nextOperation(): Operation | null {\n if (this.offset < this.operations.length) {\n return this.operations[this.offset++];\n }\n return null;\n }\n\n /**\n * Start execute operations. 
One one the most important difference between\n * this method with do() is that do() wraps as an sync method.\n *\n */\n private parallelExecute(): void {\n if (this.state === BatchStates.Error) {\n return;\n }\n\n if (this.completed >= this.operations.length) {\n this.emitter.emit(\"finish\");\n return;\n }\n\n while (this.actives < this.concurrency) {\n const operation = this.nextOperation();\n if (operation) {\n operation();\n } else {\n return;\n }\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { Readable, ReadableOptions } from \"stream\";\n\n/**\n * Options to configure the BuffersStream.\n */\nexport interface BuffersStreamOptions extends ReadableOptions {}\n\n/**\n * This class generates a readable stream from the data in an array of buffers.\n */\nexport class BuffersStream extends Readable {\n /**\n * The offset of data to be read in the current buffer.\n */\n private byteOffsetInCurrentBuffer: number;\n\n /**\n * The index of buffer to be read in the array of buffers.\n */\n private bufferIndex: number;\n\n /**\n * The total length of data already read.\n */\n private pushedBytesLength: number;\n\n /**\n * Creates an instance of BuffersStream that will emit the data\n * contained in the array of buffers.\n *\n * @param buffers - Array of buffers containing the data\n * @param byteLength - The total length of data contained in the buffers\n */\n constructor(\n private buffers: Buffer[],\n private byteLength: number,\n options?: BuffersStreamOptions\n ) {\n super(options);\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex = 0;\n this.pushedBytesLength = 0;\n\n // check byteLength is no larger than buffers[] total length\n let buffersLength = 0;\n for (const buf of this.buffers) {\n buffersLength += buf.byteLength;\n }\n if (buffersLength < this.byteLength) {\n throw new Error(\"Data size shouldn't be larger than the total length of buffers.\");\n }\n }\n\n /**\n * Internal _read() that will be called when the stream wants to pull more data in.\n *\n * @param size - Optional. 
The size of data to be read\n */\n public _read(size?: number) {\n if (this.pushedBytesLength >= this.byteLength) {\n this.push(null);\n }\n\n if (!size) {\n size = this.readableHighWaterMark;\n }\n\n const outBuffers: Buffer[] = [];\n let i = 0;\n while (i < size && this.pushedBytesLength < this.byteLength) {\n // The last buffer may be longer than the data it contains.\n const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength;\n const remainingCapacityInThisBuffer =\n this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer;\n const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers);\n if (remaining > size - i) {\n // chunkSize = size - i\n const end = this.byteOffsetInCurrentBuffer + size - i;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n this.pushedBytesLength += size - i;\n this.byteOffsetInCurrentBuffer = end;\n i = size;\n break;\n } else {\n // chunkSize = remaining\n const end = this.byteOffsetInCurrentBuffer + remaining;\n outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end));\n if (remaining === remainingCapacityInThisBuffer) {\n // this.buffers[this.bufferIndex] used up, shift to next one\n this.byteOffsetInCurrentBuffer = 0;\n this.bufferIndex++;\n } else {\n this.byteOffsetInCurrentBuffer = end;\n }\n this.pushedBytesLength += remaining;\n i += remaining;\n }\n }\n\n if (outBuffers.length > 1) {\n this.push(Buffer.concat(outBuffers));\n } else if (outBuffers.length === 1) {\n this.push(outBuffers[0]);\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { BuffersStream } from \"./BuffersStream\";\nimport { Readable } from \"stream\";\n\n/**\n * maxBufferLength is max size of each buffer in the pooled buffers.\n */\n// Can't use import as Typescript doesn't recognize \"buffer\".\nconst maxBufferLength = require(\"buffer\").constants.MAX_LENGTH;\n\n/**\n * This class provides a buffer container which conceptually has no hard size limit.\n * It accepts a capacity, an array of input buffers and the total length of input data.\n * It will allocate an internal \"buffer\" of the capacity and fill the data in the input buffers\n * into the internal \"buffer\" serially with respect to the total length.\n * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream\n * assembled from all the data in the internal \"buffer\".\n */\nexport class PooledBuffer {\n /**\n * Internal buffers used to keep the data.\n * Each buffer has a length of the maxBufferLength except last one.\n */\n private buffers: Buffer[] = [];\n\n /**\n * The total size of internal buffers.\n */\n private readonly capacity: number;\n\n /**\n * The total size of data contained in internal buffers.\n */\n private _size: number;\n\n /**\n * The size of the data contained in the pooled buffers.\n */\n public get size(): number {\n return this._size;\n }\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated but contains no data.\n * Users may call the {@link PooledBuffer.fill} method to fill this\n * pooled buffer with data.\n *\n * @param capacity - Total capacity of the internal buffers\n */\n constructor(capacity: number);\n\n /**\n * Creates an instance of PooledBuffer with given capacity.\n * Internal buffers are allocated and filled with data in the input buffers serially\n * with respect to the total length.\n *\n * @param capacity - Total capacity 
of the internal buffers\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n */\n constructor(capacity: number, buffers: Buffer[], totalLength: number);\n constructor(capacity: number, buffers?: Buffer[], totalLength?: number) {\n this.capacity = capacity;\n this._size = 0;\n\n // allocate\n const bufferNum = Math.ceil(capacity / maxBufferLength);\n for (let i = 0; i < bufferNum; i++) {\n let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength;\n if (len === 0) {\n len = maxBufferLength;\n }\n this.buffers.push(Buffer.allocUnsafe(len));\n }\n\n if (buffers) {\n this.fill(buffers, totalLength!);\n }\n }\n\n /**\n * Fill the internal buffers with data in the input buffers serially\n * with respect to the total length and the total capacity of the internal buffers.\n * Data copied will be shift out of the input buffers.\n *\n * @param buffers - Input buffers containing the data to be filled in the pooled buffer\n * @param totalLength - Total length of the data to be filled in.\n *\n */\n public fill(buffers: Buffer[], totalLength: number) {\n this._size = Math.min(this.capacity, totalLength);\n\n let i = 0,\n j = 0,\n targetOffset = 0,\n sourceOffset = 0,\n totalCopiedNum = 0;\n while (totalCopiedNum < this._size) {\n const source = buffers[i];\n const target = this.buffers[j];\n const copiedNum = source.copy(target, targetOffset, sourceOffset);\n\n totalCopiedNum += copiedNum;\n sourceOffset += copiedNum;\n targetOffset += copiedNum;\n if (sourceOffset === source.length) {\n i++;\n sourceOffset = 0;\n }\n if (targetOffset === target.length) {\n j++;\n targetOffset = 0;\n }\n }\n\n // clear copied from source buffers\n buffers.splice(0, i);\n if (buffers.length > 0) {\n buffers[0] = buffers[0].slice(sourceOffset);\n }\n }\n\n /**\n * Get the readable stream assembled from all the data in the internal buffers.\n *\n */\n public getReadableStream(): Readable {\n return new BuffersStream(this.buffers, this.size);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { EventEmitter } from \"events\";\nimport { Readable } from \"stream\";\nimport { PooledBuffer } from \"./PooledBuffer\";\n\n/**\n * OutgoingHandler is an async function triggered by BufferScheduler.\n */\nexport declare type OutgoingHandler = (\n body: () => NodeJS.ReadableStream,\n length: number,\n offset?: number\n) => Promise;\n\n/**\n * This class accepts a Node.js Readable stream as input, and keeps reading data\n * from the stream into the internal buffer structure, until it reaches maxBuffers.\n * Every available buffer will try to trigger outgoingHandler.\n *\n * The internal buffer structure includes an incoming buffer array, and a outgoing\n * buffer array. The incoming buffer array includes the \"empty\" buffers can be filled\n * with new incoming data. The outgoing array includes the filled buffers to be\n * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize.\n *\n * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING\n *\n * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * 1. Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n * 2. 
concurrency should set a smaller value than maxBuffers, which is helpful to\n * reduce the possibility when a outgoing handler waits for the stream data.\n * in this situation, outgoing handlers are blocked.\n * Outgoing queue shouldn't be empty.\n */\nexport class BufferScheduler {\n /**\n * Size of buffers in incoming and outgoing queues. This class will try to align\n * data read from Readable stream into buffer chunks with bufferSize defined.\n */\n private readonly bufferSize: number;\n\n /**\n * How many buffers can be created or maintained.\n */\n private readonly maxBuffers: number;\n\n /**\n * A Node.js Readable stream.\n */\n private readonly readable: Readable;\n\n /**\n * OutgoingHandler is an async function triggered by BufferScheduler when there\n * are available buffers in outgoing array.\n */\n private readonly outgoingHandler: OutgoingHandler;\n\n /**\n * An internal event emitter.\n */\n private readonly emitter: EventEmitter = new EventEmitter();\n\n /**\n * Concurrency of executing outgoingHandlers. (0 lesser than concurrency lesser than or equal to maxBuffers)\n */\n private readonly concurrency: number;\n\n /**\n * An internal offset marker to track data offset in bytes of next outgoingHandler.\n */\n private offset: number = 0;\n\n /**\n * An internal marker to track whether stream is end.\n */\n private isStreamEnd: boolean = false;\n\n /**\n * An internal marker to track whether stream or outgoingHandler returns error.\n */\n private isError: boolean = false;\n\n /**\n * How many handlers are executing.\n */\n private executingOutgoingHandlers: number = 0;\n\n /**\n * Encoding of the input Readable stream which has string data type instead of Buffer.\n */\n private encoding?: BufferEncoding;\n\n /**\n * How many buffers have been allocated.\n */\n private numBuffers: number = 0;\n\n /**\n * Because this class doesn't know how much data every time stream pops, which\n * is defined by highWaterMarker of the stream. 
So BufferScheduler will cache\n * data received from the stream, when data in unresolvedDataArray exceeds the\n * blockSize defined, it will try to concat a blockSize of buffer, fill into available\n * buffers from incoming and push to outgoing array.\n */\n private unresolvedDataArray: Buffer[] = [];\n\n /**\n * How much data consisted in unresolvedDataArray.\n */\n private unresolvedLength: number = 0;\n\n /**\n * The array includes all the available buffers can be used to fill data from stream.\n */\n private incoming: PooledBuffer[] = [];\n\n /**\n * The array (queue) includes all the buffers filled from stream data.\n */\n private outgoing: PooledBuffer[] = [];\n\n /**\n * Creates an instance of BufferScheduler.\n *\n * @param readable - A Node.js Readable stream\n * @param bufferSize - Buffer size of every maintained buffer\n * @param maxBuffers - How many buffers can be allocated\n * @param outgoingHandler - An async function scheduled to be\n * triggered when a buffer fully filled\n * with stream data\n * @param concurrency - Concurrency of executing outgoingHandlers (>0)\n * @param encoding - [Optional] Encoding of Readable stream when it's a string stream\n */\n constructor(\n readable: Readable,\n bufferSize: number,\n maxBuffers: number,\n outgoingHandler: OutgoingHandler,\n concurrency: number,\n encoding?: BufferEncoding\n ) {\n if (bufferSize <= 0) {\n throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`);\n }\n\n if (maxBuffers <= 0) {\n throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`);\n }\n\n if (concurrency <= 0) {\n throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`);\n }\n\n this.bufferSize = bufferSize;\n this.maxBuffers = maxBuffers;\n this.readable = readable;\n this.outgoingHandler = outgoingHandler;\n this.concurrency = concurrency;\n this.encoding = encoding;\n }\n\n /**\n * Start the scheduler, will return error when stream of any of the outgoingHandlers\n * returns error.\n *\n */\n public async do(): Promise {\n return new Promise((resolve, reject) => {\n this.readable.on(\"data\", (data) => {\n data = typeof data === \"string\" ? Buffer.from(data, this.encoding) : data;\n this.appendUnresolvedData(data);\n\n if (!this.resolveData()) {\n this.readable.pause();\n }\n });\n\n this.readable.on(\"error\", (err) => {\n this.emitter.emit(\"error\", err);\n });\n\n this.readable.on(\"end\", () => {\n this.isStreamEnd = true;\n this.emitter.emit(\"checkEnd\");\n });\n\n this.emitter.on(\"error\", (err) => {\n this.isError = true;\n this.readable.pause();\n reject(err);\n });\n\n this.emitter.on(\"checkEnd\", () => {\n if (this.outgoing.length > 0) {\n this.triggerOutgoingHandlers();\n return;\n }\n\n if (this.isStreamEnd && this.executingOutgoingHandlers === 0) {\n if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) {\n const buffer = this.shiftBufferFromUnresolvedDataArray();\n this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset)\n .then(resolve)\n .catch(reject);\n } else if (this.unresolvedLength >= this.bufferSize) {\n return;\n } else {\n resolve();\n }\n }\n });\n });\n }\n\n /**\n * Insert a new data into unresolved array.\n *\n * @param data -\n */\n private appendUnresolvedData(data: Buffer) {\n this.unresolvedDataArray.push(data);\n this.unresolvedLength += data.length;\n }\n\n /**\n * Try to shift a buffer with size in blockSize. 
The buffer returned may be less\n * than blockSize when data in unresolvedDataArray is less than bufferSize.\n *\n */\n private shiftBufferFromUnresolvedDataArray(buffer?: PooledBuffer): PooledBuffer {\n if (!buffer) {\n buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength);\n } else {\n buffer.fill(this.unresolvedDataArray, this.unresolvedLength);\n }\n\n this.unresolvedLength -= buffer.size;\n return buffer;\n }\n\n /**\n * Resolve data in unresolvedDataArray. For every buffer with size in blockSize\n * shifted, it will try to get (or allocate a buffer) from incoming, and fill it,\n * then push it into outgoing to be handled by outgoing handler.\n *\n * Return false when available buffers in incoming are not enough, else true.\n *\n * @returns Return false when buffers in incoming are not enough, else true.\n */\n private resolveData(): boolean {\n while (this.unresolvedLength >= this.bufferSize) {\n let buffer: PooledBuffer;\n\n if (this.incoming.length > 0) {\n buffer = this.incoming.shift()!;\n this.shiftBufferFromUnresolvedDataArray(buffer);\n } else {\n if (this.numBuffers < this.maxBuffers) {\n buffer = this.shiftBufferFromUnresolvedDataArray();\n this.numBuffers++;\n } else {\n // No available buffer, wait for buffer returned\n return false;\n }\n }\n\n this.outgoing.push(buffer);\n this.triggerOutgoingHandlers();\n }\n return true;\n }\n\n /**\n * Try to trigger a outgoing handler for every buffer in outgoing. Stop when\n * concurrency reaches.\n */\n private async triggerOutgoingHandlers() {\n let buffer: PooledBuffer | undefined;\n do {\n if (this.executingOutgoingHandlers >= this.concurrency) {\n return;\n }\n\n buffer = this.outgoing.shift();\n if (buffer) {\n this.triggerOutgoingHandler(buffer);\n }\n } while (buffer);\n }\n\n /**\n * Trigger a outgoing handler for a buffer shifted from outgoing.\n *\n * @param buffer -\n */\n private async triggerOutgoingHandler(buffer: PooledBuffer): Promise {\n const bufferLength = buffer.size;\n\n this.executingOutgoingHandlers++;\n this.offset += bufferLength;\n\n try {\n await this.outgoingHandler(\n () => buffer.getReadableStream(),\n bufferLength,\n this.offset - bufferLength\n );\n } catch (err: any) {\n this.emitter.emit(\"error\", err);\n return;\n }\n\n this.executingOutgoingHandlers--;\n this.reuseBuffer(buffer);\n this.emitter.emit(\"checkEnd\");\n }\n\n /**\n * Return buffer used by outgoing handler into incoming.\n *\n * @param buffer -\n */\n private reuseBuffer(buffer: PooledBuffer) {\n this.incoming.push(buffer);\n if (!this.isError && this.resolveData() && !this.isStreamEnd) {\n this.readable.resume();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport * as fs from \"fs\";\nimport * as util from \"util\";\n\n/**\n * Reads a readable stream into buffer. 
Fill the buffer from offset to end.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param offset - From which position in the buffer to be filled, inclusive\n * @param end - To which position in the buffer to be filled, exclusive\n * @param encoding - Encoding of the Readable stream\n */\nexport async function streamToBuffer(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n offset: number,\n end: number,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const count = end - offset; // Total amount of data needed in stream\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n if (pos >= count) {\n resolve();\n return;\n }\n\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n // How much data needed in this chunk\n const chunkLength = pos + chunk.length > count ? count - pos : chunk.length;\n\n buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength);\n pos += chunkLength;\n });\n\n stream.on(\"end\", () => {\n if (pos < count) {\n reject(\n new Error(\n `Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`\n )\n );\n }\n resolve();\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into buffer entirely.\n *\n * @param stream - A Node.js Readable stream\n * @param buffer - Buffer to be filled, length must greater than or equal to offset\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n * @throws `RangeError` If buffer size is not big enough.\n */\nexport async function streamToBuffer2(\n stream: NodeJS.ReadableStream,\n buffer: Buffer,\n encoding?: BufferEncoding\n): Promise {\n let pos = 0; // Position in stream\n const bufferSize = buffer.length;\n\n return new Promise((resolve, reject) => {\n stream.on(\"readable\", () => {\n let chunk = stream.read();\n if (!chunk) {\n return;\n }\n if (typeof chunk === \"string\") {\n chunk = Buffer.from(chunk, encoding);\n }\n\n if (pos + chunk.length > bufferSize) {\n reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`));\n return;\n }\n\n buffer.fill(chunk, pos, pos + chunk.length);\n pos += chunk.length;\n });\n\n stream.on(\"end\", () => {\n resolve(pos);\n });\n\n stream.on(\"error\", reject);\n });\n}\n\n/**\n * Reads a readable stream into a buffer.\n *\n * @param stream - A Node.js Readable stream\n * @param encoding - Encoding of the Readable stream\n * @returns with the count of bytes read.\n */\nexport async function streamToBuffer3(\n readableStream: NodeJS.ReadableStream,\n encoding?: BufferEncoding\n): Promise {\n return new Promise((resolve, reject) => {\n const chunks: Buffer[] = [];\n readableStream.on(\"data\", (data: Buffer | string) => {\n chunks.push(data instanceof Buffer ? data : Buffer.from(data, encoding));\n });\n readableStream.on(\"end\", () => {\n resolve(Buffer.concat(chunks));\n });\n readableStream.on(\"error\", reject);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Writes the content of a readstream to a local file. 
Returns a Promise which is completed after the file handle is closed.\n *\n * @param rs - The read stream.\n * @param file - Destination file path.\n */\nexport async function readStreamToLocalFile(\n rs: NodeJS.ReadableStream,\n file: string\n): Promise {\n return new Promise((resolve, reject) => {\n const ws = fs.createWriteStream(file);\n\n rs.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"error\", (err: Error) => {\n reject(err);\n });\n\n ws.on(\"close\", resolve);\n\n rs.pipe(ws);\n });\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Promisified version of fs.stat().\n */\nexport const fsStat = util.promisify(fs.stat);\n\nexport const fsCreateReadStream = fs.createReadStream;\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n generateUuid,\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n TransferProgressEvent,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PollerLike, PollOperationState } from \"@azure/core-lro\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { Readable } from \"stream\";\n\nimport { BlobDownloadResponse } from \"./BlobDownloadResponse\";\nimport { BlobQueryResponse } from \"./BlobQueryResponse\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from \"./generated/src/operations\";\nimport {\n AppendBlobAppendBlockFromUrlResponse,\n AppendBlobAppendBlockResponse,\n AppendBlobCreateResponse,\n BlobAbortCopyFromURLResponse,\n BlobCopyFromURLResponse,\n BlobCreateSnapshotResponse,\n BlobDeleteResponse,\n BlobDownloadOptionalParams,\n BlobDownloadResponseModel,\n BlobGetPropertiesResponseModel,\n BlobGetTagsHeaders,\n BlobSetHTTPHeadersResponse,\n BlobSetMetadataResponse,\n BlobSetTagsResponse,\n BlobSetTierResponse,\n BlobStartCopyFromURLResponse,\n BlobTags,\n BlobUndeleteResponse,\n BlockBlobCommitBlockListResponse,\n BlockBlobGetBlockListResponse,\n BlockBlobStageBlockFromURLResponse,\n BlockBlobStageBlockResponse,\n BlockBlobUploadHeaders,\n BlockBlobUploadResponse,\n BlockListType,\n CpkInfo,\n DeleteSnapshotsOptionType,\n LeaseAccessConditions,\n PageBlobClearPagesResponse,\n PageBlobCopyIncrementalResponse,\n PageBlobCreateResponse,\n PageBlobResizeResponse,\n PageBlobUpdateSequenceNumberResponse,\n PageBlobUploadPagesFromURLResponse,\n PageBlobUploadPagesResponse,\n RehydratePriority,\n SequenceNumberActionType,\n BlockBlobPutBlobFromUrlResponse,\n BlobHTTPHeaders,\n PageBlobGetPageRangesResponseModel,\n PageRangeInfo,\n PageBlobGetPageRangesDiffResponseModel,\n BlobCopySourceTags,\n} from \"./generatedModels\";\nimport {\n AppendBlobRequestConditions,\n BlobDownloadResponseParsed,\n BlobRequestConditions,\n BlockBlobTier,\n ensureCpkIfSpecified,\n Metadata,\n ObjectReplicationPolicy,\n PageBlobRequestConditions,\n PremiumPageBlobTier,\n Tags,\n toAccessTier,\n TagConditions,\n MatchConditions,\n ModificationConditions,\n ModifiedAccessConditions,\n BlobQueryArrowField,\n BlobImmutabilityPolicy,\n HttpAuthorization,\n} from \"./models\";\nimport {\n PageBlobGetPageRangesDiffResponse,\n PageBlobGetPageRangesResponse,\n rangeResponseFromModel,\n} from \"./PageBlobRangeResponse\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from 
\"./Pipeline\";\nimport {\n BlobBeginCopyFromUrlPoller,\n BlobBeginCopyFromUrlPollState,\n CopyPollerBlobClient,\n} from \"./pollers/BlobStartCopyFromUrlPoller\";\nimport { Range, rangeToString } from \"./Range\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { Batch } from \"./utils/Batch\";\nimport { BufferScheduler } from \"../../storage-common/src\";\nimport {\n BlobDoesNotUseCustomerSpecifiedEncryption,\n BlobUsesCustomerSpecifiedEncryptionMsg,\n BLOCK_BLOB_MAX_BLOCKS,\n BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES,\n BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES,\n DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES,\n DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS,\n ETagAny,\n URLConstants,\n} from \"./utils/constants\";\nimport { createSpan, convertTracingToRequestOptionsBase } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n ExtractPageRangeInfoItems,\n generateBlockID,\n getURLParameter,\n httpAuthorizationToString,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n setURLParameter,\n toBlobTags,\n toBlobTagsString,\n toQuerySerialization,\n toTags,\n} from \"./utils/utils.common\";\nimport {\n fsCreateReadStream,\n fsStat,\n readStreamToLocalFile,\n streamToBuffer,\n} from \"./utils/utils.node\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobSASPermissions } from \"./sas/BlobSASPermissions\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n BlobDeleteImmutabilityPolicyResponse,\n BlobSetImmutabilityPolicyResponse,\n BlobSetLegalHoldResponse,\n} from \"./generatedModels\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions {\n /**\n * The amount of time in milliseconds the poller should wait between\n * calls to the service to determine the status of the Blob copy.\n * Defaults to 15 seconds.\n */\n intervalInMs?: number;\n /**\n * Callback to receive the state of the copy progress.\n */\n onProgress?: (state: BlobBeginCopyFromUrlPollState) => void;\n /**\n * Serialized poller state that can be used to resume polling from.\n * This may be useful when starting a copy on one process or thread\n * and you wish to continue polling on another process or thread.\n *\n * To get serialized poller state, call `poller.toString()` on an existing\n * poller.\n */\n resumeFrom?: string;\n}\n\n/**\n * Contains response data for the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse {}\n\n/**\n * Options to configure the {@link BlobClient.download} operation.\n */\nexport interface BlobDownloadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve.\n */\n snapshot?: string;\n /**\n * When this is set to true and download range of blob, the service returns the MD5 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * 
rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentMD5?: boolean;\n /**\n * When this is set to true and download range of blob, the service returns the CRC64 hash for the range,\n * as long as the range is less than or equal to 4 MB in size.\n *\n * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time.\n */\n rangeGetContentCrc64?: boolean;\n /**\n * Conditions to meet when downloading blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Call back to receive events on the progress of download operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original body download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional `FileClient.download()` request will be made\n * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached.\n *\n * Default value is 5, please set a larger value when loading large files in poor network.\n */\n maxRetryRequests?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.exists} operation.\n */\nexport interface BlobExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Conditions to meet.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting blob properties.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.delete} operation.\n */\nexport interface BlobDeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies options to delete blobs that have associated snapshots.\n * - `include`: Delete the base blob and all of its snapshots.\n * - `only`: Delete only the blob's snapshots and not the blob itself.\n */\n deleteSnapshots?: DeleteSnapshotsOptionType;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.undelete} operation.\n */\nexport interface BlobUndeleteOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n 
abortSignal?: AbortSignalLike;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setHTTPHeaders} operation.\n */\nexport interface BlobSetHTTPHeadersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob HTTP headers.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure the {@link BlobClient.setMetadata} operation.\n */\nexport interface BlobSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting blob metadata.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.setTags} operation.\n */\nexport interface BlobSetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.getTags} operation.\n */\nexport interface BlobGetTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet for the blob to perform this operation.\n */\n conditions?: TagConditions & LeaseAccessConditions;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getTags} operation.\n */\nexport type BlobGetTagsResponse = { tags: Tags } & BlobGetTagsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: BlobGetTagsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: BlobTags;\n };\n };\n\n/**\n * Options to configure Blob - Acquire Lease operation.\n */\nexport interface BlobAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to 
configure Blob - Release Lease operation.\n */\nexport interface BlobReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Renew Lease operation.\n */\nexport interface BlobRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Change Lease operation.\n */\nexport interface BlobChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Blob - Break Lease operation.\n */\nexport interface BlobBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease of a blob.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.createSnapshot} operation.\n */\nexport interface BlobCreateSnapshotOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet when creating blob snapshots.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link BlobClient.beginCopyFromURL} operation.\n */\nexport interface BlobStartCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the blob that are being copied.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | PremiumPageBlobTier | string;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Overrides the sealed state of the destination blob. Default true.\n */\n sealBlob?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobClient.abortCopyFromURL} operation.\n */\nexport interface BlobAbortCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure the {@link BlobClient.syncCopyFromURL} operation.\n */\nexport interface BlobSyncCopyFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the snapshot.\n */\n metadata?: Metadata;\n /**\n * Conditions to meet for the destination blob when copying from a URL to the blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Optional. 
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Options to configure the {@link BlobClient.setAccessTier} operation.\n */\nexport interface BlobSetTierOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n /**\n * Rehydrate Priority - possible values include 'High', 'Standard'.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier\n */\n rehydratePriority?: RehydratePriority;\n}\n\n/**\n * Option interface for the {@link BlobClient.downloadToBuffer} operation.\n */\nexport interface BlobDownloadToBufferOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * blockSize is the data every request trying to download.\n * Must be greater than or equal to 0.\n * If set to 0 or undefined, blockSize will automatically calculated according to the blob size.\n */\n blockSize?: number;\n\n /**\n * Optional. 
ONLY AVAILABLE IN NODE.JS.\n *\n * How many retries will perform when original block download stream unexpected ends.\n * Above kind of ends will not trigger retry policy defined in a pipeline,\n * because they doesn't emit network errors.\n *\n * With this option, every additional retry means an additional FileClient.download() request will be made\n * from the broken point, until the requested block has been successfully downloaded or\n * maxRetryRequestsPerBlock is reached.\n *\n * Default value is 5, please set a larger value when in poor network.\n */\n maxRetryRequestsPerBlock?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel download.\n */\n concurrency?: number;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Contains response data for the {@link BlobClient.deleteIfExists} operation.\n */\nexport interface BlobDeleteIfExistsResponse extends BlobDeleteResponse {\n /**\n * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link BlobClient.getProperties} operation.\n */\nexport interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel {\n /**\n * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob.\n */\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n\n /**\n * Object Replication Policy Id of the destination blob.\n */\n objectReplicationDestinationPolicyId?: string;\n}\n\n/**\n * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}.\n */\nexport interface CommonGenerateSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols, HTTPS only or HTTPSandHTTP\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * Optional only when identifier is provided. The time after which the SAS will no longer work.\n */\n expiresOn?: Date;\n\n /**\n * Optional. IP ranges allowed in this SAS.\n */\n ipRange?: SasIPRange;\n\n /**\n * Optional. The name of the access policy on the container this SAS references if any.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy\n */\n identifier?: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n\n /**\n * Optional. The cache-control header for the SAS.\n */\n cacheControl?: string;\n\n /**\n * Optional. The content-disposition header for the SAS.\n */\n contentDisposition?: string;\n\n /**\n * Optional. The content-encoding header for the SAS.\n */\n contentEncoding?: string;\n\n /**\n * Optional. The content-language header for the SAS.\n */\n contentLanguage?: string;\n\n /**\n * Optional. The content-type header for the SAS.\n */\n contentType?: string;\n}\n\n/**\n * Options to configure {@link BlobClient.generateSasUrl} operation.\n */\nexport interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. 
Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: BlobSASPermissions;\n}\n\n/**\n * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation.\n */\nexport interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation.\n */\nexport interface BlobSetImmutabilityPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n modifiedAccessCondition?: ModificationConditions;\n}\n\n/**\n * Options for setting legal hold {@link BlobClient.setLegalHold} operation.\n */\nexport interface BlobSetLegalHoldOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob,\n * append blob, or page blob.\n */\nexport class BlobClient extends StorageClient {\n /**\n * blobContext provided by protocol layer.\n */\n private blobContext: StorageBlob;\n\n private _name: string;\n private _containerName: string;\n\n private _versionId?: string;\n private _snapshot?: string;\n\n /**\n * The name of the blob.\n */\n public get name(): string {\n return this._name;\n }\n\n /**\n * The name of the storage container the blob is associated with.\n */\n public get containerName(): string {\n return this._containerName;\n }\n\n /**\n *\n * Creates an instance of BlobClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? 
or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n options = options || {};\n let pipeline: PipelineLike;\n let url: string;\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n\n super(url, pipeline);\n ({ blobName: this._name, containerName: this._containerName } =\n this.getBlobAndContainerNamesFromUrl());\n this.blobContext = new StorageBlob(this.storageClientContext);\n\n this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT) as string;\n this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID) as string;\n }\n\n /**\n * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * 
@param snapshot - The snapshot timestamp.\n * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp\n */\n public withSnapshot(snapshot: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a new BlobClient object pointing to a version of this blob.\n * Provide \"\" will remove the versionId and return a Client to the base blob.\n *\n * @param versionId - The versionId.\n * @returns A new BlobClient object pointing to the version of this blob.\n */\n public withVersion(versionId: string): BlobClient {\n return new BlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.VERSIONID,\n versionId.length === 0 ? undefined : versionId\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a AppendBlobClient object.\n *\n */\n public getAppendBlobClient(): AppendBlobClient {\n return new AppendBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a BlockBlobClient object.\n *\n */\n public getBlockBlobClient(): BlockBlobClient {\n return new BlockBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Creates a PageBlobClient object.\n *\n */\n public getPageBlobClient(): PageBlobClient {\n return new PageBlobClient(this.url, this.pipeline);\n }\n\n /**\n * Reads or downloads a blob from the system, including its metadata and properties.\n * You can also call Get Blob to read a snapshot.\n *\n * * In Node.js, data returns in a Readable stream readableStreamBody\n * * In browsers, data returns in a promise blobBody\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob\n *\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Optional options to Blob Download operation.\n *\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);\n * console.log(\"Downloaded blob content:\", downloaded.toString());\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * Example usage (browser):\n *\n * ```js\n * // Download and convert a blob to a string\n * const downloadBlockBlobResponse = await blobClient.download();\n * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);\n * console.log(\n * \"Downloaded blob content\",\n * downloaded\n * );\n *\n * async function blobToString(blob: Blob): Promise {\n * const fileReader = new FileReader();\n * return new Promise((resolve, reject) => {\n * fileReader.onloadend = (ev: any) => {\n * resolve(ev.target!.result);\n * };\n * fileReader.onerror = reject;\n * fileReader.readAsText(blob);\n * });\n * }\n * ```\n */\n public async download(\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlobClient-download\", options);\n\n try {\n const res = await this.blobContext.download({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream\n },\n range: offset === 0 && !count ? undefined : rangeToString({ offset, count }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedRes = {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n // Return browser response immediately\n if (!isNode) {\n return wrappedRes;\n }\n\n // We support retrying when download stream unexpected ends in Node.js runtime\n // Following code shouldn't be bundled into browser build, however some\n // bundlers may try to bundle following code and \"FileReadResponse.ts\".\n // In this case, \"FileDownloadResponse.browser.ts\" will be used as a shim of \"FileDownloadResponse.ts\"\n // The config is in package.json \"browser\" field\n if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) {\n // TODO: Default value or make it a required parameter?\n options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;\n }\n\n if (res.contentLength === undefined) {\n throw new RangeError(`File download response doesn't contain valid content length header`);\n }\n\n if (!res.etag) {\n throw new RangeError(`File download response doesn't contain valid etag header`);\n }\n\n return new BlobDownloadResponse(\n wrappedRes,\n async (start: number): Promise => {\n const updatedDownloadOptions: BlobDownloadOptionalParams = {\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ifMatch: options.conditions!.ifMatch || res.etag,\n ifModifiedSince: options.conditions!.ifModifiedSince,\n ifNoneMatch: options.conditions!.ifNoneMatch,\n ifUnmodifiedSince: options.conditions!.ifUnmodifiedSince,\n 
ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({\n count: offset + res.contentLength! - start,\n offset: start,\n }),\n rangeGetContentMD5: options.rangeGetContentMD5,\n rangeGetContentCRC64: options.rangeGetContentCrc64,\n snapshot: options.snapshot,\n cpkInfo: options.customerProvidedKey,\n };\n\n // Debug purpose only\n // console.log(\n // `Read from internal stream, range: ${\n // updatedOptions.range\n // }, options: ${JSON.stringify(updatedOptions)}`\n // );\n\n return (\n await this.blobContext.download({\n abortSignal: options.abortSignal,\n ...updatedDownloadOptions,\n })\n ).readableStreamBody!;\n },\n offset,\n res.contentLength!,\n {\n maxRetryRequests: options.maxRetryRequests,\n onProgress: options.onProgress,\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure blob resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing blob might be deleted by other clients or\n * applications. Vice versa new blobs might be added by other clients or applications after this\n * function completes.\n *\n * @param options - options to Exists operation.\n */\n public async exists(options: BlobExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-exists\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n await this.getProperties({\n abortSignal: options.abortSignal,\n customerProvidedKey: options.customerProvidedKey,\n conditions: options.conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n // Expected exception when checking blob existence\n return false;\n } else if (\n e.statusCode === 409 &&\n (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg ||\n e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)\n ) {\n // Expected exception when checking blob existence\n return true;\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns all user-defined metadata, standard HTTP properties, and system properties\n * for the blob. It does not return the content of the blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. 
This differs from the metadata keys returned by\n * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Optional options to Get Properties operation.\n */\n public async getProperties(\n options: BlobGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getProperties\", options);\n try {\n options.conditions = options.conditions || {};\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const res = await this.blobContext.getProperties({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n return {\n ...res,\n _response: res._response, // _response is made non-enumerable\n objectReplicationDestinationPolicyId: res.objectReplicationPolicyId,\n objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules),\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async delete(options: BlobDeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-delete\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.blobContext.delete({\n abortSignal: options.abortSignal,\n deleteSnapshots: options.deleteSnapshots,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param options - Optional options to Blob Delete operation.\n */\n public async deleteIfExists(\n options: BlobDeleteOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteIfExists\", options);\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a blob or snapshot only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restores the contents and metadata of soft deleted blob and any associated\n * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29\n * or later.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob\n *\n * @param options - Optional options to Blob Undelete operation.\n */\n public async undelete(options: BlobUndeleteOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-undelete\", options);\n try {\n return await this.blobContext.undelete({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets system properties on the blob.\n *\n * If no value provided, or no value provided for the specified blob HTTP headers,\n * these blob HTTP headers without a value will be cleared.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param blobHTTPHeaders - If no value provided, or no value provided for\n * the specified blob HTTP headers, these blob HTTP\n * headers without a value will be cleared.\n * A common header to set is `blobContentType`\n * enabling the browser to provide functionality\n * based on file type.\n * @param options - Optional options to Blob Set HTTP Headers operation.\n */\n public async setHTTPHeaders(\n blobHTTPHeaders?: BlobHTTPHeaders,\n options: BlobSetHTTPHeadersOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setHTTPHeaders\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setHttpHeaders({\n abortSignal: options.abortSignal,\n blobHttpHeaders: blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger.\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets user-defined metadata for the specified blob as one or more name-value pairs.\n *\n * If no option provided, or no metadata defined in the 
parameter, the blob\n * metadata will be removed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Optional options to Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: BlobSetMetadataOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setMetadata\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets tags on the underlying blob.\n * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.\n * Valid tag key and value characters include lower and upper case letters, digits (0-9),\n * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_').\n *\n * @param tags -\n * @param options -\n */\n public async setTags(tags: Tags, options: BlobSetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setTags\", options);\n try {\n return await this.blobContext.setTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n tags: toBlobTags(tags),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the tags associated with the underlying blob.\n *\n * @param options -\n */\n public async getTags(options: BlobGetTagsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-getTags\", options);\n try {\n const response = await this.blobContext.getTags({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n const wrappedResponse: BlobGetTagsResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n tags: toTags({ blobTagSet: response.blobTagSet }) || {},\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the blob.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the blob.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a read-only 
snapshot of a blob.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob\n *\n * @param options - Optional options to the Blob Create Snapshot operation.\n */\n public async createSnapshot(\n options: BlobCreateSnapshotOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-createSnapshot\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blobContext.createSnapshot({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * This method returns a long running operation poller that allows you to wait\n * indefinitely until the copy is completed.\n * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.\n * Note that the onProgress callback will not be invoked if the operation completes in the first\n * request, and attempting to cancel a completed copy will result in an error being thrown.\n *\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * Example using automatic polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using manual polling:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * while (!poller.isDone()) {\n * await poller.poll();\n * }\n * const result = copyPoller.getResult();\n * ```\n *\n * Example using progress updates:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * onProgress(state) {\n * console.log(`Progress: ${state.copyProgress}`);\n * }\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using a changing polling interval (default 15 seconds):\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url', {\n * intervalInMs: 1000 // poll blob every 1 second for copy progress\n * });\n * const result = await copyPoller.pollUntilDone();\n * ```\n *\n * Example using copy cancellation:\n *\n * ```js\n * const copyPoller = await blobClient.beginCopyFromURL('url');\n * // cancel operation after starting it.\n * try {\n * await copyPoller.cancelOperation();\n * // calls to get the result now throw PollerCancelledError\n * await copyPoller.getResult();\n * } catch (err) {\n * if (err.name === 'PollerCancelledError') {\n * console.log('The copy was cancelled.');\n * }\n * }\n * ```\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start 
Copy From URL operation.\n */\n public async beginCopyFromURL(\n copySource: string,\n options: BlobBeginCopyFromURLOptions = {}\n ): Promise<\n PollerLike, BlobBeginCopyFromURLResponse>\n > {\n const client: CopyPollerBlobClient = {\n abortCopyFromURL: (...args) => this.abortCopyFromURL(...args),\n getProperties: (...args) => this.getProperties(...args),\n startCopyFromURL: (...args) => this.startCopyFromURL(...args),\n };\n const poller = new BlobBeginCopyFromUrlPoller({\n blobClient: client,\n copySource,\n intervalInMs: options.intervalInMs,\n onProgress: options.onProgress,\n resumeFrom: options.resumeFrom,\n startCopyFromURLOptions: options,\n });\n\n // Trigger the startCopyFromURL call by calling poll.\n // Any errors from this method should be surfaced to the user.\n await poller.poll();\n\n return poller;\n }\n\n /**\n * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero\n * length and full metadata. Version 2012-02-12 and newer.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob\n *\n * @param copyId - Id of the Copy From URL operation.\n * @param options - Optional options to the Blob Abort Copy From URL operation.\n */\n public async abortCopyFromURL(\n copyId: string,\n options: BlobAbortCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-abortCopyFromURL\", options);\n try {\n return await this.blobContext.abortCopyFromURL(copyId, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. 
It will not\n * return a response until the copy is complete.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url\n *\n * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication\n * @param options -\n */\n public async syncCopyFromURL(\n copySource: string,\n options: BlobSyncCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-syncCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.copyFromURL(copySource, {\n abortSignal: options.abortSignal,\n metadata: options.metadata,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n sourceContentMD5: options.sourceContentMD5,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n blobTagsString: toBlobTagsString(options.tags),\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n encryptionScope: options.encryptionScope,\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the tier on a blob. The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier\n *\n * @param tier - The tier to be set on the blob. 
Valid values are Hot, Cool, or Archive.\n * @param options - Optional options to the Blob Set Tier operation.\n */\n public async setAccessTier(\n tier: BlockBlobTier | PremiumPageBlobTier | string,\n options: BlobSetTierOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setAccessTier\", options);\n try {\n return await this.blobContext.setTier(toAccessTier(tier)!, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n rehydratePriority: options.rehydratePriority,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level function\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob in parallel to a buffer.\n * Offset and count are optional, downloads the entire blob if they are not provided.\n *\n * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two\n * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,\n * consider {@link downloadToFile}.\n *\n * @param buffer - Buffer to be fill, must have length larger than count\n * @param offset - From which position of the block blob to download(in bytes)\n * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined\n * @param options - BlobDownloadToBufferOptions\n */\n public async downloadToBuffer(\n buffer: Buffer,\n offset?: number,\n count?: number,\n options?: BlobDownloadToBufferOptions\n ): Promise;\n\n public async downloadToBuffer(\n param1?: Buffer | number,\n param2?: number,\n param3?: BlobDownloadToBufferOptions | number,\n param4: BlobDownloadToBufferOptions = {}\n ): Promise {\n let buffer: Buffer | undefined;\n let offset = 0;\n let count = 0;\n let options = param4;\n if (param1 instanceof Buffer) {\n buffer = param1;\n offset = param2 || 0;\n count = typeof param3 === \"number\" ? param3 : 0;\n } else {\n offset = typeof param1 === \"number\" ? param1 : 0;\n count = typeof param2 === \"number\" ? 
param2 : 0;\n options = (param3 as BlobDownloadToBufferOptions) || {};\n }\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToBuffer\", options);\n\n try {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0) {\n throw new RangeError(\"blockSize option must be >= 0\");\n }\n if (options.blockSize === 0) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n\n if (offset < 0) {\n throw new RangeError(\"offset option must be >= 0\");\n }\n\n if (count && count <= 0) {\n throw new RangeError(\"count option must be greater than 0\");\n }\n\n if (!options.conditions) {\n options.conditions = {};\n }\n\n // Customer doesn't specify length, get it\n if (!count) {\n const response = await this.getProperties({\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n count = response.contentLength! - offset;\n if (count < 0) {\n throw new RangeError(\n `offset ${offset} shouldn't be larger than blob size ${response.contentLength!}`\n );\n }\n }\n\n // Allocate the buffer of size = count if the buffer is not provided\n if (!buffer) {\n try {\n buffer = Buffer.alloc(count);\n } catch (error: any) {\n throw new Error(\n `Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the \"downloadToBuffer\" method or try using other methods like \"download\" or \"downloadToFile\".\\t ${error.message}`\n );\n }\n }\n\n if (buffer.length < count) {\n throw new RangeError(\n `The buffer's size should be equal to or larger than the request count of bytes: ${count}`\n );\n }\n\n let transferProgress: number = 0;\n const batch = new Batch(options.concurrency);\n for (let off = offset; off < offset + count; off = off + options.blockSize) {\n batch.addOperation(async () => {\n // Exclusive chunk end position\n let chunkEnd = offset + count!;\n if (off + options.blockSize! < chunkEnd) {\n chunkEnd = off + options.blockSize!;\n }\n const response = await this.download(off, chunkEnd - off, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n maxRetryRequests: options.maxRetryRequestsPerBlock,\n customerProvidedKey: options.customerProvidedKey,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n const stream = response.readableStreamBody!;\n await streamToBuffer(stream, buffer!, off - offset, chunkEnd - offset);\n // Update progress after block is downloaded, in case of block trying\n // Could provide finer grained progress updating inside HTTP requests,\n // only if convenience layer download try is enabled\n transferProgress += chunkEnd - off;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n });\n }\n await batch.do();\n return buffer;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Downloads an Azure Blob to a local file.\n * Fails if the the given file path already exits.\n * Offset and count are optional, pass 0 and undefined respectively to download the entire blob.\n *\n * @param filePath -\n * @param offset - From which position of the block blob to download.\n * @param count - How much data to be downloaded. 
Will download to the end when passing undefined.\n * @param options - Options to Blob download options.\n * @returns The response data for blob download operation,\n * but with readableStreamBody set to undefined since its\n * content is already read and written into a local file\n * at the specified path.\n */\n public async downloadToFile(\n filePath: string,\n offset: number = 0,\n count?: number,\n options: BlobDownloadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-downloadToFile\", options);\n try {\n const response = await this.download(offset, count, {\n ...options,\n tracingOptions: {\n ...options.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n if (response.readableStreamBody) {\n await readStreamToLocalFile(response.readableStreamBody, filePath);\n }\n\n // The stream is no longer accessible so setting it to undefined.\n (response as any).blobDownloadStream = undefined;\n return response;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n private getBlobAndContainerNamesFromUrl(): { blobName: string; containerName: string } {\n let containerName;\n let blobName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob`\n // http://localhost:10001/devstoreaccount1/containername/blob\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob\n // Single word domain without a [dot] in the endpoint... 
Example - http://localhost:10001/devstoreaccount1/containername/blob\n // .getPath() -> /devstoreaccount1/containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)/([^/]*)(/(.*))?\");\n containerName = pathComponents![2];\n blobName = pathComponents![4];\n } else {\n // \"https://customdomain.com/containername/blob\".\n // .getPath() -> /containername/blob\n const pathComponents = parsedUrl.getPath()!.match(\"/([^/]*)(/(.*))?\");\n containerName = pathComponents![1];\n blobName = pathComponents![3];\n }\n\n // decode the encoded blobName, containerName - to get all the special characters that might be present in them\n containerName = decodeURIComponent(containerName);\n blobName = decodeURIComponent(blobName);\n\n // Azure Storage Server will replace \"\\\" with \"/\" in the blob names\n // doing the same in the SDK side so that the user doesn't have to replace \"\\\" instances in the blobName\n blobName = blobName.replace(/\\\\/g, \"/\");\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return { blobName, containerName };\n } catch (error: any) {\n throw new Error(\"Unable to extract blobName and containerName with provided information.\");\n }\n }\n\n /**\n * Asynchronously copies a blob to a destination within the storage account.\n * In version 2012-02-12 and later, the source for a Copy Blob operation can be\n * a committed blob in any Azure storage account.\n * Beginning with version 2015-02-21, the source for a Copy Blob operation can be\n * an Azure file in any Azure storage account.\n * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob\n * operation to copy from another storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob\n *\n * @param copySource - url to the source Azure Blob/File.\n * @param options - Optional options to the Blob Start Copy From URL operation.\n */\n private async startCopyFromURL(\n copySource: string,\n options: BlobStartCopyFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-startCopyFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n\n try {\n return await this.blobContext.startCopyFromURL(copySource, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions.tagConditions,\n },\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n rehydratePriority: options.rehydratePriority,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n sealBlob: options.sealBlob,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Only available for BlobClient constructed with a shared key credential.\n *\n * Generates a Blob Service Shared 
Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: BlobGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n blobName: this._name,\n snapshotTime: this._snapshot,\n versionId: this._versionId,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Delete the immutablility policy on the blob.\n *\n * @param options - Optional options to delete immutability policy on the blob.\n */\n public async deleteImmutabilityPolicy(\n options?: BlobDeleteImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-deleteImmutabilityPolicy\", options);\n try {\n return await this.blobContext.deleteImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set immutablility policy on the blob.\n *\n * @param options - Optional options to set immutability policy on the blob.\n */\n public async setImmutabilityPolicy(\n immutabilityPolicy: BlobImmutabilityPolicy,\n options?: BlobSetImmutabilityPolicyOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setImmutabilityPolicy\", options);\n try {\n return await this.blobContext.setImmutabilityPolicy({\n abortSignal: options?.abortSignal,\n immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn,\n immutabilityPolicyMode: immutabilityPolicy.policyMode,\n modifiedAccessConditions: options?.modifiedAccessCondition,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Set legal hold on the blob.\n *\n * @param options - Optional options to set legal hold on the blob.\n */\n public async setLegalHold(\n legalHoldEnabled: boolean,\n options?: BlobSetLegalHoldOptions\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobClient-setLegalHold\", options);\n try {\n return await this.blobContext.setLegalHold(legalHoldEnabled, {\n abortSignal: options?.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link AppendBlobClient.create} operation.\n */\nexport interface AppendBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * 
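`generateSasUrl` only succeeds when the client was constructed with a `StorageSharedKeyCredential` (Node.js only). A short sketch; the account name, key, and blob URL are placeholders:

```ts
import {
  BlobClient,
  BlobSASPermissions,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

// Placeholder account name/key; generateSasUrl throws a RangeError unless the
// client holds a StorageSharedKeyCredential.
const credential = new StorageSharedKeyCredential("myaccount", "accountKeyPlaceholder");
const blobClient = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/myblob",
  credential
);

async function sasExample(): Promise<void> {
  const sasUrl = await blobClient.generateSasUrl({
    permissions: BlobSASPermissions.parse("r"),        // read-only
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),  // valid for one hour
  });
  console.log(sasUrl);
}

sasExample().catch(console.error);
```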
Conditions to meet when creating append blobs.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when creating append blobs. A common header\n * to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * HTTP headers to set when creating append blobs. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n}\n\n/**\n * Options to configure {@link AppendBlobClient.seal} operation.\n */\nexport interface AppendBlobSealOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet.\n */\n conditions?: AppendBlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlock} operation.\n */\nexport interface AppendBlobAppendBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Callback to receive events on the progress of append block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. 
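The option bags above feed the append-blob lifecycle calls. A hedged sketch of creating an append blob only if it does not exist and later sealing it; the container and blob names are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function appendBlobLifecycle(): Promise<void> {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // placeholder env var
  );
  const appendBlobClient = serviceClient
    .getContainerClient("logs")
    .getAppendBlobClient("2022-06-01.log");

  // AppendBlobCreateIfNotExistsOptions: metadata and HTTP headers are optional.
  const created = await appendBlobClient.createIfNotExists({
    metadata: { source: "example" },
    blobHTTPHeaders: { blobContentType: "text/plain" },
  });
  console.log(created.succeeded ? "created" : "already existed");

  // AppendBlobSealOptions: sealing makes the append blob read-only for further appends.
  await appendBlobClient.seal();
}

appendBlobLifecycle().catch(console.error);
```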
For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation.\n */\nexport interface AppendBlobAppendBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when appending append blob blocks.\n */\n conditions?: AppendBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the append block content from the URI.\n * This hash is used to verify the integrity of the append block during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link appendBlobClient.createIfNotExists} operation.\n */\nexport interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * AppendBlobClient defines a set of operations applicable to append blobs.\n */\nexport class AppendBlobClient extends BlobClient {\n /**\n * appendBlobsContext provided by protocol layer.\n */\n private appendBlobContext: AppendBlob;\n\n /**\n *\n * Creates an instance of AppendBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of AppendBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to an append blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage append blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString;\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n // The second parameter is undefined. 
Use anonymous credential.\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.appendBlobContext = new AppendBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new AppendBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): AppendBlobClient {\n return new AppendBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a 0-length append blob. 
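The constructor overloads above all resolve to a URL plus a pipeline. A sketch of the three construction paths; the connection string, account key, and SAS token are placeholders:

```ts
import {
  AnonymousCredential,
  AppendBlobClient,
  StorageSharedKeyCredential,
  newPipeline,
} from "@azure/storage-blob";

// 1. Connection string + container + blob (account connection strings are Node.js only).
const fromConnString = new AppendBlobClient(
  process.env.AZURE_STORAGE_CONNECTION_STRING!, // placeholder
  "mycontainer",
  "myappendblob"
);

// 2. URL + credential.
const fromCredential = new AppendBlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/myappendblob",
  new StorageSharedKeyCredential("myaccount", "accountKeyPlaceholder")
);

// 3. URL carrying a SAS + an explicit pipeline.
const pipeline = newPipeline(new AnonymousCredential());
const fromPipeline = new AppendBlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/myappendblob?sasString",
  pipeline
);

console.log(fromConnString.url, fromCredential.url, fromPipeline.url);
```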
Call AppendBlock to append data to an append blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options - Options to the Append Block Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const appendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await appendBlobClient.create();\n * ```\n */\n public async create(options: AppendBlobCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-create\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.create(0, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a 0-length append blob. Call AppendBlock to append data to an append blob.\n * If the blob with the same name already exists, the content of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param options -\n */\n public async createIfNotExists(\n options: AppendBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-createIfNotExists\", options);\n const conditions = { ifNoneMatch: ETagAny };\n try {\n const res = await this.create({\n ...updatedOptions,\n conditions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Seals the append blob, making it read only.\n *\n * @param options -\n */\n public async seal(options: AppendBlobSealOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-seal\", options);\n options.conditions = options.conditions || {};\n try {\n return await this.appendBlobContext.seal({\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Commits a new block of data to the end of the existing append blob.\n * @see 
https://docs.microsoft.com/rest/api/storageservices/append-block\n *\n * @param body - Data to be appended.\n * @param contentLength - Length of the body in bytes.\n * @param options - Options to the Append Block operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello World!\";\n *\n * // Create a new append blob and append data to the blob.\n * const newAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await newAppendBlobClient.create();\n * await newAppendBlobClient.appendBlock(content, content.length);\n *\n * // Append data to an existing append blob.\n * const existingAppendBlobClient = containerClient.getAppendBlobClient(\"\");\n * await existingAppendBlobClient.appendBlock(content, content.length);\n * ```\n */\n public async appendBlock(\n body: HttpRequestBody,\n contentLength: number,\n options: AppendBlobAppendBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlock\", options);\n options.conditions = options.conditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlock(contentLength, body, {\n abortSignal: options.abortSignal,\n appendPositionAccessConditions: options.conditions,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Append Block operation commits a new block of data to the end of an existing append blob\n * where the contents are read from a source url.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url\n *\n * @param sourceURL -\n * The url to the blob that will be the source of the copy. A source blob in the same storage account can\n * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob\n * must either be public or must be authenticated via a shared access signature. 
If the source blob is\n * public, no authentication is required to perform the operation.\n * @param sourceOffset - Offset in source to be appended\n * @param count - Number of bytes to be appended as a block\n * @param options -\n */\n public async appendBlockFromURL(\n sourceURL: string,\n sourceOffset: number,\n count: number,\n options: AppendBlobAppendBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"AppendBlobClient-appendBlockFromURL\", options);\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, {\n abortSignal: options.abortSignal,\n sourceRange: rangeToString({ offset: sourceOffset, count }),\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n appendPositionAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure {@link BlockBlobClient.upload} operation.\n */\nexport interface BlockBlobUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when uploading to a block blob. A common header to set is\n * `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when uploading to a block blob.\n */\n metadata?: Metadata;\n /**\n * Callback to receive events on the progress of upload operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Optional. 
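`appendBlockFromURL` has the service read the block directly from the copy source. A sketch appending the first 1 MiB of a SAS-protected source blob; all URLs and names are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function appendFromUrlExample(): Promise<void> {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // placeholder env var
  );
  const appendBlobClient = serviceClient
    .getContainerClient("logs")
    .getAppendBlobClient("aggregated.log");
  await appendBlobClient.createIfNotExists();

  // The source must be public or carry its own SAS token.
  await appendBlobClient.appendBlockFromURL(
    "https://otheraccount.blob.core.windows.net/src/chunk.log?sasString",
    0,             // sourceOffset in bytes
    1024 * 1024,   // count of bytes to append
    {
      // Optional guards from AppendBlobAppendBlockFromURLOptions.
      conditions: { maxSize: 100 * 1024 * 1024 },       // cap the destination size
      sourceConditions: { ifUnmodifiedSince: new Date() },
    }
  );
}

appendFromUrlExample().catch(console.error);
```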
Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation.\n */\nexport interface BlockBlobSyncUploadFromURLOptions extends CommonOptions {\n /**\n * Server timeout in seconds.\n * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations\n */\n timeoutInSeconds?: number;\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value\n * pairs are specified, the operation will copy the metadata from the source blob or file to the\n * destination blob. If one or more name-value pairs are specified, the destination blob is\n * created with the specified metadata, and metadata is not copied from the source blob or file.\n * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules\n * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more\n * information.\n */\n metadata?: Metadata;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n /**\n * Specify the md5 calculated for the range of bytes that must be read from the copy source.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * Blob tags.\n */\n tags?: Tags;\n /**\n * Optional, default is true. Indicates if properties from the source blob should be copied.\n */\n copySourceBlobProperties?: boolean;\n /**\n * HTTP headers to set when uploading to a block blob.\n *\n * A common header to set is `blobContentType`, enabling the browser to provide functionality\n * based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * Conditions to meet for the destination Azure Blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Conditions to meet for the source Azure Blob.\n */\n sourceConditions?: ModifiedAccessConditions;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n /**\n * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}.\n */\n copySourceTags?: BlobCopySourceTags;\n}\n\n/**\n * Blob query error type.\n */\nexport interface BlobQueryError {\n /**\n * Whether error is fatal. 
Fatal error will stop query.\n */\n isFatal: boolean;\n /**\n * Error name.\n */\n name: string;\n /**\n * Position in bytes of the query.\n */\n position: number;\n /**\n * Error description.\n */\n description: string;\n}\n\n/**\n * Options to query blob with JSON format.\n */\nexport interface BlobQueryJsonTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a JSON format blob.\n */\n kind: \"json\";\n}\n\n/**\n * Options to query blob with CSV format.\n */\nexport interface BlobQueryCsvTextConfiguration {\n /**\n * Record separator.\n */\n recordSeparator: string;\n /**\n * Query for a CSV format blob.\n */\n kind: \"csv\";\n /**\n * Column separator. Default is \",\".\n */\n columnSeparator?: string;\n /**\n * Field quote.\n */\n fieldQuote?: string;\n /**\n * Escape character.\n */\n escapeCharacter?: string;\n /**\n * Has headers. Default is false.\n */\n hasHeaders?: boolean;\n}\n\n/**\n * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}.\n */\nexport interface BlobQueryArrowConfiguration {\n /**\n * Kind.\n */\n kind: \"arrow\";\n\n /**\n * List of {@link BlobQueryArrowField} describing the schema of the data.\n */\n schema: BlobQueryArrowField[];\n}\n\n/**\n * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}.\n */\nexport interface BlobQueryParquetConfiguration {\n /**\n * Kind.\n */\n kind: \"parquet\";\n}\n\n/**\n * Options to configure {@link BlockBlobClient.query} operation.\n */\nexport interface BlockBlobQueryOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Configurations for the query input.\n */\n inputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryParquetConfiguration;\n /**\n * Configurations for the query output.\n */\n outputTextConfiguration?:\n | BlobQueryJsonTextConfiguration\n | BlobQueryCsvTextConfiguration\n | BlobQueryArrowConfiguration;\n /**\n * Callback to receive events on the progress of query operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * Callback to receive error events during the query operaiton.\n */\n onError?: (error: BlobQueryError) => void;\n /**\n * Conditions to meet when uploading to the block blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlock} operation.\n */\nexport interface BlockBlobStageBlockOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * Callback to receive events on the progress of stage block operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the block content. 
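The text-configuration interfaces above are consumed by `BlockBlobClient.query` (Node.js only). A sketch that queries a CSV blob and returns the result as JSON lines; the container and blob names are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function queryCsvExample(): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // placeholder env var
  )
    .getContainerClient("data")
    .getBlockBlobClient("people.csv");

  const response = await blockBlobClient.query("select * from BlobStorage", {
    inputTextConfiguration: { kind: "csv", recordSeparator: "\n", hasHeaders: true },
    outputTextConfiguration: { kind: "json", recordSeparator: "\n" },
    onError: (err) => console.warn(`query error at ${err.position}: ${err.description}`),
  });

  // readableStreamBody is only populated in Node.js.
  console.log(await streamToString(response.readableStreamBody!));
}

function streamToString(readable: NodeJS.ReadableStream): Promise<string> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    readable.on("data", (d: Buffer) => chunks.push(d));
    readable.on("end", () => resolve(Buffer.concat(chunks).toString("utf8")));
    readable.on("error", reject);
  });
}

queryCsvExample().catch(console.error);
```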
This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n\n /**\n * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation.\n */\nexport interface BlockBlobStageBlockFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the bytes of the source Blob/File to upload.\n * If not specified, the entire content is uploaded as a single block.\n */\n range?: Range;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. 
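`stageBlockFromURL` stages a block whose bytes the service reads from another URL, which pairs with `commitBlockList` to assemble a blob from remote ranges. A minimal sketch; the source URL and names are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function stageFromUrlExample(): Promise<void> {
  const destClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // placeholder env var
  )
    .getContainerClient("assembled")
    .getBlockBlobClient("joined.bin");

  const sourceUrl = "https://otheraccount.blob.core.windows.net/src/big.bin?sasString";
  // Block IDs must be base64 strings of identical length within one blob.
  const blockIds = [0, 1].map((i) =>
    Buffer.from(String(i).padStart(6, "0")).toString("base64")
  );

  // Stage two 4 MiB ranges of the source server-side, then commit them in order.
  await destClient.stageBlockFromURL(blockIds[0], sourceUrl, 0, 4 * 1024 * 1024);
  await destClient.stageBlockFromURL(blockIds[1], sourceUrl, 4 * 1024 * 1024, 4 * 1024 * 1024);
  await destClient.commitBlockList(blockIds);
}

stageFromUrlExample().catch(console.error);
```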
Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.commitBlockList} operation.\n */\nexport interface BlockBlobCommitBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when committing the block list.\n */\n conditions?: BlobRequestConditions;\n /**\n * HTTP headers to set when committing block list.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when committing block list.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure {@link BlockBlobClient.getBlockList} operation.\n */\nexport interface BlockBlobGetBlockListOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions & TagConditions;\n}\n\n/**\n * Option interface for the {@link BlockBlobClient.uploadStream} operation.\n */\nexport interface BlockBlobUploadStreamOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Blob HTTP Headers.\n *\n * A common header to set is `blobContentType`, enabling the\n * browser to provide functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
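`stageBlock` and `commitBlockList` split an upload into blocks that can be retried independently; nothing becomes visible until the block list is committed. A sketch with equal-length base64 block IDs; the container and blob names are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function stagedUploadExample(): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // placeholder env var
  )
    .getContainerClient("uploads")
    .getBlockBlobClient("staged.txt");

  const parts = ["hello ", "block ", "blob"];
  // Block IDs must be base64-encoded and of identical length within one blob.
  const blockIds = parts.map((_, i) =>
    Buffer.from(String(i).padStart(6, "0")).toString("base64")
  );

  for (let i = 0; i < parts.length; i++) {
    await blockBlobClient.stageBlock(blockIds[i], parts[i], Buffer.byteLength(parts[i]));
  }

  // Commit the staged blocks in the desired order.
  await blockBlobClient.commitBlockList(blockIds, {
    blobHTTPHeaders: { blobContentType: "text/plain" },
  });

  const { committedBlocks } = await blockBlobClient.getBlockList("committed");
  console.log(committedBlocks?.map((b) => b.name));
}

stagedUploadExample().catch(console.error);
```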
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n/**\n * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}.\n */\nexport interface BlockBlobParallelUploadOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Destination block blob size in bytes.\n */\n blockSize?: number;\n\n /**\n * Blob size threshold in bytes to start concurrency uploading.\n * Default value is 256MB, blob size less than this option will\n * be uploaded via one I/O operation without concurrency.\n * You can customize a value less equal than the default value.\n */\n maxSingleShotSize?: number;\n\n /**\n * Progress updater.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n\n /**\n * Blob HTTP Headers. A common header to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n\n /**\n * Metadata of block blob.\n */\n metadata?: { [propertyName: string]: string };\n\n /**\n * Access conditions headers.\n */\n conditions?: BlobRequestConditions;\n\n /**\n * Concurrency of parallel uploading. Must be greater than or equal to 0.\n */\n concurrency?: number;\n\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n\n /**\n * Blob tags.\n */\n tags?: Tags;\n\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: BlockBlobTier | string;\n}\n\n/**\n * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and\n * {@link BlockBlobClient.uploadBrowserDate}.\n */\nexport type BlobUploadCommonResponse = BlockBlobUploadHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse;\n};\n\n/**\n * BlockBlobClient defines a set of operations applicable to block blobs.\n */\nexport class BlockBlobClient extends BlobClient {\n /**\n * blobContext provided by protocol layer.\n *\n * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API\n * extractor has issue blocking that. Here we redecelare _blobContext in BlockBlobClient.\n */\n private _blobContext: StorageBlob;\n\n /**\n * blockBlobContext provided by protocol layer.\n */\n private blockBlobContext: BlockBlob;\n\n /**\n *\n * Creates an instance of BlockBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. 
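`BlockBlobParallelUploadOptions` drives the convenience upload helpers. A sketch of `uploadFile` (Node.js only) with an explicit block size and concurrency; the file path and names are placeholders:

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function parallelUploadExample(): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // placeholder env var
  )
    .getContainerClient("backups")
    .getBlockBlobClient("dump.bin");

  // Files larger than maxSingleShotSize are split into blockSize chunks and
  // staged with up to `concurrency` parallel requests.
  await blockBlobClient.uploadFile("./dump.bin", {
    blockSize: 4 * 1024 * 1024,          // 4 MiB blocks
    maxSingleShotSize: 8 * 1024 * 1024,  // single PUT below 8 MiB
    concurrency: 5,
    onProgress: (p) => console.log(`${p.loadedBytes} bytes sent`),
  });
}

parallelUploadExample().catch(console.error);
```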
]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlockBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a block blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage block blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.blockBlobContext = new BlockBlob(this.storageClientContext);\n this._blobContext = new StorageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new BlockBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a URL to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): BlockBlobClient {\n return new BlockBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Quick query for a JSON or CSV formatted blob.\n *\n * Example usage (Node.js):\n *\n * ```js\n * // Query and convert a blob to a string\n * const queryBlockBlobResponse = await blockBlobClient.query(\"select * from BlobStorage\");\n * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString();\n * console.log(\"Query blob content:\", downloaded);\n *\n * async function streamToBuffer(readableStream) {\n * return new Promise((resolve, reject) => {\n * const chunks = [];\n * readableStream.on(\"data\", (data) => {\n * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data));\n * });\n * readableStream.on(\"end\", () => {\n * resolve(Buffer.concat(chunks));\n * });\n * readableStream.on(\"error\", reject);\n * });\n * }\n * ```\n *\n * @param query -\n * @param options -\n */\n public async query(\n query: string,\n options: BlockBlobQueryOptions = {}\n ): Promise {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-query\", options);\n\n try {\n if (!isNode) {\n throw new Error(\"This operation currently is only supported in Node.js.\");\n }\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n const response = await this._blobContext.query({\n abortSignal: options.abortSignal,\n queryRequest: {\n queryType: \"SQL\",\n expression: query,\n inputSerialization: toQuerySerialization(options.inputTextConfiguration),\n outputSerialization: toQuerySerialization(options.outputTextConfiguration),\n },\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n return new BlobQueryResponse(response, {\n abortSignal: options.abortSignal,\n onProgress: options.onProgress,\n onError: options.onError,\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link stageBlock} and {@link commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link uploadFile},\n * {@link uploadStream} or {@link uploadBrowserData} for better performance\n * with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. 
Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to the Block Blob Upload operation.\n * @returns Response data for the Block Blob Upload operation.\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public async upload(\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-upload\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.upload(contentLength, body, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new Block Blob where the contents of the blob are read from a given URL.\n * This API is supported beginning with the 2020-04-08 version. Partial updates\n * are not supported with Put Blob from URL; the content of an existing blob is overwritten with\n * the content of the new blob. To perform partial updates to a block blob’s contents using a\n * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}.\n *\n * @param sourceURL - Specifies the URL of the blob. The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. 
Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Optional parameters.\n */\n\n public async syncUploadFromURL(\n sourceURL: string,\n options: BlockBlobSyncUploadFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-syncUploadFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, {\n ...options,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions?.ifMatch,\n sourceIfModifiedSince: options.sourceConditions?.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions?.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions?.ifUnmodifiedSince,\n sourceIfTags: options.sourceConditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n copySourceTags: options.copySourceTags,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Uploads the specified block to the block blob's \"staging area\" to be later\n * committed by a call to commitBlockList.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param body - Data to upload to the staging area.\n * @param contentLength - Number of bytes to upload.\n * @param options - Options to the Block Blob Stage Block operation.\n * @returns Response data for the Block Blob Stage Block operation.\n */\n public async stageBlock(\n blockId: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobStageBlockOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlock\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlock(blockId, contentLength, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Stage Block From URL operation creates a new block to be committed as part\n * of a blob where the contents are read from a URL.\n * This API is available starting in version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url\n *\n * @param blockId - A 64-byte value that is base64-encoded\n * @param sourceURL - Specifies the URL of the blob. 
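For illustration, a minimal sketch of calling `syncUploadFromURL` (Put Blob from URL), assuming `blockBlobClient` is an authenticated `BlockBlobClient` for the destination and `sourceSasUrl` is a public or SAS-authorized URL of the source blob:

```js
// Assumptions for this sketch: blockBlobClient (destination) and sourceSasUrl are defined elsewhere.
const response = await blockBlobClient.syncUploadFromURL(sourceSasUrl, {
  conditions: { ifNoneMatch: "*" }, // fail if the destination blob already exists
  tags: { source: "copied-from-url" },
});
console.log(`Put Blob from URL completed, request id: ${response.requestId}`);
```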
The value\n * may be a URL of up to 2 KB in length that specifies a blob.\n * The value should be URL-encoded as it would appear\n * in a request URI. The source blob must either be public\n * or must be authenticated via a shared access signature.\n * If the source blob is public, no authentication is required\n * to perform the operation. Here are some examples of source object URLs:\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob\n * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param offset - From which position of the blob to download, greater than or equal to 0\n * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined\n * @param options - Options to the Block Blob Stage Block From URL operation.\n * @returns Response data for the Block Blob Stage Block From URL operation.\n */\n public async stageBlockFromURL(\n blockId: string,\n sourceURL: string,\n offset: number = 0,\n count?: number,\n options: BlockBlobStageBlockFromURLOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-stageBlockFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }),\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes a blob by specifying the list of block IDs that make up the blob.\n * In order to be written as part of a blob, a block must have been successfully written\n * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to\n * update a blob by uploading only those blocks that have changed, then committing the new and existing\n * blocks together. 
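A sketch of building a block blob from a remote source in slices with `stageBlockFromURL` followed by `commitBlockList`; `blockBlobClient`, `sourceSasUrl`, and `sourceSize` are assumed inputs for the example:

```js
// Assumed inputs: blockBlobClient (destination), sourceSasUrl (SAS-authorized source URL),
// sourceSize (source length in bytes). All block IDs must be base64 strings of equal length.
const blockSize = 4 * 1024 * 1024; // stage the source in 4 MiB slices
const blockIds = [];
for (let offset = 0, i = 0; offset < sourceSize; offset += blockSize, i++) {
  const blockId = Buffer.from(String(i).padStart(6, "0")).toString("base64");
  const count = Math.min(blockSize, sourceSize - offset);
  await blockBlobClient.stageBlockFromURL(blockId, sourceSasUrl, offset, count);
  blockIds.push(blockId);
}
await blockBlobClient.commitBlockList(blockIds); // nothing is visible until the list is committed
```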
Any blocks not specified in the block list and permanently deleted.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list\n *\n * @param blocks - Array of 64-byte value that is base64-encoded\n * @param options - Options to the Block Blob Commit Block List operation.\n * @returns Response data for the Block Blob Commit Block List operation.\n */\n public async commitBlockList(\n blocks: string[],\n options: BlockBlobCommitBlockListOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-commitBlockList\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.blockBlobContext.commitBlockList(\n { latest: blocks },\n {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of blocks that have been uploaded as part of a block blob\n * using the specified block list filter.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list\n *\n * @param listType - Specifies whether to return the list of committed blocks,\n * the list of uncommitted blocks, or both lists together.\n * @param options - Options to the Block Blob Get Block List operation.\n * @returns Response data for the Block Blob Get Block List operation.\n */\n public async getBlockList(\n listType: BlockListType,\n options: BlockBlobGetBlockListOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-getBlockList\", options);\n try {\n const res = await this.blockBlobContext.getBlockList(listType, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n if (!res.committedBlocks) {\n res.committedBlocks = [];\n }\n\n if (!res.uncommittedBlocks) {\n res.uncommittedBlocks = [];\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n // High level functions\n\n /**\n * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob.\n *\n * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the 
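A sketch of the stage-then-commit flow documented above, with `getBlockList` used afterwards to inspect what was committed; `blockBlobClient` is an assumed, already-authenticated client:

```js
// Assumed: blockBlobClient is an authenticated BlockBlobClient. Sample data split into two blocks.
const chunks = ["Hello, ", "world!"];
const blockIds = chunks.map((_, i) =>
  Buffer.from(String(i).padStart(6, "0")).toString("base64") // equal-length base64 block IDs
);
for (let i = 0; i < chunks.length; i++) {
  await blockBlobClient.stageBlock(blockIds[i], chunks[i], Buffer.byteLength(chunks[i]));
}
await blockBlobClient.commitBlockList(blockIds);

const blockList = await blockBlobClient.getBlockList("committed");
console.log(`Committed ${blockList.committedBlocks.length} blocks`);
```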
browser to provide\n * functionality based on file type.\n *\n * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView\n * @param options -\n */\n public async uploadData(\n data: Buffer | Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadData\", options);\n try {\n if (isNode) {\n let buffer: Buffer;\n if (data instanceof Buffer) {\n buffer = data;\n } else if (data instanceof ArrayBuffer) {\n buffer = Buffer.from(data);\n } else {\n data = data as ArrayBufferView;\n buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);\n }\n\n return this.uploadSeekableInternal(\n (offset: number, size: number): Buffer => buffer.slice(offset, offset + size),\n buffer.byteLength,\n updatedOptions\n );\n } else {\n const browserBlob = new Blob([data]);\n return this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n }\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN BROWSERS.\n *\n * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob.\n *\n * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call\n * {@link commitBlockList} to commit the block list.\n *\n * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is\n * `blobContentType`, enabling the browser to provide\n * functionality based on file type.\n *\n * @deprecated Use {@link uploadData} instead.\n *\n * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView\n * @param options - Options to upload browser data.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadBrowserData(\n browserData: Blob | ArrayBuffer | ArrayBufferView,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadBrowserData\", options);\n try {\n const browserBlob = new Blob([browserData]);\n return await this.uploadSeekableInternal(\n (offset: number, size: number): Blob => browserBlob.slice(offset, offset + size),\n browserBlob.size,\n updatedOptions\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n *\n * Uploads data to block blob. 
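A sketch of `uploadData`, which chooses between a single `upload` call and the staged block path automatically; `blockBlobClient` is assumed, and the content type header is optional:

```js
// Assumed: blockBlobClient exists. In Node.js a Buffer is typical; browsers can pass a Blob,
// ArrayBuffer, or ArrayBufferView instead.
const data = Buffer.from(JSON.stringify({ hello: "world" }));
await blockBlobClient.uploadData(data, {
  blobHTTPHeaders: { blobContentType: "application/json" },
  onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`),
});
```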
Requires a bodyFactory as the data source,\n * which need to return a {@link HttpRequestBody} object with the offset and size provided.\n *\n * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is\n * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.\n * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}\n * to commit the block list.\n *\n * @param bodyFactory -\n * @param size - size of the data to upload.\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n private async uploadSeekableInternal(\n bodyFactory: (offset: number, size: number) => HttpRequestBody,\n size: number,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n if (!options.blockSize) {\n options.blockSize = 0;\n }\n if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {\n throw new RangeError(\n `blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`\n );\n }\n\n if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {\n options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;\n }\n if (\n options.maxSingleShotSize < 0 ||\n options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES\n ) {\n throw new RangeError(\n `maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`\n );\n }\n\n if (options.blockSize === 0) {\n if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(`${size} is too larger to upload to a block blob.`);\n }\n if (size > options.maxSingleShotSize) {\n options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);\n if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {\n options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;\n }\n }\n }\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadSeekableInternal\", options);\n\n try {\n if (size <= options.maxSingleShotSize) {\n return await this.upload(bodyFactory(0, size), size, updatedOptions);\n }\n\n const numBlocks: number = Math.floor((size - 1) / options.blockSize) + 1;\n if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {\n throw new RangeError(\n `The buffer's size is too big or the BlockSize is too small;` +\n `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`\n );\n }\n\n const blockList: string[] = [];\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n\n const batch = new Batch(options.concurrency);\n for (let i = 0; i < numBlocks; i++) {\n batch.addOperation(async (): Promise => {\n const blockID = generateBlockID(blockIDPrefix, i);\n const start = options.blockSize! * i;\n const end = i === numBlocks - 1 ? 
size : start + options.blockSize!;\n const contentLength = end - start;\n blockList.push(blockID);\n await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, {\n abortSignal: options.abortSignal,\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n // Update progress after block is successfully uploaded to server, in case of block trying\n // TODO: Hook with convenience layer progress event in finer level\n transferProgress += contentLength;\n if (options.onProgress) {\n options.onProgress!({\n loadedBytes: transferProgress,\n });\n }\n });\n }\n await batch.do();\n\n return this.commitBlockList(blockList, updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a local file in blocks to a block blob.\n *\n * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload.\n * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList\n * to commit the block list.\n *\n * @param filePath - Full path of local file\n * @param options - Options to Upload to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadFile(\n filePath: string,\n options: BlockBlobParallelUploadOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadFile\", options);\n try {\n const size = (await fsStat(filePath)).size;\n return await this.uploadSeekableInternal(\n (offset, count) => {\n return () =>\n fsCreateReadStream(filePath, {\n autoClose: true,\n end: count ? offset + count - 1 : Infinity,\n start: offset,\n });\n },\n size,\n {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Uploads a Node.js Readable stream into block blob.\n *\n * PERFORMANCE IMPROVEMENT TIPS:\n * * Input stream highWaterMark is better to set a same value with bufferSize\n * parameter, which will avoid Buffer.concat() operations.\n *\n * @param stream - Node.js Readable stream\n * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB\n * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,\n * positive correlation with max uploading concurrency. 
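A sketch of `uploadFile` with the parallel-upload options used by the block-splitting logic above; the path and option values are illustrative only:

```js
// Node.js only. Assumed: blockBlobClient exists and "./data/report.csv" is an example local path.
await blockBlobClient.uploadFile("./data/report.csv", {
  blockSize: 4 * 1024 * 1024, // block size used once the file exceeds maxSingleShotSize
  concurrency: 20,            // number of blocks staged in parallel
  onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`),
});
```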
Default value is 5\n * @param options - Options to Upload Stream to Block Blob operation.\n * @returns Response data for the Blob Upload operation.\n */\n public async uploadStream(\n stream: Readable,\n bufferSize: number = DEFAULT_BLOCK_BUFFER_SIZE_BYTES,\n maxConcurrency: number = 5,\n options: BlockBlobUploadStreamOptions = {}\n ): Promise {\n if (!options.blobHTTPHeaders) {\n options.blobHTTPHeaders = {};\n }\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BlockBlobClient-uploadStream\", options);\n\n try {\n let blockNum = 0;\n const blockIDPrefix = generateUuid();\n let transferProgress: number = 0;\n const blockList: string[] = [];\n\n const scheduler = new BufferScheduler(\n stream,\n bufferSize,\n maxConcurrency,\n async (body, length) => {\n const blockID = generateBlockID(blockIDPrefix, blockNum);\n blockList.push(blockID);\n blockNum++;\n\n await this.stageBlock(blockID, body, length, {\n conditions: options.conditions,\n encryptionScope: options.encryptionScope,\n tracingOptions: updatedOptions.tracingOptions,\n });\n\n // Update progress after block is successfully uploaded to server, in case of block trying\n transferProgress += length;\n if (options.onProgress) {\n options.onProgress({ loadedBytes: transferProgress });\n }\n },\n // concurrency should set a smaller value than maxConcurrency, which is helpful to\n // reduce the possibility when a outgoing handler waits for stream data, in\n // this situation, outgoing handlers are blocked.\n // Outgoing queue shouldn't be empty.\n Math.ceil((maxConcurrency / 4) * 3)\n );\n await scheduler.do();\n\n return await this.commitBlockList(blockList, {\n ...options,\n tracingOptions: {\n ...options!.tracingOptions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n },\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Options to configure the {@link PageBlobClient.create} operation.\n */\nexport interface PageBlobCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when creating a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. 
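A sketch of `uploadStream`; the stream source, buffer size, and concurrency shown here are illustrative (8 MB matches the documented default bufferSize):

```js
// Node.js only. Assumed: blockBlobClient exists; "./big.log" is an example file feeding the stream.
const fs = require("fs");
await blockBlobClient.uploadStream(
  fs.createReadStream("./big.log"),
  8 * 1024 * 1024, // bufferSize: also becomes the staged block size
  5,               // maxConcurrency: max buffers allocated / blocks in flight
  { onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`) }
);
```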
Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n /**\n * Blob tags.\n */\n tags?: Tags;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A user-controlled value that can be used to track requests.\n * The value must be between 0 and 2^63 - 1. The default value is 0.\n */\n blobSequenceNumber?: number;\n /**\n * HTTP headers to set when creating a page blob.\n */\n blobHTTPHeaders?: BlobHTTPHeaders;\n /**\n * A collection of key-value string pair to associate with the blob when creating append blobs.\n */\n metadata?: Metadata;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Optional. Specifies immutability policy for a blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n immutabilityPolicy?: BlobImmutabilityPolicy;\n /**\n * Optional. Indicates if a legal hold should be placed on the blob.\n * Note that is parameter is only applicable to a blob within a container that\n * has version level worm enabled.\n */\n legalHold?: boolean;\n /**\n * Access tier.\n * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers\n */\n tier?: PremiumPageBlobTier | string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.uploadPages} operation.\n */\nexport interface PageBlobUploadPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when uploading pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Callback to receive events on the progress of upload pages operation.\n */\n onProgress?: (progress: TransferProgressEvent) => void;\n /**\n * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content. 
This hash is used to verify the integrity of the content during transport.\n * When this is specified, the storage service compares the hash of the content that has arrived with this value.\n *\n * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time.\n */\n transactionalContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.clearPages} operation.\n */\nexport interface PageBlobClearPagesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when clearing pages.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getPageRanges} operation.\n */\nexport interface PageBlobGetPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure page blob - get page ranges segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesSegment}\n * - {@link PageBlobClient.listPageRangeItemSegments}\n * - {@link PageBlobClient.listPageRangeItems}\n */\ninterface PageBlobListPageRangesSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRanges} operation.\n */\nexport interface PageBlobListPageRangesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.getRangesDiff} operation.\n */\nexport interface PageBlobGetPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n /**\n * (unused)\n */\n range?: string;\n}\n\n/**\n * Options to configure page blob - get page ranges diff segment operations.\n *\n * See:\n * - {@link PageBlobClient.listPageRangesDiffSegment}\n * - {@link PageBlobClient.listPageRangeDiffItemSegments}\n * - {@link PageBlobClient.listPageRangeDiffItems}\n */\ninterface PageBlobListPageRangesDiffSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges.\n */\n conditions?: BlobRequestConditions;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation.\n */\nexport interface PageBlobListPageRangesDiffOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when getting page ranges diff.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.resize} operation.\n */\nexport interface PageBlobResizeOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when resizing a page blob.\n */\n conditions?: BlobRequestConditions;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. 
If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n}\n\n/**\n * Options to configure {@link PageBlobClient.updateSequenceNumber} operation.\n */\nexport interface PageBlobUpdateSequenceNumberOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: BlobRequestConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.startCopyIncremental} operation.\n */\nexport interface PageBlobStartCopyIncrementalOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when starting a copy incremental operation.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation.\n */\nexport interface PageBlobUploadPagesFromURLOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when updating sequence number.\n */\n conditions?: PageBlobRequestConditions;\n /**\n * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob.\n */\n sourceConditions?: MatchConditions & ModificationConditions;\n /**\n * An MD5 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentMD5?: Uint8Array;\n /**\n * A CRC64 hash of the content from the URI.\n * This hash is used to verify the integrity of the content during transport of the data from the URI.\n * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value.\n *\n * sourceContentMD5 and sourceContentCrc64 cannot be set at same time.\n */\n sourceContentCrc64?: Uint8Array;\n /**\n * Customer Provided Key Info.\n */\n customerProvidedKey?: CpkInfo;\n /**\n * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to\n * encrypt the data provided in the request. If not specified, encryption is performed with the\n * default account encryption scope. For more information, see Encryption at Rest for Azure\n * Storage Services.\n */\n encryptionScope?: string;\n /**\n * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source.\n */\n sourceAuthorization?: HttpAuthorization;\n}\n\n/**\n * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.\n */\nexport interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {\n /**\n * Indicate whether the blob is successfully created. 
Is false when the blob is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * PageBlobClient defines a set of operations applicable to page blobs.\n */\nexport class PageBlobClient extends BlobClient {\n /**\n * pageBlobsContext provided by protocol layer.\n */\n private pageBlobContext: PageBlob;\n\n /**\n *\n * Creates an instance of PageBlobClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param blobName - Blob name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n connectionString: string,\n containerName: string,\n blobName: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A Client string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of PageBlobClient.\n *\n * @param url - A URL string pointing to Azure Storage page blob, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob\".\n * You can append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString\".\n * This method accepts an encoded URL or non-encoded URL pointing to a blob.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * However, if a blob name includes ? 
or %, blob name must be encoded in the URL.\n * Such as a blob named \"my?blob%\", the URL should be \"https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n blobNameOrOptions?: string | StoragePipelineOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead.\n // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options);\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n options = blobNameOrOptions as StoragePipelineOptions;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\" &&\n blobNameOrOptions &&\n typeof blobNameOrOptions === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n const blobName = blobNameOrOptions;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)),\n encodeURIComponent(blobName)\n ) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName and blobName parameters\");\n }\n super(url, pipeline);\n this.pageBlobContext = new PageBlob(this.storageClientContext);\n }\n\n /**\n * Creates a new PageBlobClient object identical to the source but with the\n * specified snapshot timestamp.\n * Provide \"\" will remove the snapshot and return a Client to the base blob.\n *\n * @param snapshot - The snapshot timestamp.\n * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.\n */\n public withSnapshot(snapshot: string): PageBlobClient {\n return new PageBlobClient(\n setURLParameter(\n this.url,\n URLConstants.Parameters.SNAPSHOT,\n snapshot.length === 0 ? undefined : snapshot\n ),\n this.pipeline\n );\n }\n\n /**\n * Creates a page blob of the specified length. 
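The three constructor overloads handled above correspond to these call shapes; the account, container, blob names, and credential values are placeholders:

```js
// Placeholders: connectionString, accountKey, and blobUrl are assumed to be defined elsewhere.
const fromConnString = new PageBlobClient(connectionString, "mycontainer", "mypageblob");

const fromCredential = new PageBlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/mypageblob",
  new StorageSharedKeyCredential("myaccount", accountKey) // Node.js only
);

const fromPipeline = new PageBlobClient(blobUrl, newPipeline(new AnonymousCredential()));
```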
Call uploadPages to upload data\n * data to a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options - Options to the Page Blob Create operation.\n * @returns Response data for the Page Blob Create operation.\n */\n public async create(\n size: number,\n options: PageBlobCreateOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-create\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.create(0, size, {\n abortSignal: options.abortSignal,\n blobHttpHeaders: options.blobHTTPHeaders,\n blobSequenceNumber: options.blobSequenceNumber,\n leaseAccessConditions: options.conditions,\n metadata: options.metadata,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n immutabilityPolicyExpiry: options.immutabilityPolicy?.expiriesOn,\n immutabilityPolicyMode: options.immutabilityPolicy?.policyMode,\n legalHold: options.legalHold,\n tier: toAccessTier(options.tier),\n blobTagsString: toBlobTagsString(options.tags),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a page blob of the specified length. Call uploadPages to upload data\n * data to a page blob. If the blob with the same name already exists, the content\n * of the existing blob will remain unchanged.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param size - size of the page blob.\n * @param options -\n */\n public async createIfNotExists(\n size: number,\n options: PageBlobCreateIfNotExistsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-createIfNotExists\", options);\n try {\n const conditions = { ifNoneMatch: ETagAny };\n const res = await this.create(size, {\n ...options,\n conditions,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"BlobAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when creating a blob only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Writes 1 or more pages to the page blob. 
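A sketch of creating a page blob and writing its first page; `pageBlobClient` is assumed (for example, obtained from a ContainerClient), and page blob sizes and ranges must be 512-byte aligned:

```js
// Assumed: pageBlobClient is an authenticated PageBlobClient.
await pageBlobClient.create(1024); // allocate a 1 KiB page blob (size must be a multiple of 512)

const page = Buffer.alloc(512, "a");                    // exactly one 512-byte page
await pageBlobClient.uploadPages(page, 0, page.length); // write it at offset 0
```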
The start and end offsets must be a multiple of 512.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param body - Data to upload\n * @param offset - Offset of destination page blob\n * @param count - Content length of the body, also number of bytes to be uploaded\n * @param options - Options to the Page Blob Upload Pages operation.\n * @returns Response data for the Page Blob Upload Pages operation.\n */\n public async uploadPages(\n body: HttpRequestBody,\n offset: number,\n count: number,\n options: PageBlobUploadPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPages\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPages(count, body, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n requestOptions: {\n onUploadProgress: options.onProgress,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n transactionalContentMD5: options.transactionalContentMD5,\n transactionalContentCrc64: options.transactionalContentCrc64,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Upload Pages operation writes a range of pages to a page blob where the\n * contents are read from a URL.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url\n *\n * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication\n * @param sourceOffset - The source offset to copy from. 
Pass 0 to copy from the beginning of source page blob\n * @param destOffset - Offset of destination page blob\n * @param count - Number of bytes to be uploaded from source page blob\n * @param options -\n */\n public async uploadPagesFromURL(\n sourceURL: string,\n sourceOffset: number,\n destOffset: number,\n count: number,\n options: PageBlobUploadPagesFromURLOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n options.sourceConditions = options.sourceConditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-uploadPagesFromURL\", options);\n try {\n ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);\n return await this.pageBlobContext.uploadPagesFromURL(\n sourceURL,\n rangeToString({ offset: sourceOffset, count }),\n 0,\n rangeToString({ offset: destOffset, count }),\n {\n abortSignal: options.abortSignal,\n sourceContentMD5: options.sourceContentMD5,\n sourceContentCrc64: options.sourceContentCrc64,\n leaseAccessConditions: options.conditions,\n sequenceNumberAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n sourceModifiedAccessConditions: {\n sourceIfMatch: options.sourceConditions.ifMatch,\n sourceIfModifiedSince: options.sourceConditions.ifModifiedSince,\n sourceIfNoneMatch: options.sourceConditions.ifNoneMatch,\n sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince,\n },\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Frees the specified pages from the page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/put-page\n *\n * @param offset - Starting byte position of the pages to clear.\n * @param count - Number of bytes to clear.\n * @param options - Options to the Page Blob Clear Pages operation.\n * @returns Response data for the Page Blob Clear Pages operation.\n */\n public async clearPages(\n offset: number = 0,\n count?: number,\n options: PageBlobClearPagesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-clearPages\", options);\n try {\n return await this.pageBlobContext.clearPages(0, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n sequenceNumberAccessConditions: options.conditions,\n cpkInfo: options.customerProvidedKey,\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns the list of valid page ranges for a page blob or snapshot of a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns Response data for the Page Blob Get Ranges operation.\n */\n 
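Continuing the page blob sketch above: clearing a range and then listing the ranges that still hold data (the `pageRange` field name follows the response model produced by `rangeResponseFromModel`):

```js
// Assumed: pageBlobClient is the client from the previous sketch.
await pageBlobClient.clearPages(0, 512); // free the first 512-byte page

const ranges = await pageBlobClient.getPageRanges(0, 1024);
for (const range of ranges.pageRange || []) {
  console.log(`Valid page range: ${range.start} - ${range.end}`);
}
```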
public async getPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobGetPageRangesOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRanges\", options);\n try {\n return await this.pageBlobContext\n .getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesSegment returns a single segment of page ranges starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to PageBlob Get Page Ranges Segment operation.\n */\n private async listPageRangesSegment(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesSegment\", options);\n try {\n return await this.pageBlobContext.getPageRanges({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n range: rangeToString({ offset, count }),\n marker: marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. 
The marker value is opaque to the client.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItemSegments(\n offset: number = 0,\n count?: number,\n marker?: string,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesSegment(\n offset,\n count,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param options - Options to List Page Ranges operation.\n */\n private async *listPageRangeItems(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeItemSegments(\n offset,\n count,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges for a page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges for a page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRanges()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRanges();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * 
@param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRanges(\n offset: number = 0,\n count?: number,\n options: PageBlobListPageRangesOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeItems(offset, count, options);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeItemSegments(offset, count, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...options,\n });\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page Range Diff operation.\n */\n public async getPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-getPageRangesDiff\", options);\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshot,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * getPageRangesDiffSegment returns a single segment of page ranges starting from the\n * specified Marker for difference between previous snapshot and the target page blob.\n * Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call getPageRangesDiffSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of the get to be returned with the next get operation.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async listPageRangesDiffSegment(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): Promise {\n const { span, updatedOptions } = 
createSpan(\"PageBlobClient-getPageRangesDiffSegment\", options);\n try {\n return await this.pageBlobContext.getPageRangesDiff({\n abortSignal: options?.abortSignal,\n leaseAccessConditions: options?.conditions,\n modifiedAccessConditions: {\n ...options?.conditions,\n ifTags: options?.conditions?.tagConditions,\n },\n prevsnapshot: prevSnapshotOrUrl,\n range: rangeToString({\n offset: offset,\n count: count,\n }),\n marker: marker,\n maxPageSize: options?.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n /**\n * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel}\n *\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param marker - A string value that identifies the portion of\n * the get of page ranges to be returned with the next getting operation. The\n * operation returns the ContinuationToken value within the response body if the\n * getting operation did not return all page ranges remaining within the current page.\n * The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of get\n * items. The marker value is opaque to the client.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItemSegments(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n marker?: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let getPageRangeItemSegmentsResponse;\n if (!!marker || marker === undefined) {\n do {\n getPageRangeItemSegmentsResponse = await this.listPageRangesDiffSegment(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n );\n marker = getPageRangeItemSegmentsResponse.continuationToken;\n yield await getPageRangeItemSegmentsResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects\n *\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n */\n private async *listPageRangeDiffItems(\n offset: number,\n count: number,\n prevSnapshotOrUrl: string,\n options?: PageBlobListPageRangesDiffSegmentOptions\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const getPageRangesSegment of this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshotOrUrl,\n marker,\n options\n )) {\n yield* ExtractPageRangeInfoItems(getPageRangesSegment);\n }\n }\n\n /**\n * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the pageBlobClient before you run these snippets,\n * // Can be obtained from 
`blobServiceClient.getContainerClient(\"\").getPageBlobClient(\"\");`\n * let i = 1;\n * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = pageBlobClient.listPageRangesDiff();\n * let pageRangeItem = await iter.next();\n * while (!pageRangeItem.done) {\n * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);\n * pageRangeItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 page ranges\n * for (const pageRange of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 page ranges\n * for (const blob of response) {\n * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);\n * }\n * ```\n * @param offset - Starting byte position of the page ranges.\n * @param count - Number of bytes to get.\n * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Ranges operation.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listPageRangesDiff(\n offset: number,\n count: number,\n prevSnapshot: string,\n options: PageBlobListPageRangesDiffOptions = {}\n ): PagedAsyncIterableIterator {\n options.conditions = options.conditions || {};\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, {\n ...options,\n });\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listPageRangeDiffItemSegments(\n offset,\n count,\n prevSnapshot,\n settings.continuationToken,\n {\n maxPageSize: settings.maxPageSize,\n ...options,\n }\n );\n },\n };\n }\n\n /**\n * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.\n * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges\n *\n * @param offset - Starting byte position of the page blob\n * @param count - Number of bytes to get ranges diff.\n * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.\n * @param options - Options to the Page Blob Get Page Ranges Diff operation.\n * @returns Response data for the Page Blob Get Page 
Range Diff operation.\n */\n public async getPageRangesDiffForManagedDisks(\n offset: number,\n count: number,\n prevSnapshotUrl: string,\n options: PageBlobGetPageRangesDiffOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\n \"PageBlobClient-GetPageRangesDiffForManagedDisks\",\n options\n );\n\n try {\n return await this.pageBlobContext\n .getPageRangesDiff({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n prevSnapshotUrl,\n range: rangeToString({ offset, count }),\n ...convertTracingToRequestOptionsBase(updatedOptions),\n })\n .then(rangeResponseFromModel);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Resizes the page blob to the specified size (which must be a multiple of 512).\n * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties\n *\n * @param size - Target size\n * @param options - Options to the Page Blob Resize operation.\n * @returns Response data for the Page Blob Resize operation.\n */\n public async resize(\n size: number,\n options: PageBlobResizeOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-resize\", options);\n try {\n return await this.pageBlobContext.resize(size, {\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n encryptionScope: options.encryptionScope,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets a page blob's sequence number.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties\n *\n * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.\n * @param sequenceNumber - Required if sequenceNumberAction is max or update\n * @param options - Options to the Page Blob Update Sequence Number operation.\n * @returns Response data for the Page Blob Update Sequence Number operation.\n */\n public async updateSequenceNumber(\n sequenceNumberAction: SequenceNumberActionType,\n sequenceNumber?: number,\n options: PageBlobUpdateSequenceNumberOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"PageBlobClient-updateSequenceNumber\", options);\n try {\n return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, {\n abortSignal: options.abortSignal,\n blobSequenceNumber: sequenceNumber,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.\n * The snapshot is copied such that only the differential changes between the previously\n * copied snapshot are transferred to 
the destination.\n * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.\n * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob\n * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots\n *\n * @param copySource - Specifies the name of the source page blob snapshot. For example,\n * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=\n * @param options - Options to the Page Blob Copy Incremental operation.\n * @returns Response data for the Page Blob Copy Incremental operation.\n */\n public async startCopyIncremental(\n copySource: string,\n options: PageBlobStartCopyIncrementalOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"PageBlobClient-startCopyIncremental\", options);\n try {\n return await this.pageBlobContext.copyIncremental(copySource, {\n abortSignal: options.abortSignal,\n modifiedAccessConditions: {\n ...options.conditions,\n ifTags: options.conditions?.tagConditions,\n },\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport { streamToBuffer2 } from \"./utils/utils.node\";\nimport { BATCH_MAX_PAYLOAD_IN_BYTES } from \"./utils/constants\";\n\nexport async function getBodyAsText(\n batchResponse: ServiceSubmitBatchResponseModel\n): Promise {\n let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES);\n\n const responseLength = await streamToBuffer2(\n batchResponse.readableStreamBody as NodeJS.ReadableStream,\n buffer\n );\n\n // Slice the buffer to trim the empty ending.\n buffer = buffer.slice(0, responseLength);\n\n return buffer.toString();\n}\n\nexport function utf8ByteLength(str: string): number {\n return Buffer.byteLength(str);\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { HttpHeaders } from \"@azure/core-http\";\n\nimport { ServiceSubmitBatchResponseModel } from \"./generatedModels\";\nimport {\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n HeaderConstants,\n HTTPURLConnection,\n} from \"./utils/constants\";\nimport { getBodyAsText } from \"./BatchUtils\";\nimport { BatchSubRequest } from \"./BlobBatch\";\nimport { BatchSubResponse, ParsedBatchResponse } from \"./BatchResponse\";\nimport { logger } from \"./log\";\n\nconst HTTP_HEADER_DELIMITER = \": \";\nconst SPACE_DELIMITER = \" \";\nconst NOT_FOUND = -1;\n\n/**\n * Util class for parsing batch response.\n */\nexport class BatchResponseParser {\n private readonly batchResponse: ServiceSubmitBatchResponseModel;\n private readonly responseBatchBoundary: string;\n private readonly perResponsePrefix: string;\n private readonly batchResponseEnding: string;\n private readonly subRequests: Map;\n\n constructor(\n batchResponse: ServiceSubmitBatchResponseModel,\n subRequests: Map\n ) {\n if (!batchResponse || !batchResponse.contentType) {\n // In special case(reported), server may return invalid content-type which could not be parsed.\n throw new RangeError(\"batchResponse is malformed or doesn't contain valid content-type.\");\n }\n\n if (!subRequests || subRequests.size === 0) {\n // This should be prevent during coding.\n throw new RangeError(\"Invalid state: subRequests is not provided or size is 0.\");\n 
}\n\n this.batchResponse = batchResponse;\n this.subRequests = subRequests;\n this.responseBatchBoundary = this.batchResponse.contentType!.split(\"=\")[1];\n this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`;\n this.batchResponseEnding = `--${this.responseBatchBoundary}--`;\n }\n\n // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response\n public async parseBatchResponse(): Promise {\n // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse\n // sub request's response.\n if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) {\n throw new Error(\n `Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`\n );\n }\n\n const responseBodyAsText = await getBodyAsText(this.batchResponse);\n\n const subResponses = responseBodyAsText\n .split(this.batchResponseEnding)[0] // string after ending is useless\n .split(this.perResponsePrefix)\n .slice(1); // string before first response boundary is useless\n const subResponseCount = subResponses.length;\n\n // Defensive coding in case of potential error parsing.\n // Note: subResponseCount == 1 is special case where sub request is invalid.\n // We try to prevent such cases through early validation, e.g. validate sub request count >= 1.\n // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user.\n if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) {\n throw new Error(\"Invalid state: sub responses' count is not equal to sub requests' count.\");\n }\n\n const deserializedSubResponses: Array = new Array(subResponseCount);\n let subResponsesSucceededCount: number = 0;\n let subResponsesFailedCount: number = 0;\n\n // Parse sub subResponses.\n for (let index = 0; index < subResponseCount; index++) {\n const subResponse = subResponses[index];\n const deserializedSubResponse = {} as BatchSubResponse;\n deserializedSubResponse.headers = new HttpHeaders();\n\n const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`);\n let subRespHeaderStartFound = false;\n let subRespHeaderEndFound = false;\n let subRespFailed = false;\n let contentId = NOT_FOUND;\n\n for (const responseLine of responseLines) {\n if (!subRespHeaderStartFound) {\n // Convention line to indicate content ID\n if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) {\n contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]);\n }\n\n // Http version line with status code indicates the start of sub request's response.\n // Example: HTTP/1.1 202 Accepted\n if (responseLine.startsWith(HTTP_VERSION_1_1)) {\n subRespHeaderStartFound = true;\n\n const tokens = responseLine.split(SPACE_DELIMITER);\n deserializedSubResponse.status = parseInt(tokens[1]);\n deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER);\n }\n\n continue; // Skip convention headers not specifically for sub request i.e. 
Content-Type: application/http and Content-ID: *\n }\n\n if (responseLine.trim() === \"\") {\n // Sub response's header start line already found, and the first empty line indicates header end line found.\n if (!subRespHeaderEndFound) {\n subRespHeaderEndFound = true;\n }\n\n continue; // Skip empty line\n }\n\n // Note: when code reach here, it indicates subRespHeaderStartFound == true\n if (!subRespHeaderEndFound) {\n if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) {\n // Defensive coding to prevent from missing valuable lines.\n throw new Error(\n `Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`\n );\n }\n\n // Parse headers of sub response.\n const tokens = responseLine.split(HTTP_HEADER_DELIMITER);\n deserializedSubResponse.headers.set(tokens[0], tokens[1]);\n if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) {\n deserializedSubResponse.errorCode = tokens[1];\n subRespFailed = true;\n }\n } else {\n // Assemble body of sub response.\n if (!deserializedSubResponse.bodyAsText) {\n deserializedSubResponse.bodyAsText = \"\";\n }\n\n deserializedSubResponse.bodyAsText += responseLine;\n }\n } // Inner for end\n\n // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking.\n // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it\n // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that\n // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose.\n if (\n contentId !== NOT_FOUND &&\n Number.isInteger(contentId) &&\n contentId >= 0 &&\n contentId < this.subRequests.size &&\n deserializedSubResponses[contentId] === undefined\n ) {\n deserializedSubResponse._request = this.subRequests.get(contentId)!;\n deserializedSubResponses[contentId] = deserializedSubResponse;\n } else {\n logger.error(\n `subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`\n );\n }\n\n if (subRespFailed) {\n subResponsesFailedCount++;\n } else {\n subResponsesSucceededCount++;\n }\n }\n\n return {\n subResponses: deserializedSubResponses,\n subResponsesSucceededCount: subResponsesSucceededCount,\n subResponsesFailedCount: subResponsesFailedCount,\n };\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nenum MutexLockStatus {\n LOCKED,\n UNLOCKED,\n}\n\ntype Callback = (...args: any[]) => any;\n\n/**\n * An async mutex lock.\n */\nexport class Mutex {\n /**\n * Lock for a specific key. 
If the lock has been acquired by another customer, then\n * will wait until getting the lock.\n *\n * @param key - lock key\n */\n public static async lock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n } else {\n this.onUnlockEvent(key, () => {\n this.keys[key] = MutexLockStatus.LOCKED;\n resolve();\n });\n }\n });\n }\n\n /**\n * Unlock a key.\n *\n * @param key -\n */\n public static async unlock(key: string): Promise {\n return new Promise((resolve) => {\n if (this.keys[key] === MutexLockStatus.LOCKED) {\n this.emitUnlockEvent(key);\n }\n delete this.keys[key];\n resolve();\n });\n }\n\n private static keys: { [key: string]: MutexLockStatus } = {};\n private static listeners: { [key: string]: Callback[] } = {};\n\n private static onUnlockEvent(key: string, handler: Callback) {\n if (this.listeners[key] === undefined) {\n this.listeners[key] = [handler];\n } else {\n this.listeners[key].push(handler);\n }\n }\n\n private static emitUnlockEvent(key: string) {\n if (this.listeners[key] !== undefined && this.listeners[key].length > 0) {\n const handler = this.listeners[key].shift();\n setImmediate(() => {\n handler!.call(this);\n });\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n BaseRequestPolicy,\n deserializationPolicy,\n generateUuid,\n HttpHeaders,\n HttpOperationResponse,\n RequestPolicy,\n RequestPolicyFactory,\n RequestPolicyOptions,\n WebResource,\n TokenCredential,\n isTokenCredential,\n bearerTokenAuthenticationPolicy,\n isNode,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobClient, BlobDeleteOptions, BlobSetTierOptions } from \"./Clients\";\nimport { AccessTier } from \"./generatedModels\";\nimport { Mutex } from \"./utils/Mutex\";\nimport { Pipeline } from \"./Pipeline\";\nimport { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from \"./utils/utils.common\";\nimport {\n HeaderConstants,\n BATCH_MAX_REQUEST,\n HTTP_VERSION_1_1,\n HTTP_LINE_ENDING,\n StorageOAuthScopes,\n} from \"./utils/constants\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { createSpan } from \"./utils/tracing\";\n\n/**\n * A request associated with a batch operation.\n */\nexport interface BatchSubRequest {\n /**\n * The URL of the resource to request operation.\n */\n url: string;\n\n /**\n * The credential used for sub request.\n * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service.\n * You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n */\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n}\n\n/**\n * A BlobBatch represents an aggregated set of operations on blobs.\n * Currently, only `delete` and `setAccessTier` are supported.\n */\nexport class BlobBatch {\n private batchRequest: InnerBatchRequest;\n private readonly batch: string = \"batch\";\n private batchType: \"delete\" | \"setAccessTier\" | undefined;\n\n constructor() {\n this.batchRequest = new InnerBatchRequest();\n }\n\n /**\n * Get the value of Content-Type for a batch request.\n * The value must be multipart/mixed with a batch boundary.\n * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252\n */\n public getMultiPartContentType(): string {\n return this.batchRequest.getMultipartContentType();\n }\n\n /**\n * Get assembled HTTP request body for sub requests.\n */\n public getHttpRequestBody(): string {\n return this.batchRequest.getHttpRequestBody();\n }\n\n /**\n * Get sub requests that are added into the batch request.\n */\n public getSubRequests(): Map {\n return this.batchRequest.getSubRequests();\n }\n\n private async addSubRequestInternal(\n subRequest: BatchSubRequest,\n assembleSubRequestFunc: () => Promise\n ): Promise {\n await Mutex.lock(this.batch);\n\n try {\n this.batchRequest.preAddSubRequest(subRequest);\n await assembleSubRequestFunc();\n this.batchRequest.postAddSubRequest(subRequest);\n } finally {\n await Mutex.unlock(this.batch);\n }\n }\n\n private setBatchType(batchType: \"delete\" | \"setAccessTier\"): void {\n if (!this.batchType) {\n this.batchType = batchType;\n }\n if (this.batchType !== batchType) {\n throw new RangeError(\n `BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`\n );\n }\n }\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlob(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * The deleteBlob operation marks the specified blob or snapshot for deletion.\n * The blob is later deleted during garbage collection.\n * Only one kind of operation is allowed per batch request.\n *\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. 
See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param options -\n */\n public async deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise;\n\n public async deleteBlob(\n urlOrBlobClient: string | BlobClient,\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n options?: BlobDeleteOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrOptions instanceof StorageSharedKeyCredential) ||\n credentialOrOptions instanceof AnonymousCredential ||\n isTokenCredential(credentialOrOptions))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrOptions;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n options = credentialOrOptions as BlobDeleteOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchDeleteRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"delete\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param url - The url of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n url: string,\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * The setBlobAccessTier operation sets the tier on a blob.\n * The operation is allowed on block blobs in a blob storage or general purpose v2 account.\n * Only one kind of operation is allowed per batch request.\n *\n * A block blob's tier determines Hot/Cool/Archive storage type.\n * This operation does not update the blob's ETag.\n * For detailed information about block blob level tiering\n * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers).\n * The operation will be authenticated and authorized\n * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClient - The BlobClient.\n * @param tier -\n * @param options -\n */\n public async setBlobAccessTier(\n blobClient: BlobClient,\n tier: AccessTier,\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobAccessTier(\n urlOrBlobClient: string | BlobClient,\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n options?: BlobSetTierOptions\n ): Promise {\n let url: string;\n let credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential;\n let tier: AccessTier;\n\n if (\n typeof urlOrBlobClient === \"string\" &&\n ((isNode && credentialOrTier instanceof StorageSharedKeyCredential) ||\n credentialOrTier instanceof AnonymousCredential ||\n isTokenCredential(credentialOrTier))\n ) {\n // First overload\n url = urlOrBlobClient;\n credential = credentialOrTier as\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential;\n tier = tierOrOptions as AccessTier;\n } else if (urlOrBlobClient instanceof BlobClient) {\n // Second overload\n url = urlOrBlobClient.url;\n credential = urlOrBlobClient.credential;\n tier = credentialOrTier as AccessTier;\n options = tierOrOptions as BlobSetTierOptions;\n } else {\n throw new RangeError(\n \"Invalid arguments. 
Either url and credential, or BlobClient need be provided.\"\n );\n }\n\n if (!options) {\n options = {};\n }\n\n const { span, updatedOptions } = createSpan(\"BatchSetTierRequest-addSubRequest\", options);\n\n try {\n this.setBatchType(\"setAccessTier\");\n await this.addSubRequestInternal(\n {\n url: url,\n credential: credential,\n },\n async () => {\n await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(\n tier,\n updatedOptions\n );\n }\n );\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n\n/**\n * Inner batch request class which is responsible for assembling and serializing sub requests.\n * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled.\n */\nclass InnerBatchRequest {\n private operationCount: number;\n private body: string;\n private subRequests: Map;\n private readonly boundary: string;\n private readonly subRequestPrefix: string;\n private readonly multipartContentType: string;\n private readonly batchRequestEnding: string;\n\n constructor() {\n this.operationCount = 0;\n this.body = \"\";\n\n const tempGuid = generateUuid();\n\n // batch_{batchid}\n this.boundary = `batch_${tempGuid}`;\n // --batch_{batchid}\n // Content-Type: application/http\n // Content-Transfer-Encoding: binary\n this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;\n // multipart/mixed; boundary=batch_{batchid}\n this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;\n // --batch_{batchid}--\n this.batchRequestEnding = `--${this.boundary}--`;\n\n this.subRequests = new Map();\n }\n\n /**\n * Create pipeline to assemble sub requests. The idea here is to use existing\n * credential and serialization/deserialization components, with additional policies to\n * filter unnecessary headers, assemble sub requests into request's body\n * and intercept request from going to wire.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n */\n public createPipeline(\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential\n ): Pipeline {\n const isAnonymousCreds = credential instanceof AnonymousCredential;\n const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory]\n const factories: RequestPolicyFactory[] = new Array(policyFactoryLength);\n\n factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer\n factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers\n if (!isAnonymousCreds) {\n factories[2] = isTokenCredential(credential)\n ? 
attachCredential(\n bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes),\n credential\n )\n : credential;\n }\n factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire\n\n return new Pipeline(factories, {});\n }\n\n public appendSubRequestToBody(request: WebResource) {\n // Start to assemble sub request\n this.body += [\n this.subRequestPrefix, // sub request constant prefix\n `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID\n \"\", // empty line after sub request's content ID\n `${request.method.toString()} ${getURLPathAndQuery(\n request.url\n )} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method\n ].join(HTTP_LINE_ENDING);\n\n for (const header of request.headers.headersArray()) {\n this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`;\n }\n\n this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line\n // No body to assemble for current batch request support\n // End to assemble sub request\n }\n\n public preAddSubRequest(subRequest: BatchSubRequest) {\n if (this.operationCount >= BATCH_MAX_REQUEST) {\n throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`);\n }\n\n // Fast fail if url for sub request is invalid\n const path = getURLPath(subRequest.url);\n if (!path || path === \"\") {\n throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`);\n }\n }\n\n public postAddSubRequest(subRequest: BatchSubRequest) {\n this.subRequests.set(this.operationCount, subRequest);\n this.operationCount++;\n }\n\n // Return the http request body with assembling the ending line to the sub request body.\n public getHttpRequestBody(): string {\n return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`;\n }\n\n public getMultipartContentType(): string {\n return this.multipartContentType;\n }\n\n public getSubRequests(): Map {\n return this.subRequests;\n }\n}\n\nclass BatchRequestAssemblePolicy extends BaseRequestPolicy {\n private batchRequest: InnerBatchRequest;\n private readonly dummyResponse: HttpOperationResponse = {\n request: new WebResource(),\n status: 200,\n headers: new HttpHeaders(),\n };\n\n constructor(\n batchRequest: InnerBatchRequest,\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ) {\n super(nextPolicy, options);\n\n this.batchRequest = batchRequest;\n }\n\n public async sendRequest(request: WebResource): Promise {\n await this.batchRequest.appendSubRequestToBody(request);\n\n return this.dummyResponse; // Intercept request from going to wire\n }\n}\n\nclass BatchRequestAssemblePolicyFactory implements RequestPolicyFactory {\n private batchRequest: InnerBatchRequest;\n\n constructor(batchRequest: InnerBatchRequest) {\n this.batchRequest = batchRequest;\n }\n\n public create(\n nextPolicy: RequestPolicy,\n options: RequestPolicyOptions\n ): BatchRequestAssemblePolicy {\n return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options);\n }\n}\n\nclass BatchHeaderFilterPolicy extends BaseRequestPolicy {\n // The base class has a protected constructor. 
Adding a public one to enable constructing of this class.\n /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/\n constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {\n super(nextPolicy, options);\n }\n\n public async sendRequest(request: WebResource): Promise {\n let xMsHeaderName = \"\";\n\n for (const header of request.headers.headersArray()) {\n if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) {\n xMsHeaderName = header.name;\n }\n }\n\n if (xMsHeaderName !== \"\") {\n request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header.\n }\n\n return this._nextPolicy.sendRequest(request);\n }\n}\n\nclass BatchHeaderFilterPolicyFactory implements RequestPolicyFactory {\n public create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): BatchHeaderFilterPolicy {\n return new BatchHeaderFilterPolicy(nextPolicy, options);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport {\n AccessTier,\n ServiceSubmitBatchHeaders,\n ServiceSubmitBatchOptionalParamsModel,\n ServiceSubmitBatchResponseModel,\n} from \"./generatedModels\";\nimport { ParsedBatchResponse } from \"./BatchResponse\";\nimport { BatchResponseParser } from \"./BatchResponseParser\";\nimport { utf8ByteLength } from \"./BatchUtils\";\nimport { BlobBatch } from \"./BlobBatch\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { HttpResponse, TokenCredential } from \"@azure/core-http\";\nimport { Service, Container } from \"./generated/src/operations\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { BlobDeleteOptions, BlobClient, BlobSetTierOptions } from \"./Clients\";\nimport { StorageClientContext } from \"./generated/src/storageClientContext\";\nimport { PipelineLike, StoragePipelineOptions, newPipeline, isPipelineLike } from \"./Pipeline\";\nimport { getURLPath } from \"./utils/utils.common\";\n\n/**\n * Options to configure the Service - Submit Batch Optional Params.\n */\nexport interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel {}\n\n/**\n * Contains response data for blob batch operations.\n */\nexport declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse &\n ServiceSubmitBatchHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceSubmitBatchHeaders;\n };\n };\n\n/**\n * Contains response data for the {@link deleteBlobs} operation.\n */\nexport declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * Contains response data for the {@link setBlobsAccessTier} operation.\n */\nexport declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse;\n\n/**\n * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n */\nexport class BlobBatchClient {\n private serviceOrContainerContext: Service | Container;\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n\n /**\n * Creates an instance of BlobBatchClient.\n *\n * @param url - A url pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (!credentialOrPipeline) {\n // no credential provided\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n pipeline = newPipeline(credentialOrPipeline, options);\n }\n\n const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions());\n\n const path = getURLPath(url);\n if (path && path !== \"/\") {\n // Container scoped.\n this.serviceOrContainerContext = new Container(storageClientContext);\n } else {\n this.serviceOrContainerContext = new Service(storageClientContext);\n }\n }\n\n /**\n * Creates a {@link BlobBatch}.\n * A BlobBatch represents an aggregated set of operations on blobs.\n */\n public createBatch(): BlobBatch {\n return new BlobBatch();\n }\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operations will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resources to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options -\n */\n public async deleteBlobs(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n /**\n * Create multiple delete operations to mark the specified blobs or snapshots for deletion.\n * Note that in order to delete a blob, you must delete all of its snapshots.\n * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob).\n * The operation(subrequest) will be authenticated and authorized with specified credential.\n * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs to delete.\n * @param options -\n */\n public async deleteBlobs(\n blobClients: BlobClient[],\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise;\n\n public async deleteBlobs(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrOptions:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | BlobDeleteOptions\n | undefined,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobDeleteOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as TokenCredential, options);\n } else {\n await batch.deleteBlob(urlOrBlobClient, credentialOrOptions as BlobDeleteOptions);\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param urls - The urls of the blob resource to delete.\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n urls: string[],\n credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. 
Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n /**\n * Create multiple set tier operations to set the tier on a blob.\n * The operation is allowed on a page blob in a premium\n * storage account and on a block blob in a blob storage account (locally redundant\n * storage only). A premium page blob's tier determines the allowed size, IOPS,\n * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive\n * storage type. This operation does not update the blob's ETag.\n * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier).\n * The operation(subrequest) will be authenticated and authorized\n * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization).\n *\n * @param blobClients - The BlobClients for the blobs which should have a new tier set.\n * @param tier -\n * @param options -\n */\n public async setBlobsAccessTier(\n blobClients: BlobClient[],\n tier: AccessTier,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise;\n\n public async setBlobsAccessTier(\n urlsOrBlobClients: string[] | BlobClient[],\n credentialOrTier:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | AccessTier,\n tierOrOptions?: AccessTier | BlobSetTierOptions,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: BlobSetTierOptions\n ): Promise {\n const batch = new BlobBatch();\n for (const urlOrBlobClient of urlsOrBlobClients) {\n if (typeof urlOrBlobClient === \"string\") {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as TokenCredential,\n tierOrOptions as AccessTier,\n options\n );\n } else {\n await batch.setBlobAccessTier(\n urlOrBlobClient,\n credentialOrTier as AccessTier,\n tierOrOptions as BlobSetTierOptions\n );\n }\n }\n return this.submitBatch(batch);\n }\n\n /**\n * Submit batch request which consists of multiple subrequests.\n *\n * Get `blobBatchClient` and other details before running the snippets.\n * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient`\n *\n * Example usage:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.deleteBlob(urlInString0, credential0);\n * await batchRequest.deleteBlob(urlInString1, credential1, {\n * deleteSnapshots: \"include\"\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * Example using a lease:\n *\n * ```js\n * let batchRequest = new BlobBatch();\n * await batchRequest.setBlobAccessTier(blockBlobClient0, \"Cool\");\n * await batchRequest.setBlobAccessTier(blockBlobClient1, \"Cool\", {\n * conditions: { leaseId: leaseId }\n * });\n * const batchResp = await blobBatchClient.submitBatch(batchRequest);\n * console.log(batchResp.subResponsesSucceededCount);\n * ```\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @param batchRequest - A set of Delete or SetTier operations.\n * @param options -\n */\n public async submitBatch(\n batchRequest: BlobBatch,\n options: BlobBatchSubmitBatchOptionalParams = {}\n ): Promise {\n if (!batchRequest 
|| batchRequest.getSubRequests().size === 0) {\n throw new RangeError(\"Batch request should contain one or more sub requests.\");\n }\n\n const { span, updatedOptions } = createSpan(\"BlobBatchClient-submitBatch\", options);\n try {\n const batchRequestBody = batchRequest.getHttpRequestBody();\n\n // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now.\n const rawBatchResponse: ServiceSubmitBatchResponseModel =\n await this.serviceOrContainerContext.submitBatch(\n utf8ByteLength(batchRequestBody),\n batchRequest.getMultiPartContentType(),\n batchRequestBody,\n {\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202).\n const batchResponseParser = new BatchResponseParser(\n rawBatchResponse,\n batchRequest.getSubRequests()\n );\n const responseSummary = await batchResponseParser.parseBatchResponse();\n\n const res: BlobBatchSubmitBatchResponse = {\n _response: rawBatchResponse._response,\n contentType: rawBatchResponse.contentType,\n errorCode: rawBatchResponse.errorCode,\n requestId: rawBatchResponse.requestId,\n clientRequestId: rawBatchResponse.clientRequestId,\n version: rawBatchResponse.version,\n subResponses: responseSummary.subResponses,\n subResponsesSucceededCount: responseSummary.subResponsesSucceededCount,\n subResponsesFailedCount: responseSummary.subResponsesFailedCount,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n getDefaultProxySettings,\n HttpRequestBody,\n HttpResponse,\n isNode,\n isTokenCredential,\n TokenCredential,\n URLBuilder,\n} from \"@azure/core-http\";\nimport { PagedAsyncIterableIterator, PageSettings } from \"@azure/core-paging\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { Container } from \"./generated/src/operations\";\nimport {\n BlobDeleteResponse,\n BlobPrefix,\n BlobProperties,\n BlockBlobUploadResponse,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ContainerEncryptionScope,\n ContainerFilterBlobsHeaders,\n ContainerGetAccessPolicyHeaders,\n ContainerGetPropertiesResponse,\n ContainerListBlobFlatSegmentHeaders,\n ContainerListBlobHierarchySegmentHeaders,\n ContainerSetAccessPolicyResponse,\n ContainerSetMetadataResponse,\n FilterBlobItem,\n FilterBlobSegment,\n FilterBlobSegmentModel,\n LeaseAccessConditions,\n ListBlobsFlatSegmentResponseModel,\n ListBlobsHierarchySegmentResponseModel,\n PublicAccessType,\n SignedIdentifierModel,\n} from \"./generatedModels\";\nimport {\n Metadata,\n ObjectReplicationPolicy,\n Tags,\n ContainerRequestConditions,\n ModifiedAccessConditions,\n} from \"./models\";\nimport { newPipeline, PipelineLike, isPipelineLike, StoragePipelineOptions } from \"./Pipeline\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n BlobNameToString,\n ConvertInternalResponseOfListBlobFlat,\n ConvertInternalResponseOfListBlobHierarchy,\n 
extractConnectionStringParts,\n isIpEndpointStyle,\n parseObjectReplicationRecord,\n ProcessBlobItems,\n ProcessBlobPrefixes,\n toTags,\n truncatedISO8061Date,\n} from \"./utils/utils.common\";\nimport { ContainerSASPermissions } from \"./sas/ContainerSASPermissions\";\nimport { generateBlobSASQueryParameters } from \"./sas/BlobSASSignatureValues\";\nimport { BlobLeaseClient } from \"./BlobLeaseClient\";\nimport {\n AppendBlobClient,\n BlobClient,\n BlobDeleteOptions,\n BlockBlobClient,\n BlockBlobUploadOptions,\n CommonGenerateSasUrlOptions,\n PageBlobClient,\n} from \"./Clients\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { ListBlobsIncludeItem } from \"./generated/src\";\n\n/**\n * Options to configure {@link ContainerClient.create} operation.\n */\nexport interface ContainerCreateOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * A collection of key-value string pair to associate with the container.\n */\n metadata?: Metadata;\n /**\n * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include:\n * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account.\n * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request.\n */\n access?: PublicAccessType;\n /**\n * Container encryption scope info.\n */\n containerEncryptionScope?: ContainerEncryptionScope;\n}\n\n/**\n * Options to configure {@link ContainerClient.getProperties} operation.\n */\nexport interface ContainerGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.delete} operation.\n */\nexport interface ContainerDeleteMethodOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when deleting the container.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.exists} operation.\n */\nexport interface ContainerExistsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure {@link ContainerClient.setMetadata} operation.\n */\nexport interface ContainerSetMetadataOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal 
the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure {@link ContainerClient.getAccessPolicy} operation.\n */\nexport interface ContainerGetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * If specified, contains the lease id that must be matched and lease with this id\n * must be active in order for the operation to succeed.\n */\n conditions?: LeaseAccessConditions;\n}\n\n/**\n * Signed identifier.\n */\nexport interface SignedIdentifier {\n /**\n * a unique id\n */\n id: string;\n /**\n * Access Policy\n */\n accessPolicy: {\n /**\n * Optional. The date-time the policy is active\n */\n startsOn?: Date;\n /**\n * Optional. The date-time the policy expires\n */\n expiresOn?: Date;\n /**\n * The permissions for the acl policy\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n */\n permissions?: string;\n };\n}\n\n/**\n * Contains response data for the {@link ContainerClient.getAccessPolicy} operation.\n */\nexport declare type ContainerGetAccessPolicyResponse = {\n signedIdentifiers: SignedIdentifier[];\n} & ContainerGetAccessPolicyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerGetAccessPolicyHeaders;\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: SignedIdentifierModel[];\n };\n };\n\n/**\n * Options to configure {@link ContainerClient.setAccessPolicy} operation.\n */\nexport interface ContainerSetAccessPolicyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when setting the access policy.\n */\n conditions?: ContainerRequestConditions;\n}\n\n/**\n * Options to configure Container - Acquire Lease operation.\n */\nexport interface ContainerAcquireLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when acquiring the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Release Lease operation.\n */\nexport interface ContainerReleaseLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when releasing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Renew Lease operation.\n */\nexport interface 
ContainerRenewLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when renewing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Break Lease operation.\n */\nexport interface ContainerBreakLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when breaking the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure Container - Change Lease operation.\n */\nexport interface ContainerChangeLeaseOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Conditions to meet when changing the lease.\n */\n conditions?: ModifiedAccessConditions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.deleteBlob} operation.\n */\nexport interface ContainerDeleteBlobOptions extends BlobDeleteOptions {\n /**\n * An opaque DateTime value that, when present, specifies the version\n * of the blob to delete. It's for service version 2019-10-10 and newer.\n */\n versionId?: string;\n}\n\n/**\n * Options to configure Container - List Segment operations.\n *\n * See:\n * - {@link ContainerClient.listSegments}\n * - {@link ContainerClient.listBlobFlatSegment}\n * - {@link ContainerClient.listBlobHierarchySegment}\n * - {@link ContainerClient.listHierarchySegments}\n * - {@link ContainerClient.listItemsByHierarchy}\n */\ninterface ContainerListBlobsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify one or more datasets to include in the response.\n */\n include?: ListBlobsIncludeItem[];\n}\n\n/**\n * An interface representing BlobHierarchyListSegment.\n */\nexport interface BlobHierarchyListSegment {\n blobPrefixes?: BlobPrefix[];\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsHierarchySegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n delimiter?: string;\n segment: BlobHierarchyListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobHierarchySegment operation.\n */\nexport type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse &\n ContainerListBlobHierarchySegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobHierarchySegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsHierarchySegmentResponseModel;\n };\n };\n\n/**\n * An Azure Storage blob\n */\nexport interface BlobItem {\n name: string;\n deleted: boolean;\n snapshot: string;\n versionId?: string;\n isCurrentVersion?: boolean;\n properties: BlobProperties;\n metadata?: { [propertyName: string]: string };\n tags?: Tags;\n objectReplicationSourceProperties?: ObjectReplicationPolicy[];\n hasVersionsOnly?: boolean;\n}\n\n/**\n * An interface representing BlobFlatListSegment.\n */\nexport interface BlobFlatListSegment {\n blobItems: BlobItem[];\n}\n\n/**\n * An enumeration of blobs\n */\nexport interface ListBlobsFlatSegmentResponse {\n serviceEndpoint: string;\n containerName: string;\n prefix?: string;\n marker?: string;\n maxPageSize?: number;\n segment: BlobFlatListSegment;\n continuationToken?: string;\n}\n\n/**\n * Contains response data for the listBlobFlatSegment operation.\n */\nexport type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse &\n ContainerListBlobFlatSegmentHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerListBlobFlatSegmentHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: ListBlobsFlatSegmentResponseModel;\n };\n };\n\n/**\n * Options to configure Container - List Blobs operations.\n *\n * See:\n * - {@link ContainerClient.listBlobsFlat}\n * - {@link ContainerClient.listBlobsByHierarchy}\n */\nexport interface ContainerListBlobsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n\n /**\n * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response.\n */\n includeCopy?: boolean;\n /**\n * Specifies whether soft deleted blobs should be 
included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether blob metadata be returned in the response.\n */\n includeMetadata?: boolean;\n /**\n * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response.\n */\n includeSnapshots?: boolean;\n /**\n * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response.\n */\n includeVersions?: boolean;\n /**\n * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response.\n */\n includeUncommitedBlobs?: boolean;\n /**\n * Specifies whether blob tags be returned in the response.\n */\n includeTags?: boolean;\n /**\n * Specifies whether deleted blob with versions be returned in the response.\n */\n includeDeletedWithVersions?: boolean;\n /**\n * Specifies whether blob immutability policy be returned in the response.\n */\n includeImmutabilityPolicy?: boolean;\n /**\n * Specifies whether blob legal hold be returned in the response.\n */\n includeLegalHold?: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.createIfNotExists} operation.\n */\nexport interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse {\n /**\n * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists.\n */\n succeeded: boolean;\n}\n\n/**\n * Contains response data for the {@link ContainerClient.deleteIfExists} operation.\n */\nexport interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse {\n /**\n * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place.\n */\n succeeded: boolean;\n}\n\n/**\n * Options to configure {@link ContainerClient.generateSasUrl} operation.\n */\nexport interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions {\n /**\n * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS.\n */\n permissions?: ContainerSASPermissions;\n}\n\n/**\n * Options to configure the {@link ContainerClient.findBlobsByTagsSegment} operation.\n */\ninterface ContainerFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. 
For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ContainerFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ContainerFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ContainerFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs.\n */\nexport class ContainerClient extends StorageClient {\n /**\n * containerContext provided by protocol layer.\n */\n private containerContext: Container;\n\n private _containerName: string;\n\n /**\n * The name of the container.\n */\n public get containerName(): string {\n return this._containerName;\n }\n /**\n *\n * Creates an instance of ContainerClient.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param containerName - Container name.\n * @param options - Optional. Options to configure the HTTP pipeline.\n */\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions);\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of ContainerClient.\n * This method accepts an URL pointing to a container.\n * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped.\n * If a blob name includes ? or %, blob name must be encoded in the URL.\n *\n * @param url - A URL string pointing to Azure Storage container, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer\". You can\n * append a SAS if using AnonymousCredential, such as\n * \"https://myaccount.blob.core.windows.net/mycontainer?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n urlOrConnectionString: string,\n credentialOrPipelineOrContainerName?:\n | string\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n let url: string;\n options = options || {};\n if (isPipelineLike(credentialOrPipelineOrContainerName)) {\n // (url: string, pipeline: Pipeline)\n url = urlOrConnectionString;\n pipeline = credentialOrPipelineOrContainerName;\n } else if (\n (isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) ||\n credentialOrPipelineOrContainerName instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipelineOrContainerName)\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n url = urlOrConnectionString;\n pipeline = newPipeline(credentialOrPipelineOrContainerName, options);\n } else if (\n !credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName !== \"string\"\n ) {\n // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions)\n // The second parameter is undefined. 
Use anonymous credential.\n url = urlOrConnectionString;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else if (\n credentialOrPipelineOrContainerName &&\n typeof credentialOrPipelineOrContainerName === \"string\"\n ) {\n // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions)\n const containerName = credentialOrPipelineOrContainerName;\n\n const extractedCreds = extractConnectionStringParts(urlOrConnectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName));\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n pipeline = newPipeline(sharedKeyCredential, options);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n url =\n appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) +\n \"?\" +\n extractedCreds.accountSas;\n pipeline = newPipeline(new AnonymousCredential(), options);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n } else {\n throw new Error(\"Expecting non-empty strings for containerName parameter\");\n }\n super(url, pipeline);\n this._containerName = this.getContainerNameFromUrl();\n this.containerContext = new Container(this.storageClientContext);\n }\n\n /**\n * Creates a new container under the specified account. If the container with\n * the same name already exists, the operation fails.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options - Options to Container Create operation.\n *\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * const createContainerResponse = await containerClient.create();\n * console.log(\"Container was created successfully\", createContainerResponse.requestId);\n * ```\n */\n public async create(options: ContainerCreateOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-create\", options);\n try {\n // Spread operator in destructuring assignments,\n // this will filter out unwanted properties from the response object into result object\n return await this.containerContext.create({\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a new container under the specified account. 
If the container with\n * the same name already exists, it is not changed.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container\n *\n * @param options -\n */\n public async createIfNotExists(\n options: ContainerCreateOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-createIfNotExists\", options);\n try {\n const res = await this.create(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerAlreadyExists\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message:\n \"Expected exception when creating a container only if it does not already exist.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns true if the Azure container resource represented by this client exists; false otherwise.\n *\n * NOTE: use this function with care since an existing container might be deleted by other clients or\n * applications. Vice versa new containers with the same name might be added by other clients or\n * applications after this function completes.\n *\n * @param options -\n */\n public async exists(options: ContainerExistsOptions = {}): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-exists\", options);\n try {\n await this.getProperties({\n abortSignal: options.abortSignal,\n tracingOptions: updatedOptions.tracingOptions,\n });\n return true;\n } catch (e: any) {\n if (e.statusCode === 404) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when checking container existence\",\n });\n return false;\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a {@link BlobClient}\n *\n * @param blobName - A blob name\n * @returns A new BlobClient object for the given blob name.\n */\n public getBlobClient(blobName: string): BlobClient {\n return new BlobClient(appendToURLPath(this.url, encodeURIComponent(blobName)), this.pipeline);\n }\n\n /**\n * Creates an {@link AppendBlobClient}\n *\n * @param blobName - An append blob name\n */\n public getAppendBlobClient(blobName: string): AppendBlobClient {\n return new AppendBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Creates a {@link BlockBlobClient}\n *\n * @param blobName - A block blob name\n *\n *\n * Example usage:\n *\n * ```js\n * const content = \"Hello world!\";\n *\n * const blockBlobClient = containerClient.getBlockBlobClient(\"\");\n * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);\n * ```\n */\n public getBlockBlobClient(blobName: string): BlockBlobClient {\n return new BlockBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Creates a {@link PageBlobClient}\n *\n * @param blobName - A page blob name\n */\n public getPageBlobClient(blobName: string): PageBlobClient {\n return new PageBlobClient(\n appendToURLPath(this.url, encodeURIComponent(blobName)),\n this.pipeline\n );\n }\n\n /**\n * Returns all user-defined metadata and system properties for the specified\n * container. 
The data returned does not include the container's list of blobs.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties\n *\n * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if\n * they originally contained uppercase characters. This differs from the metadata keys returned by\n * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which\n * will retain their original casing.\n *\n * @param options - Options to Container Get Properties operation.\n */\n public async getProperties(\n options: ContainerGetPropertiesOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getProperties\", options);\n try {\n return await this.containerContext.getProperties({\n abortSignal: options.abortSignal,\n ...options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion. The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async delete(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-delete\", options);\n try {\n return await this.containerContext.delete({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified container for deletion if it exists. 
The container and any blobs\n * contained within it are later deleted during garbage collection.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container\n *\n * @param options - Options to Container Delete operation.\n */\n public async deleteIfExists(\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteIfExists\", options);\n\n try {\n const res = await this.delete(updatedOptions);\n return {\n succeeded: true,\n ...res,\n _response: res._response, // _response is made non-enumerable\n };\n } catch (e: any) {\n if (e.details?.errorCode === \"ContainerNotFound\") {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: \"Expected exception when deleting a container only if it exists.\",\n });\n return {\n succeeded: false,\n ...e.response?.parsedHeaders,\n _response: e.response,\n };\n }\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets one or more user-defined name-value pairs for the specified container.\n *\n * If no option provided, or no metadata defined in the parameter, the container\n * metadata will be removed.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata\n *\n * @param metadata - Replace existing metadata with this value.\n * If no value provided the existing metadata will be removed.\n * @param options - Options to Container Set Metadata operation.\n */\n public async setMetadata(\n metadata?: Metadata,\n options: ContainerSetMetadataOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n if (options.conditions.ifUnmodifiedSince) {\n throw new RangeError(\n \"the IfUnmodifiedSince must have their default values because they are ignored by the blob service\"\n );\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-setMetadata\", options);\n\n try {\n return await this.containerContext.setMetadata({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n metadata,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the permissions for the specified container. 
The permissions indicate\n * whether container data may be accessed publicly.\n *\n * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.\n * For example, new Date(\"2018-12-31T03:44:23.8827891Z\").toISOString() will get \"2018-12-31T03:44:23.882Z\".\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl\n *\n * @param options - Options to Container Get Access Policy operation.\n */\n public async getAccessPolicy(\n options: ContainerGetAccessPolicyOptions = {}\n ): Promise {\n if (!options.conditions) {\n options.conditions = {};\n }\n\n const { span, updatedOptions } = createSpan(\"ContainerClient-getAccessPolicy\", options);\n\n try {\n const response = await this.containerContext.getAccessPolicy({\n abortSignal: options.abortSignal,\n leaseAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const res: ContainerGetAccessPolicyResponse = {\n _response: response._response,\n blobPublicAccess: response.blobPublicAccess,\n date: response.date,\n etag: response.etag,\n errorCode: response.errorCode,\n lastModified: response.lastModified,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n signedIdentifiers: [],\n version: response.version,\n };\n\n for (const identifier of response) {\n let accessPolicy: any = undefined;\n if (identifier.accessPolicy) {\n accessPolicy = {\n permissions: identifier.accessPolicy.permissions,\n };\n\n if (identifier.accessPolicy.expiresOn) {\n accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn);\n }\n\n if (identifier.accessPolicy.startsOn) {\n accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn);\n }\n }\n\n res.signedIdentifiers.push({\n accessPolicy,\n id: identifier.id,\n });\n }\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets the permissions for the specified container. The permissions indicate\n * whether blobs in a container may be accessed publicly.\n *\n * When you set permissions for a container, the existing permissions are replaced.\n * If no access or containerAcl provided, the existing container ACL will be\n * removed.\n *\n * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.\n * During this interval, a shared access signature that is associated with the stored access policy will\n * fail with status code 403 (Forbidden), until the access policy becomes active.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl\n *\n * @param access - The level of public access to data in the container.\n * @param containerAcl - Array of elements each having a unique Id and details of the access policy.\n * @param options - Options to Container Set Access Policy operation.\n */\n public async setAccessPolicy(\n access?: PublicAccessType,\n containerAcl?: SignedIdentifier[],\n options: ContainerSetAccessPolicyOptions = {}\n ): Promise {\n options.conditions = options.conditions || {};\n const { span, updatedOptions } = createSpan(\"ContainerClient-setAccessPolicy\", options);\n try {\n const acl: SignedIdentifierModel[] = [];\n for (const identifier of containerAcl || []) {\n acl.push({\n accessPolicy: {\n expiresOn: identifier.accessPolicy.expiresOn\n ? 
truncatedISO8061Date(identifier.accessPolicy.expiresOn)\n : \"\",\n permissions: identifier.accessPolicy.permissions,\n startsOn: identifier.accessPolicy.startsOn\n ? truncatedISO8061Date(identifier.accessPolicy.startsOn)\n : \"\",\n },\n id: identifier.id,\n });\n }\n\n return await this.containerContext.setAccessPolicy({\n abortSignal: options.abortSignal,\n access,\n containerAcl: acl,\n leaseAccessConditions: options.conditions,\n modifiedAccessConditions: options.conditions,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Get a {@link BlobLeaseClient} that manages leases on the container.\n *\n * @param proposeLeaseId - Initial proposed lease Id.\n * @returns A new BlobLeaseClient object for managing leases on the container.\n */\n public getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient {\n return new BlobLeaseClient(this, proposeLeaseId);\n }\n\n /**\n * Creates a new block blob, or updates the content of an existing block blob.\n *\n * Updating an existing block blob overwrites any existing metadata on the blob.\n * Partial updates are not supported; the content of the existing blob is\n * overwritten with the new content. To perform a partial update of a block blob's,\n * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.\n *\n * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},\n * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better\n * performance with concurrency uploading.\n *\n * @see https://docs.microsoft.com/rest/api/storageservices/put-blob\n *\n * @param blobName - Name of the block blob to create or update.\n * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function\n * which returns a new Readable stream whose offset is from data source beginning.\n * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a\n * string including non non-Base64/Hex-encoded characters.\n * @param options - Options to configure the Block Blob Upload operation.\n * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.\n */\n public async uploadBlockBlob(\n blobName: string,\n body: HttpRequestBody,\n contentLength: number,\n options: BlockBlobUploadOptions = {}\n ): Promise<{ blockBlobClient: BlockBlobClient; response: BlockBlobUploadResponse }> {\n const { span, updatedOptions } = createSpan(\"ContainerClient-uploadBlockBlob\", options);\n try {\n const blockBlobClient = this.getBlockBlobClient(blobName);\n const response = await blockBlobClient.upload(body, contentLength, updatedOptions);\n return {\n blockBlobClient,\n response,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Marks the specified blob or snapshot for deletion. The blob is later deleted\n * during garbage collection. Note that in order to delete a blob, you must delete\n * all of its snapshots. 
You can delete both at the same time with the Delete\n * Blob operation.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob\n *\n * @param blobName -\n * @param options - Options to Blob Delete operation.\n * @returns Block blob deletion response data.\n */\n public async deleteBlob(\n blobName: string,\n options: ContainerDeleteBlobOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-deleteBlob\", options);\n try {\n let blobClient = this.getBlobClient(blobName);\n if (options.versionId) {\n blobClient = blobClient.withVersion(options.versionId);\n }\n return await blobClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobFlatSegment returns a single segment of blobs starting from the\n * specified Marker. Use an empty Marker to start enumeration from the beginning.\n * After getting a segment, process it, and then call listBlobsFlatSegment again\n * (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Flat Segment operation.\n */\n private async listBlobFlatSegment(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-listBlobFlatSegment\", options);\n try {\n const response = await this.containerContext.listBlobFlatSegment({\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n response.segment.blobItems = [];\n if ((response.segment as any)[\"Blob\"] !== undefined) {\n response.segment.blobItems = ProcessBlobItems((response.segment as any)[\"Blob\"]);\n }\n\n const wrappedResponse: ContainerListBlobFlatSegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * listBlobHierarchySegment returns a single segment of blobs starting from\n * the specified Marker. Use an empty Marker to start enumeration from the\n * beginning. 
After getting a segment, process it, and then call listBlobsHierarchicalSegment\n * again (passing the the previously-returned Marker) to get the next segment.\n * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.\n * @param options - Options to Container List Blob Hierarchy Segment operation.\n */\n private async listBlobHierarchySegment(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"ContainerClient-listBlobHierarchySegment\",\n options\n );\n try {\n const response = await this.containerContext.listBlobHierarchySegment(delimiter, {\n marker,\n ...options,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n response.segment.blobItems = [];\n if ((response.segment as any)[\"Blob\"] !== undefined) {\n response.segment.blobItems = ProcessBlobItems((response.segment as any)[\"Blob\"]);\n }\n\n response.segment.blobPrefixes = [];\n if ((response.segment as any)[\"BlobPrefix\"] !== undefined) {\n response.segment.blobPrefixes = ProcessBlobPrefixes(\n (response.segment as any)[\"BlobPrefix\"]\n );\n }\n\n const wrappedResponse: ContainerListBlobHierarchySegmentResponse = {\n ...response,\n _response: {\n ...response._response,\n parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody),\n }, // _response is made non-enumerable\n segment: {\n ...response.segment,\n blobItems: response.segment.blobItems.map((blobItemInteral) => {\n const blobItem: BlobItem = {\n ...blobItemInteral,\n name: BlobNameToString(blobItemInteral.name),\n tags: toTags(blobItemInteral.blobTags),\n objectReplicationSourceProperties: parseObjectReplicationRecord(\n blobItemInteral.objectReplicationMetadata\n ),\n };\n return blobItem;\n }),\n blobPrefixes: response.segment.blobPrefixes?.map((blobPrefixInternal) => {\n const blobPrefix: BlobPrefix = {\n name: BlobNameToString(blobPrefixInternal.name),\n };\n return blobPrefix;\n }),\n },\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse\n *\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listSegments(\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsFlatSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsFlatSegmentResponse = await this.listBlobFlatSegment(marker, options);\n marker = listBlobsFlatSegmentResponse.continuationToken;\n yield await listBlobsFlatSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator of {@link BlobItem} objects\n *\n * @param options - Options to list blobs operation.\n */\n private async *listItems(\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const listBlobsFlatSegmentResponse of this.listSegments(marker, options)) {\n yield* listBlobsFlatSegmentResponse.segment.blobItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * // Get the containerClient before you run these snippets,\n * // Can be obtained from `blobServiceClient.getContainerClient(\"\");`\n * let i = 1;\n * for await (const blob of containerClient.listBlobsFlat()) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * let iter = containerClient.listBlobsFlat();\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) {\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n *\n * // Passing next marker as continuationToken\n *\n * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 blob names\n * for (const blob of response.segment.blobItems) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * @param options - Options to list blobs.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listBlobsFlat(\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator {\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n 
include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include: include } : {}),\n };\n\n // AsyncIterableIterator to iterate over blobs\n const iter = this.listItems(updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the ContinuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The ContinuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to list blobs operation.\n */\n private async *listHierarchySegments(\n delimiter: string,\n marker?: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator {\n let listBlobsHierarchySegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listBlobsHierarchySegmentResponse = await this.listBlobHierarchySegment(\n delimiter,\n marker,\n options\n );\n marker = listBlobsHierarchySegmentResponse.continuationToken;\n yield await listBlobsHierarchySegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects.\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n private async *listItemsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsSegmentOptions = {}\n ): AsyncIterableIterator<({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem)> {\n let marker: string | undefined;\n for await (const listBlobsHierarchySegmentResponse of this.listHierarchySegments(\n delimiter,\n marker,\n options\n )) {\n const segment = listBlobsHierarchySegmentResponse.segment;\n if (segment.blobPrefixes) {\n for (const prefix of segment.blobPrefixes) {\n yield {\n kind: \"prefix\",\n ...prefix,\n };\n }\n }\n for (const blob of segment.blobItems) {\n yield { kind: \"blob\", ...blob };\n }\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the blobs by hierarchy.\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * for await (const item of 
containerClient.listBlobsByHierarchy(\"/\")) {\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let iter = containerClient.listBlobsByHierarchy(\"/\", { prefix: \"prefix1/\" });\n * let entity = await iter.next();\n * while (!entity.done) {\n * let item = entity.value;\n * if (item.kind === \"prefix\") {\n * console.log(`\\tBlobPrefix: ${item.name}`);\n * } else {\n * console.log(`\\tBlobItem: name - ${item.name}`);\n * }\n * entity = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page\");\n * for await (const response of containerClient.listBlobsByHierarchy(\"/\").byPage()) {\n * const segment = response.segment;\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * Example using paging with a max page size:\n *\n * ```js\n * console.log(\"Listing blobs by hierarchy by page, specifying a prefix and a max page size\");\n *\n * let i = 1;\n * for await (const response of containerClient\n * .listBlobsByHierarchy(\"/\", { prefix: \"prefix2/sub1/\" })\n * .byPage({ maxPageSize: 2 })) {\n * console.log(`Page ${i++}`);\n * const segment = response.segment;\n *\n * if (segment.blobPrefixes) {\n * for (const prefix of segment.blobPrefixes) {\n * console.log(`\\tBlobPrefix: ${prefix.name}`);\n * }\n * }\n *\n * for (const blob of response.segment.blobItems) {\n * console.log(`\\tBlobItem: name - ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param delimiter - The character or string used to define the virtual hierarchy\n * @param options - Options to list blobs operation.\n */\n public listBlobsByHierarchy(\n delimiter: string,\n options: ContainerListBlobsOptions = {}\n ): PagedAsyncIterableIterator<\n ({ kind: \"prefix\" } & BlobPrefix) | ({ kind: \"blob\" } & BlobItem),\n ContainerListBlobHierarchySegmentResponse\n > {\n if (delimiter === \"\") {\n throw new RangeError(\"delimiter should contain one or more characters\");\n }\n\n const include: ListBlobsIncludeItem[] = [];\n if (options.includeCopy) {\n include.push(\"copy\");\n }\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSnapshots) {\n include.push(\"snapshots\");\n }\n if (options.includeVersions) {\n include.push(\"versions\");\n }\n if (options.includeUncommitedBlobs) {\n include.push(\"uncommittedblobs\");\n }\n if (options.includeTags) {\n include.push(\"tags\");\n }\n if (options.includeDeletedWithVersions) {\n include.push(\"deletedwithversions\");\n }\n if (options.includeImmutabilityPolicy) {\n include.push(\"immutabilitypolicy\");\n }\n if (options.includeLegalHold) {\n include.push(\"legalhold\");\n }\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const updatedOptions: ContainerListBlobsSegmentOptions = {\n ...options,\n ...(include.length > 0 ? 
{ include: include } : {}),\n };\n // AsyncIterableIterator to iterate over blob prefixes and blobs\n const iter = this.listItemsByHierarchy(delimiter, updatedOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n async next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listHierarchySegments(delimiter, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...updatedOptions,\n });\n },\n };\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs in the container whose tags\n * match a given search expression.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"ContainerClient-findBlobsByTagsSegment\", options);\n\n try {\n const response = await this.containerContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ContainerFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. 
The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified container.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of containerClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = containerClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = containerClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = containerClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n 
* // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ContainerFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ContainerFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n private getContainerNameFromUrl(): string {\n let containerName;\n try {\n // URL may look like the following\n // \"https://myaccount.blob.core.windows.net/mycontainer?sasString\";\n // \"https://myaccount.blob.core.windows.net/mycontainer\";\n // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername`\n // http://localhost:10001/devstoreaccount1/containername\n\n const parsedUrl = URLBuilder.parse(this.url);\n\n if (parsedUrl.getHost()!.split(\".\")[1] === \"blob\") {\n // \"https://myaccount.blob.core.windows.net/containername\".\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n } else if (isIpEndpointStyle(parsedUrl)) {\n // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername\n // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername\n // .getPath() -> /devstoreaccount1/containername\n containerName = parsedUrl.getPath()!.split(\"/\")[2];\n } else {\n // \"https://customdomain.com/containername\".\n // .getPath() -> /containername\n containerName = parsedUrl.getPath()!.split(\"/\")[1];\n }\n\n // decode the encoded containerName - to get all the special characters that might be present in it\n containerName = decodeURIComponent(containerName);\n\n if (!containerName) {\n throw new Error(\"Provided containerName is invalid.\");\n }\n\n return containerName;\n } catch (error: any) {\n throw new Error(\"Unable to extract containerName with provided information.\");\n }\n }\n\n /**\n * Only available for ContainerClient constructed with a shared key credential.\n *\n * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. 
The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas\n *\n * @param options - Optional parameters.\n * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise {\n return new Promise((resolve) => {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw new RangeError(\n \"Can only generate the SAS when the client is initialized with a shared key credential\"\n );\n }\n\n const sas = generateBlobSASQueryParameters(\n {\n containerName: this._containerName,\n ...options,\n },\n this.credential\n ).toString();\n\n resolve(appendToURLQuery(this.url, sas));\n });\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this container.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the\n * values are set, this should be serialized with toString and set as the permissions field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but\n * the order of the permissions is particular and this class guarantees correctness.\n */\nexport class AccountSASPermissions {\n /**\n * Parse initializes the AccountSASPermissions fields from a string.\n *\n * @param permissions -\n */\n public static parse(permissions: string): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n\n for (const c of permissions) {\n switch (c) {\n case \"r\":\n accountSASPermissions.read = true;\n break;\n case \"w\":\n accountSASPermissions.write = true;\n break;\n case \"d\":\n accountSASPermissions.delete = true;\n break;\n case \"x\":\n accountSASPermissions.deleteVersion = true;\n break;\n case \"l\":\n accountSASPermissions.list = true;\n break;\n case \"a\":\n accountSASPermissions.add = true;\n break;\n case \"c\":\n accountSASPermissions.create = true;\n break;\n case \"u\":\n accountSASPermissions.update = true;\n break;\n case \"p\":\n accountSASPermissions.process = true;\n break;\n case \"t\":\n accountSASPermissions.tag = true;\n break;\n case \"f\":\n accountSASPermissions.filter = true;\n break;\n case \"i\":\n accountSASPermissions.setImmutabilityPolicy = true;\n break;\n case \"y\":\n accountSASPermissions.permanentDelete = true;\n break;\n default:\n throw new RangeError(`Invalid permission character: ${c}`);\n }\n }\n\n return accountSASPermissions;\n }\n\n /**\n * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it\n * and boolean values for them.\n *\n * @param permissionLike -\n */\n public static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions {\n const accountSASPermissions = new AccountSASPermissions();\n if (permissionLike.read) {\n accountSASPermissions.read = true;\n }\n if (permissionLike.write) {\n 
accountSASPermissions.write = true;\n }\n if (permissionLike.delete) {\n accountSASPermissions.delete = true;\n }\n if (permissionLike.deleteVersion) {\n accountSASPermissions.deleteVersion = true;\n }\n if (permissionLike.filter) {\n accountSASPermissions.filter = true;\n }\n if (permissionLike.tag) {\n accountSASPermissions.tag = true;\n }\n if (permissionLike.list) {\n accountSASPermissions.list = true;\n }\n if (permissionLike.add) {\n accountSASPermissions.add = true;\n }\n if (permissionLike.create) {\n accountSASPermissions.create = true;\n }\n if (permissionLike.update) {\n accountSASPermissions.update = true;\n }\n if (permissionLike.process) {\n accountSASPermissions.process = true;\n }\n if (permissionLike.setImmutabilityPolicy) {\n accountSASPermissions.setImmutabilityPolicy = true;\n }\n if (permissionLike.permanentDelete) {\n accountSASPermissions.permanentDelete = true;\n }\n return accountSASPermissions;\n }\n\n /**\n * Permission to read resources and list queues and tables granted.\n */\n public read: boolean = false;\n\n /**\n * Permission to write resources granted.\n */\n public write: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public delete: boolean = false;\n\n /**\n * Permission to delete versions granted.\n */\n public deleteVersion: boolean = false;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n public list: boolean = false;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n public add: boolean = false;\n\n /**\n * Permission to create blobs and files granted.\n */\n public create: boolean = false;\n\n /**\n * Permissions to update messages and table entities granted.\n */\n public update: boolean = false;\n\n /**\n * Permission to get and delete messages granted.\n */\n public process: boolean = false;\n\n /**\n * Specfies Tag access granted.\n */\n public tag: boolean = false;\n\n /**\n * Permission to filter blobs.\n */\n public filter: boolean = false;\n\n /**\n * Permission to set immutability policy.\n */\n public setImmutabilityPolicy: boolean = false;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n public permanentDelete: boolean = false;\n\n /**\n * Produces the SAS permissions string for an Azure Storage account.\n * Call this method to set AccountSASSignatureValues Permissions field.\n *\n * Using this method will guarantee the resource types are in\n * an order accepted by the service.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n // The order of the characters should be as specified here to ensure correctness:\n // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n // Use a string array instead of string concatenating += operator for performance\n const permissions: string[] = [];\n if (this.read) {\n permissions.push(\"r\");\n }\n if (this.write) {\n permissions.push(\"w\");\n }\n if (this.delete) {\n permissions.push(\"d\");\n }\n if (this.deleteVersion) {\n permissions.push(\"x\");\n }\n if (this.filter) {\n permissions.push(\"f\");\n }\n if (this.tag) {\n permissions.push(\"t\");\n }\n if (this.list) {\n permissions.push(\"l\");\n }\n if (this.add) {\n permissions.push(\"a\");\n }\n if (this.create) {\n permissions.push(\"c\");\n }\n if (this.update) {\n permissions.push(\"u\");\n }\n if (this.process) {\n permissions.push(\"p\");\n }\n if (this.setImmutabilityPolicy) 
{\n permissions.push(\"i\");\n }\n if (this.permanentDelete) {\n permissions.push(\"y\");\n }\n return permissions.join(\"\");\n }\n}\n\n/**\n * A type that looks like an account SAS permission.\n * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects.\n */\nexport interface AccountSASPermissionsLike {\n /**\n * Permission to read resources and list queues and tables granted.\n */\n read?: boolean;\n\n /**\n * Permission to write resources granted.\n */\n write?: boolean;\n\n /**\n * Permission to delete blobs and files granted.\n */\n delete?: boolean;\n\n /**\n * Permission to delete versions granted.\n */\n deleteVersion?: boolean;\n\n /**\n * Permission to list blob containers, blobs, shares, directories, and files granted.\n */\n list?: boolean;\n\n /**\n * Permission to add messages, table entities, and append to blobs granted.\n */\n add?: boolean;\n\n /**\n * Permission to create blobs and files granted.\n */\n create?: boolean;\n\n /**\n * Permissions to update messages and table entities granted.\n */\n update?: boolean;\n\n /**\n * Permission to get and delete messages granted.\n */\n process?: boolean;\n\n /**\n * Specfies Tag access granted.\n */\n tag?: boolean;\n\n /**\n * Permission to filter blobs.\n */\n filter?: boolean;\n\n /**\n * Permission to set immutability policy.\n */\n setImmutabilityPolicy?: boolean;\n\n /**\n * Specifies that Permanent Delete is permitted.\n */\n permanentDelete?: boolean;\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the\n * values are set, this should be serialized with toString and set as the resources field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but\n * the order of the resources is particular and this class guarantees correctness.\n */\nexport class AccountSASResourceTypes {\n /**\n * Creates an {@link AccountSASResourceTypes} from the specified resource types string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid resource type.\n *\n * @param resourceTypes -\n */\n public static parse(resourceTypes: string): AccountSASResourceTypes {\n const accountSASResourceTypes = new AccountSASResourceTypes();\n\n for (const c of resourceTypes) {\n switch (c) {\n case \"s\":\n accountSASResourceTypes.service = true;\n break;\n case \"c\":\n accountSASResourceTypes.container = true;\n break;\n case \"o\":\n accountSASResourceTypes.object = true;\n break;\n default:\n throw new RangeError(`Invalid resource type: ${c}`);\n }\n }\n\n return accountSASResourceTypes;\n }\n\n /**\n * Permission to access service level APIs granted.\n */\n public service: boolean = false;\n\n /**\n * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted.\n */\n public container: boolean = false;\n\n /**\n * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted.\n */\n public object: boolean = false;\n\n /**\n * Converts the given resource types to a string.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n */\n public toString(): string {\n const resourceTypes: string[] = [];\n if (this.service) {\n resourceTypes.push(\"s\");\n }\n if (this.container) {\n resourceTypes.push(\"c\");\n }\n if (this.object) {\n resourceTypes.push(\"o\");\n }\n return resourceTypes.join(\"\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value\n * to true means that any SAS which uses these permissions will grant access to that service. Once all the\n * values are set, this should be serialized with toString and set as the services field on an\n * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but\n * the order of the services is particular and this class guarantees correctness.\n */\nexport class AccountSASServices {\n /**\n * Creates an {@link AccountSASServices} from the specified services string. 
This method will throw an\n * Error if it encounters a character that does not correspond to a valid service.\n *\n * @param services -\n */\n public static parse(services: string): AccountSASServices {\n const accountSASServices = new AccountSASServices();\n\n for (const c of services) {\n switch (c) {\n case \"b\":\n accountSASServices.blob = true;\n break;\n case \"f\":\n accountSASServices.file = true;\n break;\n case \"q\":\n accountSASServices.queue = true;\n break;\n case \"t\":\n accountSASServices.table = true;\n break;\n default:\n throw new RangeError(`Invalid service character: ${c}`);\n }\n }\n\n return accountSASServices;\n }\n\n /**\n * Permission to access blob resources granted.\n */\n public blob: boolean = false;\n\n /**\n * Permission to access file resources granted.\n */\n public file: boolean = false;\n\n /**\n * Permission to access queue resources granted.\n */\n public queue: boolean = false;\n\n /**\n * Permission to access table resources granted.\n */\n public table: boolean = false;\n\n /**\n * Converts the given services to a string.\n *\n */\n public toString(): string {\n const services: string[] = [];\n if (this.blob) {\n services.push(\"b\");\n }\n if (this.table) {\n services.push(\"t\");\n }\n if (this.queue) {\n services.push(\"q\");\n }\n if (this.file) {\n services.push(\"f\");\n }\n return services.join(\"\");\n }\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\n\nimport { AccountSASPermissions } from \"./AccountSASPermissions\";\nimport { AccountSASResourceTypes } from \"./AccountSASResourceTypes\";\nimport { AccountSASServices } from \"./AccountSASServices\";\nimport { StorageSharedKeyCredential } from \"../credentials/StorageSharedKeyCredential\";\nimport { SasIPRange, ipRangeToString } from \"./SasIPRange\";\nimport { SASProtocol, SASQueryParameters } from \"./SASQueryParameters\";\nimport { SERVICE_VERSION } from \"../utils/constants\";\nimport { truncatedISO8061Date } from \"../utils/utils.common\";\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once\n * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation\n * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters}\n * exist because the former is mutable and a logical representation while the latter is immutable and used to generate\n * actual REST requests.\n *\n * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1\n * for more conceptual information on SAS\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n * for descriptions of the parameters, including which are required\n */\nexport interface AccountSASSignatureValues {\n /**\n * If not provided, this defaults to the service version targeted by this version of the library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n\n /**\n * The time after which the SAS will no longer work.\n */\n expiresOn: Date;\n\n /**\n * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help\n * constructing the permissions string.\n */\n permissions: AccountSASPermissions;\n\n /**\n * Optional. 
IP range allowed.\n */\n ipRange?: SasIPRange;\n\n /**\n * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to\n * construct this value.\n */\n services: string;\n\n /**\n * The values that indicate the resource types accessible with this SAS. Please refer\n * to {@link AccountSASResourceTypes} to construct this value.\n */\n resourceTypes: string;\n\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * ONLY AVAILABLE IN NODE.JS RUNTIME.\n *\n * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual\n * REST request.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas\n *\n * @param accountSASSignatureValues -\n * @param sharedKeyCredential -\n */\nexport function generateAccountSASQueryParameters(\n accountSASSignatureValues: AccountSASSignatureValues,\n sharedKeyCredential: StorageSharedKeyCredential\n): SASQueryParameters {\n const version = accountSASSignatureValues.version\n ? accountSASSignatureValues.version\n : SERVICE_VERSION;\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.setImmutabilityPolicy &&\n version < \"2020-08-04\"\n ) {\n throw RangeError(\"'version' must be >= '2020-08-04' when provided 'i' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.deleteVersion &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'x' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.permanentDelete &&\n version < \"2019-10-10\"\n ) {\n throw RangeError(\"'version' must be >= '2019-10-10' when provided 'y' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.tag &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 't' permission.\");\n }\n\n if (\n accountSASSignatureValues.permissions &&\n accountSASSignatureValues.permissions.filter &&\n version < \"2019-12-12\"\n ) {\n throw RangeError(\"'version' must be >= '2019-12-12' when provided 'f' permission.\");\n }\n\n if (accountSASSignatureValues.encryptionScope && version < \"2020-12-06\") {\n throw RangeError(\"'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS.\");\n }\n\n const parsedPermissions = AccountSASPermissions.parse(\n accountSASSignatureValues.permissions.toString()\n );\n const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString();\n const parsedResourceTypes = AccountSASResourceTypes.parse(\n accountSASSignatureValues.resourceTypes\n ).toString();\n\n let stringToSign: string;\n\n if (version >= \"2020-12-06\") {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n accountSASSignatureValues.encryptionScope ? 
accountSASSignatureValues.encryptionScope : \"\",\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n } else {\n stringToSign = [\n sharedKeyCredential.accountName,\n parsedPermissions,\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.startsOn\n ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false)\n : \"\",\n truncatedISO8061Date(accountSASSignatureValues.expiresOn, false),\n accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : \"\",\n accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : \"\",\n version,\n \"\", // Account SAS requires an additional newline character\n ].join(\"\\n\");\n }\n\n const signature: string = sharedKeyCredential.computeHMACSHA256(stringToSign);\n\n return new SASQueryParameters(\n version,\n signature,\n parsedPermissions.toString(),\n parsedServices,\n parsedResourceTypes,\n accountSASSignatureValues.protocol,\n accountSASSignatureValues.startsOn,\n accountSASSignatureValues.expiresOn,\n accountSASSignatureValues.ipRange,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n undefined,\n accountSASSignatureValues.encryptionScope\n );\n}\n","// Copyright (c) Microsoft Corporation.\n// Licensed under the MIT license.\nimport {\n TokenCredential,\n isTokenCredential,\n isNode,\n HttpResponse,\n getDefaultProxySettings,\n} from \"@azure/core-http\";\nimport { SpanStatusCode } from \"@azure/core-tracing\";\nimport { AbortSignalLike } from \"@azure/abort-controller\";\nimport {\n ServiceGetUserDelegationKeyHeaders,\n ContainerCreateResponse,\n ContainerDeleteResponse,\n ServiceGetPropertiesResponse,\n BlobServiceProperties,\n ServiceSetPropertiesResponse,\n ServiceGetStatisticsResponse,\n ServiceGetAccountInfoResponse,\n ServiceListContainersSegmentResponse,\n ContainerItem,\n UserDelegationKeyModel,\n ContainerUndeleteResponse,\n FilterBlobSegmentModel,\n ServiceFilterBlobsHeaders,\n ContainerRenameResponse,\n LeaseAccessConditions,\n FilterBlobSegment,\n FilterBlobItem,\n} from \"./generatedModels\";\nimport { Container, Service } from \"./generated/src/operations\";\nimport { newPipeline, StoragePipelineOptions, PipelineLike, isPipelineLike } from \"./Pipeline\";\nimport {\n ContainerClient,\n ContainerCreateOptions,\n ContainerDeleteMethodOptions,\n} from \"./ContainerClient\";\nimport {\n appendToURLPath,\n appendToURLQuery,\n extractConnectionStringParts,\n toTags,\n} from \"./utils/utils.common\";\nimport { StorageSharedKeyCredential } from \"./credentials/StorageSharedKeyCredential\";\nimport { AnonymousCredential } from \"./credentials/AnonymousCredential\";\nimport \"@azure/core-paging\";\nimport { PageSettings, PagedAsyncIterableIterator } from \"@azure/core-paging\";\nimport { truncatedISO8061Date } from \"./utils/utils.common\";\nimport { convertTracingToRequestOptionsBase, createSpan } from \"./utils/tracing\";\nimport { BlobBatchClient } from \"./BlobBatchClient\";\nimport { CommonOptions, StorageClient } from \"./StorageClient\";\nimport { AccountSASPermissions } from \"./sas/AccountSASPermissions\";\nimport { SASProtocol } from \"./sas/SASQueryParameters\";\nimport { SasIPRange } from \"./sas/SasIPRange\";\nimport { generateAccountSASQueryParameters } from \"./sas/AccountSASSignatureValues\";\nimport { AccountSASServices } from \"./sas/AccountSASServices\";\nimport { ListContainersIncludeType } from \"./generated/src\";\n\n/**\n * Options to configure the {@link 
BlobServiceClient.getProperties} operation.\n */\nexport interface ServiceGetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.setProperties} operation.\n */\nexport interface ServiceSetPropertiesOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getAccountInfo} operation.\n */\nexport interface ServiceGetAccountInfoOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.getStatistics} operation.\n */\nexport interface ServiceGetStatisticsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the Service - Get User Delegation Key.\n */\nexport interface ServiceGetUserDelegationKeyOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainerSegment} operation.\n */\ninterface ServiceListContainersSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies the maximum number of containers\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n /**\n * Include this parameter to\n * specify that the container's metadata be returned as part of the response\n * body. 
Possible values include: 'metadata'\n */\n include?: ListContainersIncludeType | ListContainersIncludeType[];\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.listContainers} operation.\n */\nexport interface ServiceListContainersOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Filters the results to return only containers\n * whose name begins with the specified prefix.\n */\n prefix?: string;\n /**\n * Specifies whether the container's metadata\n * should be returned as part of the response body.\n */\n includeMetadata?: boolean;\n\n /**\n * Specifies whether soft deleted containers should be included in the response.\n */\n includeDeleted?: boolean;\n /**\n * Specifies whether system containers should be included in the response.\n */\n includeSystem?: boolean;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTagsSegment} operation.\n */\ninterface ServiceFindBlobsByTagsSegmentOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Specifies the maximum number of blobs\n * to return. If the request does not specify maxPageSize, or specifies a\n * value greater than 5000, the server will return up to 5000 items. Note\n * that if the listing operation crosses a partition boundary, then the\n * service will return a continuation token for retrieving the remainder of\n * the results. For this reason, it is possible that the service will return\n * fewer results than specified by maxPageSize, or than the default of 5000.\n */\n maxPageSize?: number;\n}\n\n/**\n * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport interface ServiceFindBlobByTagsOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n}\n\n/**\n * The response of {@link BlobServiceClient.findBlobsByTags} operation.\n */\nexport type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment &\n ServiceFilterBlobsHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceFilterBlobsHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: FilterBlobSegmentModel;\n };\n };\n\n/**\n * A user delegation key.\n */\nexport interface UserDelegationKey {\n /**\n * The Azure Active Directory object ID in GUID format.\n */\n signedObjectId: string;\n /**\n * The Azure Active Directory tenant ID in GUID format.\n */\n signedTenantId: string;\n /**\n * The date-time the key is active.\n */\n signedStartsOn: Date;\n /**\n * The date-time the key expires.\n */\n signedExpiresOn: Date;\n /**\n * Abbreviation of the Azure Storage service that accepts the key.\n */\n signedService: string;\n /**\n * The service version that created the key.\n */\n signedVersion: string;\n /**\n * The key as a base64 string.\n */\n value: string;\n}\n\n/**\n * 
Contains response data for the {@link getUserDelegationKey} operation.\n */\nexport declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey &\n ServiceGetUserDelegationKeyHeaders & {\n /**\n * The underlying HTTP response.\n */\n _response: HttpResponse & {\n /**\n * The parsed HTTP response headers.\n */\n parsedHeaders: ServiceGetUserDelegationKeyHeaders;\n\n /**\n * The response body as text (string format)\n */\n bodyAsText: string;\n\n /**\n * The response body as parsed JSON or XML\n */\n parsedBody: UserDelegationKeyModel;\n };\n };\n\n/**\n * Options to configure {@link BlobServiceClient.undeleteContainer} operation.\n */\nexport interface ServiceUndeleteContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n /**\n * Optional. Specifies the new name of the restored container.\n * Will use its original name if this is not specified.\n * @deprecated Restore container to a different name is not supported by service anymore.\n */\n destinationContainerName?: string;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.renameContainer} operation.\n */\nexport interface ServiceRenameContainerOptions extends CommonOptions {\n /**\n * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.\n * For example, use the @azure/abort-controller to create an `AbortSignal`.\n */\n abortSignal?: AbortSignalLike;\n\n /**\n * Condition to meet for the source container.\n */\n sourceCondition?: LeaseAccessConditions;\n}\n\n/**\n * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation.\n */\nexport interface ServiceGenerateAccountSasUrlOptions {\n /**\n * The version of the service this SAS will target. If not specified, it will default to the version targeted by the\n * library.\n */\n version?: string;\n\n /**\n * Optional. SAS protocols allowed.\n */\n protocol?: SASProtocol;\n\n /**\n * Optional. When the SAS will take effect.\n */\n startsOn?: Date;\n /**\n * Optional. IP range allowed.\n */\n ipRange?: SasIPRange;\n /**\n * Optional. Encryption scope to use when sending requests authorized with this SAS URI.\n */\n encryptionScope?: string;\n}\n\n/**\n * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you\n * to manipulate blob containers.\n */\nexport class BlobServiceClient extends StorageClient {\n /**\n * serviceContext provided by protocol layer.\n */\n private serviceContext: Service;\n\n /**\n *\n * Creates an instance of BlobServiceClient from connection string.\n *\n * @param connectionString - Account connection string or a SAS connection string of an Azure storage account.\n * [ Note - Account connection string can only be used in NODE.JS runtime. ]\n * Account connection string example -\n * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net`\n * SAS connection string example -\n * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString`\n * @param options - Optional. 
Options to configure the HTTP pipeline.\n */\n public static fromConnectionString(\n connectionString: string,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ): BlobServiceClient {\n options = options || {};\n const extractedCreds = extractConnectionStringParts(connectionString);\n if (extractedCreds.kind === \"AccountConnString\") {\n if (isNode) {\n const sharedKeyCredential = new StorageSharedKeyCredential(\n extractedCreds.accountName!,\n extractedCreds.accountKey\n );\n\n if (!options.proxyOptions) {\n options.proxyOptions = getDefaultProxySettings(extractedCreds.proxyUri);\n }\n\n const pipeline = newPipeline(sharedKeyCredential, options);\n return new BlobServiceClient(extractedCreds.url, pipeline);\n } else {\n throw new Error(\"Account connection string is only supported in Node.js environment\");\n }\n } else if (extractedCreds.kind === \"SASConnString\") {\n const pipeline = newPipeline(new AnonymousCredential(), options);\n return new BlobServiceClient(extractedCreds.url + \"?\" + extractedCreds.accountSas, pipeline);\n } else {\n throw new Error(\n \"Connection string must be either an Account connection string or a SAS connection string\"\n );\n }\n }\n\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used.\n * @param options - Optional. Options to configure the HTTP pipeline.\n *\n * Example using DefaultAzureCredential from `@azure/identity`:\n *\n * ```js\n * const account = \"\";\n *\n * const defaultAzureCredential = new DefaultAzureCredential();\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * defaultAzureCredential\n * );\n * ```\n *\n * Example using an account name/key:\n *\n * ```js\n * const account = \"\"\n * const sharedKeyCredential = new StorageSharedKeyCredential(account, \"\");\n *\n * const blobServiceClient = new BlobServiceClient(\n * `https://${account}.blob.core.windows.net`,\n * sharedKeyCredential\n * );\n * ```\n */\n constructor(\n url: string,\n credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n );\n /**\n * Creates an instance of BlobServiceClient.\n *\n * @param url - A Client string pointing to Azure Storage blob service, such as\n * \"https://myaccount.blob.core.windows.net\". 
You can append a SAS\n * if using AnonymousCredential, such as \"https://myaccount.blob.core.windows.net?sasString\".\n * @param pipeline - Call newPipeline() to create a default\n * pipeline, or provide a customized pipeline.\n */\n constructor(url: string, pipeline: PipelineLike);\n constructor(\n url: string,\n credentialOrPipeline?:\n | StorageSharedKeyCredential\n | AnonymousCredential\n | TokenCredential\n | PipelineLike,\n // Legacy, no fix for eslint error without breaking. Disable it for this interface.\n /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/\n options?: StoragePipelineOptions\n ) {\n let pipeline: PipelineLike;\n if (isPipelineLike(credentialOrPipeline)) {\n pipeline = credentialOrPipeline;\n } else if (\n (isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) ||\n credentialOrPipeline instanceof AnonymousCredential ||\n isTokenCredential(credentialOrPipeline)\n ) {\n pipeline = newPipeline(credentialOrPipeline, options);\n } else {\n // The second parameter is undefined. Use anonymous credential\n pipeline = newPipeline(new AnonymousCredential(), options);\n }\n super(url, pipeline);\n this.serviceContext = new Service(this.storageClientContext);\n }\n\n /**\n * Creates a {@link ContainerClient} object\n *\n * @param containerName - A container name\n * @returns A new ContainerClient object for the given container name.\n *\n * Example usage:\n *\n * ```js\n * const containerClient = blobServiceClient.getContainerClient(\"\");\n * ```\n */\n public getContainerClient(containerName: string): ContainerClient {\n return new ContainerClient(\n appendToURLPath(this.url, encodeURIComponent(containerName)),\n this.pipeline\n );\n }\n\n /**\n * Create a Blob container.\n *\n * @param containerName - Name of the container to create.\n * @param options - Options to configure Container Create operation.\n * @returns Container creation response and the corresponding container client.\n */\n public async createContainer(\n containerName: string,\n options: ContainerCreateOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerCreateResponse: ContainerCreateResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-createContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n const containerCreateResponse = await containerClient.create(updatedOptions);\n return {\n containerClient,\n containerCreateResponse,\n };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Deletes a Blob container.\n *\n * @param containerName - Name of the container to delete.\n * @param options - Options to configure Container Delete operation.\n * @returns Container deletion response.\n */\n public async deleteContainer(\n containerName: string,\n options: ContainerDeleteMethodOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-deleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(containerName);\n return await containerClient.delete(updatedOptions);\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Restore a previously deleted Blob container.\n * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container.\n *\n * @param deletedContainerName - Name 
of the previously deleted container.\n * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container.\n * @param options - Options to configure Container Restore operation.\n * @returns Container deletion response.\n */\n public async undeleteContainer(\n deletedContainerName: string,\n deletedContainerVersion: string,\n options: ServiceUndeleteContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerUndeleteResponse: ContainerUndeleteResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-undeleteContainer\", options);\n try {\n const containerClient = this.getContainerClient(\n options.destinationContainerName || deletedContainerName\n );\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerUndeleteResponse = await containerContext.restore({\n deletedContainerName,\n deletedContainerVersion,\n ...updatedOptions,\n });\n return { containerClient, containerUndeleteResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Rename an existing Blob Container.\n *\n * @param sourceContainerName - The name of the source container.\n * @param destinationContainerName - The new name of the container.\n * @param options - Options to configure Container Rename operation.\n */\n /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */\n // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready.\n private async renameContainer(\n sourceContainerName: string,\n destinationContainerName: string,\n options: ServiceRenameContainerOptions = {}\n ): Promise<{\n containerClient: ContainerClient;\n containerRenameResponse: ContainerRenameResponse;\n }> {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-renameContainer\", options);\n try {\n const containerClient = this.getContainerClient(destinationContainerName);\n // Hack to access a protected member.\n const containerContext = new Container(containerClient[\"storageClientContext\"]);\n const containerRenameResponse = await containerContext.rename(sourceContainerName, {\n ...updatedOptions,\n sourceLeaseId: options.sourceCondition?.leaseId,\n });\n return { containerClient, containerRenameResponse };\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Gets the properties of a storage account’s Blob service, including properties\n * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * @param options - Options to the Service Get Properties operation.\n * @returns Response data for the Service Get Properties operation.\n */\n public async getProperties(\n options: ServiceGetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getProperties\", options);\n try {\n return await this.serviceContext.getProperties({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Sets properties for a 
storage account’s Blob service endpoint, including properties\n * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties\n *\n * @param properties -\n * @param options - Options to the Service Set Properties operation.\n * @returns Response data for the Service Set Properties operation.\n */\n public async setProperties(\n properties: BlobServiceProperties,\n options: ServiceSetPropertiesOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-setProperties\", options);\n try {\n return await this.serviceContext.setProperties(properties, {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Retrieves statistics related to replication for the Blob service. It is only\n * available on the secondary location endpoint when read-access geo-redundant\n * replication is enabled for the storage account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats\n *\n * @param options - Options to the Service Get Statistics operation.\n * @returns Response data for the Service Get Statistics operation.\n */\n public async getStatistics(\n options: ServiceGetStatisticsOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getStatistics\", options);\n try {\n return await this.serviceContext.getStatistics({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Get Account Information operation returns the sku name and account kind\n * for the specified account.\n * The Get Account Information operation is available on service versions beginning\n * with version 2018-03-28.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information\n *\n * @param options - Options to the Service Get Account Info operation.\n * @returns Response data for the Service Get Account Info operation.\n */\n public async getAccountInfo(\n options: ServiceGetAccountInfoOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getAccountInfo\", options);\n try {\n return await this.serviceContext.getAccountInfo({\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns a list of the containers under the specified account.\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to the Service List Container Segment operation.\n * @returns Response data for the Service List Container Segment operation.\n */\n private async listContainersSegment(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-listContainersSegment\", options);\n\n try {\n return await this.serviceContext.listContainersSegment({\n abortSignal: options.abortSignal,\n marker,\n ...options,\n include: typeof options.include === \"string\" ? [options.include] : options.include,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * The Filter Blobs operation enables callers to list blobs across all containers whose tags\n * match a given search expression. Filter blobs searches across all containers within a\n * storage account but can be scoped within the expression to a single container.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async findBlobsByTagsSegment(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\n \"BlobServiceClient-findBlobsByTagsSegment\",\n options\n );\n\n try {\n const response = await this.serviceContext.filterBlobs({\n abortSignal: options.abortSignal,\n where: tagFilterSqlExpression,\n marker,\n maxPageSize: options.maxPageSize,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n });\n\n const wrappedResponse: ServiceFindBlobsByTagsSegmentResponse = {\n ...response,\n _response: response._response, // _response is made non-enumerable\n blobs: response.blobs.map((blob) => {\n let tagValue = \"\";\n if (blob.tags?.blobTagSet.length === 1) {\n tagValue = blob.tags.blobTagSet[0].value;\n }\n return { ...blob, tags: toTags(blob.tags), tagValue };\n }),\n };\n return wrappedResponse;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param marker - A string value that identifies the portion of\n * the list of blobs to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all blobs remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to find blobs by tags.\n */\n private async *findBlobsByTagsSegments(\n tagFilterSqlExpression: string,\n marker?: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let response;\n if (!!marker || marker === undefined) {\n do {\n response = await this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options);\n response.blobs = response.blobs || [];\n marker = response.continuationToken;\n yield response;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for blobs.\n *\n * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to findBlobsByTagsItems.\n */\n private async *findBlobsByTagsItems(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobsByTagsSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.findBlobsByTagsSegments(\n tagFilterSqlExpression,\n marker,\n options\n )) {\n yield* segment.blobs;\n }\n }\n\n /**\n * Returns an async iterable iterator to find all blobs with specified tag\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the blobs in pages.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const blob of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\")) {\n * console.log(`Blob ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\");\n * let blobItem = await iter.next();\n * while (!blobItem.done) {\n * console.log(`Blob ${i++}: ${blobItem.value.name}`);\n * blobItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 20 })) {\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.findBlobsByTags(\"tagkey='tagvalue'\").byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .findBlobsByTags(\"tagkey='tagvalue'\")\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints blob names\n * if (response.blobs) {\n * for (const blob of response.blobs) {\n * console.log(`Blob ${i++}: ${blob.name}`);\n * }\n * }\n * ```\n *\n * @param tagFilterSqlExpression - The where parameter 
enables the caller to query blobs whose tags match a given expression.\n * The given expression must evaluate to true for a blob to be returned in the results.\n * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter;\n * however, only a subset of the OData filter syntax is supported in the Blob service.\n * @param options - Options to find blobs by tags.\n */\n public findBlobsByTags(\n tagFilterSqlExpression: string,\n options: ServiceFindBlobByTagsOptions = {}\n ): PagedAsyncIterableIterator {\n // AsyncIterableIterator to iterate over blobs\n const listSegmentOptions: ServiceFindBlobsByTagsSegmentOptions = {\n ...options,\n };\n\n const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses\n *\n * @param marker - A string value that identifies the portion of\n * the list of containers to be returned with the next listing operation. The\n * operation returns the continuationToken value within the response body if the\n * listing operation did not return all containers remaining to be listed\n * with the current page. The continuationToken value can be used as the value for\n * the marker parameter in a subsequent call to request the next page of list\n * items. 
The marker value is opaque to the client.\n * @param options - Options to list containers operation.\n */\n private async *listSegments(\n marker?: string,\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let listContainersSegmentResponse;\n if (!!marker || marker === undefined) {\n do {\n listContainersSegmentResponse = await this.listContainersSegment(marker, options);\n listContainersSegmentResponse.containerItems =\n listContainersSegmentResponse.containerItems || [];\n marker = listContainersSegmentResponse.continuationToken;\n yield await listContainersSegmentResponse;\n } while (marker);\n }\n }\n\n /**\n * Returns an AsyncIterableIterator for Container Items\n *\n * @param options - Options to list containers operation.\n */\n private async *listItems(\n options: ServiceListContainersSegmentOptions = {}\n ): AsyncIterableIterator {\n let marker: string | undefined;\n for await (const segment of this.listSegments(marker, options)) {\n yield* segment.containerItems;\n }\n }\n\n /**\n * Returns an async iterable iterator to list all the containers\n * under the specified account.\n *\n * .byPage() returns an async iterable iterator to list the containers in pages.\n *\n * Example using `for await` syntax:\n *\n * ```js\n * let i = 1;\n * for await (const container of blobServiceClient.listContainers()) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * ```\n *\n * Example using `iter.next()`:\n *\n * ```js\n * let i = 1;\n * const iter = blobServiceClient.listContainers();\n * let containerItem = await iter.next();\n * while (!containerItem.done) {\n * console.log(`Container ${i++}: ${containerItem.value.name}`);\n * containerItem = await iter.next();\n * }\n * ```\n *\n * Example using `byPage()`:\n *\n * ```js\n * // passing optional maxPageSize in the page settings\n * let i = 1;\n * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) {\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * }\n * ```\n *\n * Example using paging with a marker:\n *\n * ```js\n * let i = 1;\n * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 });\n * let response = (await iterator.next()).value;\n *\n * // Prints 2 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n *\n * // Gets next marker\n * let marker = response.continuationToken;\n * // Passing next marker as continuationToken\n * iterator = blobServiceClient\n * .listContainers()\n * .byPage({ continuationToken: marker, maxPageSize: 10 });\n * response = (await iterator.next()).value;\n *\n * // Prints 10 container names\n * if (response.containerItems) {\n * for (const container of response.containerItems) {\n * console.log(`Container ${i++}: ${container.name}`);\n * }\n * }\n * ```\n *\n * @param options - Options to list containers.\n * @returns An asyncIterableIterator that supports paging.\n */\n public listContainers(\n options: ServiceListContainersOptions = {}\n ): PagedAsyncIterableIterator {\n if (options.prefix === \"\") {\n options.prefix = undefined;\n }\n\n const include: ListContainersIncludeType[] = [];\n if (options.includeDeleted) {\n include.push(\"deleted\");\n }\n if (options.includeMetadata) {\n include.push(\"metadata\");\n }\n if (options.includeSystem) {\n 
include.push(\"system\");\n }\n\n // AsyncIterableIterator to iterate over containers\n const listSegmentOptions: ServiceListContainersSegmentOptions = {\n ...options,\n ...(include.length > 0 ? { include } : {}),\n };\n\n const iter = this.listItems(listSegmentOptions);\n return {\n /**\n * The next method, part of the iteration protocol\n */\n next() {\n return iter.next();\n },\n /**\n * The connection to the async iterator, part of the iteration protocol\n */\n [Symbol.asyncIterator]() {\n return this;\n },\n /**\n * Return an AsyncIterableIterator that works a page at a time\n */\n byPage: (settings: PageSettings = {}) => {\n return this.listSegments(settings.continuationToken, {\n maxPageSize: settings.maxPageSize,\n ...listSegmentOptions,\n });\n },\n };\n }\n\n /**\n * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential).\n *\n * Retrieves a user delegation key for the Blob service. This is only a valid operation when using\n * bearer token authentication.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key\n *\n * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time\n * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time\n */\n public async getUserDelegationKey(\n startsOn: Date,\n expiresOn: Date,\n options: ServiceGetUserDelegationKeyOptions = {}\n ): Promise {\n const { span, updatedOptions } = createSpan(\"BlobServiceClient-getUserDelegationKey\", options);\n try {\n const response = await this.serviceContext.getUserDelegationKey(\n {\n startsOn: truncatedISO8061Date(startsOn, false),\n expiresOn: truncatedISO8061Date(expiresOn, false),\n },\n {\n abortSignal: options.abortSignal,\n ...convertTracingToRequestOptionsBase(updatedOptions),\n }\n );\n\n const userDelegationKey = {\n signedObjectId: response.signedObjectId,\n signedTenantId: response.signedTenantId,\n signedStartsOn: new Date(response.signedStartsOn),\n signedExpiresOn: new Date(response.signedExpiresOn),\n signedService: response.signedService,\n signedVersion: response.signedVersion,\n value: response.value,\n };\n\n const res: ServiceGetUserDelegationKeyResponse = {\n _response: response._response,\n requestId: response.requestId,\n clientRequestId: response.clientRequestId,\n version: response.version,\n date: response.date,\n errorCode: response.errorCode,\n ...userDelegationKey,\n };\n\n return res;\n } catch (e: any) {\n span.setStatus({\n code: SpanStatusCode.ERROR,\n message: e.message,\n });\n throw e;\n } finally {\n span.end();\n }\n }\n\n /**\n * Creates a BlobBatchClient object to conduct batch operations.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch\n *\n * @returns A new BlobBatchClient object for this service.\n */\n public getBlobBatchClient(): BlobBatchClient {\n return new BlobBatchClient(this.url, this.pipeline);\n }\n\n /**\n * Only available for BlobServiceClient constructed with a shared key credential.\n *\n * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties\n * and parameters passed in. The SAS is signed by the shared key credential of the client.\n *\n * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas\n *\n * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. 
Default to an hour later if not provided.\n * @param permissions - Specifies the list of permissions to be associated with the SAS.\n * @param resourceTypes - Specifies the resource types associated with the shared access signature.\n * @param options - Optional parameters.\n * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.\n */\n public generateAccountSasUrl(\n expiresOn?: Date,\n permissions: AccountSASPermissions = AccountSASPermissions.parse(\"r\"),\n resourceTypes: string = \"sco\",\n options: ServiceGenerateAccountSasUrlOptions = {}\n ): string {\n if (!(this.credential instanceof StorageSharedKeyCredential)) {\n throw RangeError(\n \"Can only generate the account SAS when the client is initialized with a shared key credential\"\n );\n }\n\n if (expiresOn === undefined) {\n const now = new Date();\n expiresOn = new Date(now.getTime() + 3600 * 1000);\n }\n\n const sas = generateAccountSASQueryParameters(\n {\n permissions,\n expiresOn,\n resourceTypes,\n services: AccountSASServices.parse(\"b\").toString(),\n ...options,\n },\n this.credential\n ).toString();\n\n return appendToURLQuery(this.url, sas);\n }\n}\n"],"names":["BlobServicePropertiesMapper","QueryCollectionFormat","KeyInfoMapper","QueryRequestMapper","BlobTagsMapper","BlockLookupListMapper","coreHttp","getPropertiesOperationSpec","getAccountInfoOperationSpec","submitBatchOperationSpec","filterBlobsOperationSpec","xmlSerializer","Mappers.ServiceSetPropertiesHeaders","Mappers.StorageError","Mappers.ServiceSetPropertiesExceptionHeaders","Parameters.blobServiceProperties","Parameters.restype","Parameters.comp","Parameters.timeoutInSeconds","Parameters.url","Parameters.contentType","Parameters.accept","Parameters.version","Parameters.requestId","Mappers.BlobServiceProperties","Mappers.ServiceGetPropertiesHeaders","Mappers.ServiceGetPropertiesExceptionHeaders","Parameters.accept1","Mappers.BlobServiceStatistics","Mappers.ServiceGetStatisticsHeaders","Mappers.ServiceGetStatisticsExceptionHeaders","Parameters.comp1","Mappers.ListContainersSegmentResponse","Mappers.ServiceListContainersSegmentHeaders","Mappers.ServiceListContainersSegmentExceptionHeaders","Parameters.comp2","Parameters.prefix","Parameters.marker","Parameters.maxPageSize","Parameters.include","Mappers.UserDelegationKey","Mappers.ServiceGetUserDelegationKeyHeaders","Mappers.ServiceGetUserDelegationKeyExceptionHeaders","Parameters.keyInfo","Parameters.comp3","Mappers.ServiceGetAccountInfoHeaders","Mappers.ServiceGetAccountInfoExceptionHeaders","Parameters.restype1","Mappers.ServiceSubmitBatchHeaders","Mappers.ServiceSubmitBatchExceptionHeaders","Parameters.body","Parameters.comp4","Parameters.contentLength","Parameters.multipartContentType","Mappers.FilterBlobSegment","Mappers.ServiceFilterBlobsHeaders","Mappers.ServiceFilterBlobsExceptionHeaders","Parameters.comp5","Parameters.where","createOperationSpec","deleteOperationSpec","setMetadataOperationSpec","acquireLeaseOperationSpec","releaseLeaseOperationSpec","renewLeaseOperationSpec","breakLeaseOperationSpec","changeLeaseOperationSpec","Mappers.ContainerCreateHeaders","Mappers.ContainerCreateExceptionHeaders","Parameters.restype2","Parameters.metadata","Parameters.access","Parameters.defaultEncryptionScope","Parameters.preventEncryptionScopeOverride","Mappers.ContainerGetPropertiesHeaders","Mappers.ContainerGetPropertiesExceptionHeaders","Parameters.leaseId","Mappers.ContainerDeleteHeaders","Mappers.ContainerDeleteExceptionHeaders","Par
ameters.ifModifiedSince","Parameters.ifUnmodifiedSince","Mappers.ContainerSetMetadataHeaders","Mappers.ContainerSetMetadataExceptionHeaders","Parameters.comp6","Mappers.ContainerGetAccessPolicyHeaders","Mappers.ContainerGetAccessPolicyExceptionHeaders","Parameters.comp7","Mappers.ContainerSetAccessPolicyHeaders","Mappers.ContainerSetAccessPolicyExceptionHeaders","Parameters.containerAcl","Mappers.ContainerRestoreHeaders","Mappers.ContainerRestoreExceptionHeaders","Parameters.comp8","Parameters.deletedContainerName","Parameters.deletedContainerVersion","Mappers.ContainerRenameHeaders","Mappers.ContainerRenameExceptionHeaders","Parameters.comp9","Parameters.sourceContainerName","Parameters.sourceLeaseId","Mappers.ContainerSubmitBatchHeaders","Mappers.ContainerSubmitBatchExceptionHeaders","Mappers.ContainerFilterBlobsHeaders","Mappers.ContainerFilterBlobsExceptionHeaders","Mappers.ContainerAcquireLeaseHeaders","Mappers.ContainerAcquireLeaseExceptionHeaders","Parameters.comp10","Parameters.action","Parameters.duration","Parameters.proposedLeaseId","Mappers.ContainerReleaseLeaseHeaders","Mappers.ContainerReleaseLeaseExceptionHeaders","Parameters.action1","Parameters.leaseId1","Mappers.ContainerRenewLeaseHeaders","Mappers.ContainerRenewLeaseExceptionHeaders","Parameters.action2","Mappers.ContainerBreakLeaseHeaders","Mappers.ContainerBreakLeaseExceptionHeaders","Parameters.action3","Parameters.breakPeriod","Mappers.ContainerChangeLeaseHeaders","Mappers.ContainerChangeLeaseExceptionHeaders","Parameters.action4","Parameters.proposedLeaseId1","Mappers.ListBlobsFlatSegmentResponse","Mappers.ContainerListBlobFlatSegmentHeaders","Mappers.ContainerListBlobFlatSegmentExceptionHeaders","Parameters.include1","Mappers.ListBlobsHierarchySegmentResponse","Mappers.ContainerListBlobHierarchySegmentHeaders","Mappers.ContainerListBlobHierarchySegmentExceptionHeaders","Parameters.delimiter","Mappers.ContainerGetAccountInfoHeaders","Mappers.ContainerGetAccountInfoExceptionHeaders","Blob","Mappers.BlobDownloadHeaders","Mappers.BlobDownloadExceptionHeaders","Parameters.snapshot","Parameters.versionId","Parameters.range","Parameters.rangeGetContentMD5","Parameters.rangeGetContentCRC64","Parameters.encryptionKey","Parameters.encryptionKeySha256","Parameters.encryptionAlgorithm","Parameters.ifMatch","Parameters.ifNoneMatch","Parameters.ifTags","Mappers.BlobGetPropertiesHeaders","Mappers.BlobGetPropertiesExceptionHeaders","Mappers.BlobDeleteHeaders","Mappers.BlobDeleteExceptionHeaders","Parameters.blobDeleteType","Parameters.deleteSnapshots","Mappers.BlobUndeleteHeaders","Mappers.BlobUndeleteExceptionHeaders","Mappers.BlobSetExpiryHeaders","Mappers.BlobSetExpiryExceptionHeaders","Parameters.comp11","Parameters.expiryOptions","Parameters.expiresOn","Mappers.BlobSetHttpHeadersHeaders","Mappers.BlobSetHttpHeadersExceptionHeaders","Parameters.blobCacheControl","Parameters.blobContentType","Parameters.blobContentMD5","Parameters.blobContentEncoding","Parameters.blobContentLanguage","Parameters.blobContentDisposition","Mappers.BlobSetImmutabilityPolicyHeaders","Mappers.BlobSetImmutabilityPolicyExceptionHeaders","Parameters.comp12","Parameters.immutabilityPolicyExpiry","Parameters.immutabilityPolicyMode","Mappers.BlobDeleteImmutabilityPolicyHeaders","Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders","Mappers.BlobSetLegalHoldHeaders","Mappers.BlobSetLegalHoldExceptionHeaders","Parameters.comp13","Parameters.legalHold","Mappers.BlobSetMetadataHeaders","Mappers.BlobSetMetadataExceptionHeaders","Parameters.encryptionScope","Mapp
ers.BlobAcquireLeaseHeaders","Mappers.BlobAcquireLeaseExceptionHeaders","Mappers.BlobReleaseLeaseHeaders","Mappers.BlobReleaseLeaseExceptionHeaders","Mappers.BlobRenewLeaseHeaders","Mappers.BlobRenewLeaseExceptionHeaders","Mappers.BlobChangeLeaseHeaders","Mappers.BlobChangeLeaseExceptionHeaders","Mappers.BlobBreakLeaseHeaders","Mappers.BlobBreakLeaseExceptionHeaders","Mappers.BlobCreateSnapshotHeaders","Mappers.BlobCreateSnapshotExceptionHeaders","Parameters.comp14","Mappers.BlobStartCopyFromURLHeaders","Mappers.BlobStartCopyFromURLExceptionHeaders","Parameters.tier","Parameters.rehydratePriority","Parameters.sourceIfModifiedSince","Parameters.sourceIfUnmodifiedSince","Parameters.sourceIfMatch","Parameters.sourceIfNoneMatch","Parameters.sourceIfTags","Parameters.copySource","Parameters.blobTagsString","Parameters.sealBlob","Parameters.legalHold1","Mappers.BlobCopyFromURLHeaders","Mappers.BlobCopyFromURLExceptionHeaders","Parameters.xMsRequiresSync","Parameters.sourceContentMD5","Parameters.copySourceAuthorization","Parameters.copySourceTags","Mappers.BlobAbortCopyFromURLHeaders","Mappers.BlobAbortCopyFromURLExceptionHeaders","Parameters.comp15","Parameters.copyId","Parameters.copyActionAbortConstant","Mappers.BlobSetTierHeaders","Mappers.BlobSetTierExceptionHeaders","Parameters.comp16","Parameters.tier1","Mappers.BlobGetAccountInfoHeaders","Mappers.BlobGetAccountInfoExceptionHeaders","Mappers.BlobQueryHeaders","Mappers.BlobQueryExceptionHeaders","Parameters.queryRequest","Parameters.comp17","Mappers.BlobTags","Mappers.BlobGetTagsHeaders","Mappers.BlobGetTagsExceptionHeaders","Parameters.comp18","Mappers.BlobSetTagsHeaders","Mappers.BlobSetTagsExceptionHeaders","Parameters.tags","Parameters.transactionalContentMD5","Parameters.transactionalContentCrc64","serializer","Mappers.PageBlobCreateHeaders","Mappers.PageBlobCreateExceptionHeaders","Parameters.blobType","Parameters.blobContentLength","Parameters.blobSequenceNumber","Mappers.PageBlobUploadPagesHeaders","Mappers.PageBlobUploadPagesExceptionHeaders","Parameters.body1","Parameters.comp19","Parameters.contentType1","Parameters.accept2","Parameters.pageWrite","Parameters.ifSequenceNumberLessThanOrEqualTo","Parameters.ifSequenceNumberLessThan","Parameters.ifSequenceNumberEqualTo","Mappers.PageBlobClearPagesHeaders","Mappers.PageBlobClearPagesExceptionHeaders","Parameters.pageWrite1","Mappers.PageBlobUploadPagesFromURLHeaders","Mappers.PageBlobUploadPagesFromURLExceptionHeaders","Parameters.sourceUrl","Parameters.sourceRange","Parameters.sourceContentCrc64","Parameters.range1","Mappers.PageList","Mappers.PageBlobGetPageRangesHeaders","Mappers.PageBlobGetPageRangesExceptionHeaders","Parameters.comp20","Mappers.PageBlobGetPageRangesDiffHeaders","Mappers.PageBlobGetPageRangesDiffExceptionHeaders","Parameters.prevsnapshot","Parameters.prevSnapshotUrl","Mappers.PageBlobResizeHeaders","Mappers.PageBlobResizeExceptionHeaders","Mappers.PageBlobUpdateSequenceNumberHeaders","Mappers.PageBlobUpdateSequenceNumberExceptionHeaders","Parameters.sequenceNumberAction","Mappers.PageBlobCopyIncrementalHeaders","Mappers.PageBlobCopyIncrementalExceptionHeaders","Parameters.comp21","Mappers.AppendBlobCreateHeaders","Mappers.AppendBlobCreateExceptionHeaders","Parameters.blobType1","Mappers.AppendBlobAppendBlockHeaders","Mappers.AppendBlobAppendBlockExceptionHeaders","Parameters.comp22","Parameters.maxSize","Parameters.appendPosition","Mappers.AppendBlobAppendBlockFromUrlHeaders","Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders","Parameters.sourceRange1","Mapper
s.AppendBlobSealHeaders","Mappers.AppendBlobSealExceptionHeaders","Parameters.comp23","Mappers.BlockBlobUploadHeaders","Mappers.BlockBlobUploadExceptionHeaders","Parameters.blobType2","Mappers.BlockBlobPutBlobFromUrlHeaders","Mappers.BlockBlobPutBlobFromUrlExceptionHeaders","Parameters.copySourceBlobProperties","Mappers.BlockBlobStageBlockHeaders","Mappers.BlockBlobStageBlockExceptionHeaders","Parameters.comp24","Parameters.blockId","Mappers.BlockBlobStageBlockFromURLHeaders","Mappers.BlockBlobStageBlockFromURLExceptionHeaders","Mappers.BlockBlobCommitBlockListHeaders","Mappers.BlockBlobCommitBlockListExceptionHeaders","Parameters.blocks","Parameters.comp25","Mappers.BlockList","Mappers.BlockBlobGetBlockListHeaders","Mappers.BlockBlobGetBlockListExceptionHeaders","Parameters.listType","createClientLogger","URLBuilder","isNode","BaseRequestPolicy","StorageRetryPolicyType","AbortError","HttpHeaders","os","DefaultHttpClient","delay","tracingPolicy","keepAlivePolicy","generateClientRequestIdPolicy","deserializationPolicy","logPolicy","proxyPolicy","disableResponseDecompressionPolicy","isTokenCredential","createHmac","createSpanFunction","SASProtocol","StorageBlob","generateUuid","SpanStatusCode","Readable","__await","BlockBlobTier","PremiumPageBlobTier","StorageBlobAudience","Poller","EventEmitter","fs","util","getDefaultProxySettings","__asyncValues","__asyncDelegator","bearerTokenAuthenticationPolicy","WebResource"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;;;;;AAMG;AAII,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,0BAA0B;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,oBAAoB,EAAE;AACpB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,SAAS;AACrB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,UAAU;AACtB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,eAAe;AAC3B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MA
AM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAA6B;AACvD,IAAA,cAAc,EAAE,iBAAiB;AACjC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iBAAiB;AAC5B,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,WAAW,EAAE;AACX,oBAAA,gBAAgB,EAAE,CAAC;AACpB,iBAAA;AACD,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,iBAAiB;AAC7B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,WAAW,EAAE;AACX,oBAAA,gBAAgB,EAAE,CAAC;AACpB,iBAAA;AACD,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAA6B;AACrD,IAAA,cAAc,EAAE,eAAe;AAC/B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,eAAe;AAC1B,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,oBAAoB,EAAE;AACpB,gBAAA,cAAc,EAAE,sBAAsB;AACtC,gBAAA,OAAO,EAAE,sBAAsB;AAC/B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EA
AE;AACxB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,qBAAqB;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,gBAAgB;AAC5B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAA6B;AACtD,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gBAAgB;AAC3B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,WAAW,EAAE,aAAa,CAAC;AACpD,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,eAAe;AAC3B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,aAAa,GAA6B;AACrD,IAAA,cAAc,EAAE,eAAe;AAC/B,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,eAAe;AAC1B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA
,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,qBAAqB;AACjC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,8BAA8B,EAAE;AAC9B,gBAAA,cAAc,EAAE,6BAA6B;AAC7C,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uCAAuC,EAAE;AACvC,gBAAA,cAAc,EAAE,uCAAuC;AACvD,gBAAA,OAAO,EAAE,uCAAuC;AAChD,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAA
E,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,gBAAgB;AAC5B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,cAAc,GAA6B;AACtD,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gBAAgB;AAC3B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,SAAS;AACrB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,OAAO,GAA6B;AAC/C,IAAA,cAAc,EAAE,SAAS;AACzB,IAAA,OAAO,EAAE,KAAK;AACd,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,SAAS;AACpB,QAAA,eAAe,EAAE;AACf,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE
,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,kBAAkB;AAClC,IAAA,OAAO,EAAE,kBAAkB;AAC3B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,EAAE,EAAE;AACF,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,cAAc;AAC1B,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,qBAAqB;AACjC,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,kBAAkB;AAC9B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,kBAAkB;AAClC,IAAA,OAAO,EAAE,MAAM;AACf,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,W
AAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,wBAAwB;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,OAAO,EAAE,YAAY;AACrB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;
AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,IAAI;wBACJ,IAAI;wBACJ,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,KAAK;wBACL,MAAM;wBACN,SAAS;AACV,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,0BAA0B;wBAC1B,2BAA2B;AAC5B,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,
2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,sBAAsB;AACtC,gBAAA,OAAO,EAAE,sBAAsB;AAC/B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,UAAU;AAC1B,gBAAA,OAAO,EAAE,UAAU;AACnB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,6BAA6B;AAC7C,gBAAA,OAAO,EAAE,6BAA6B;AACtC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,cAAc,EAAE,IAAI;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,0BAA0B;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAA6B;AAChE,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0BAA0B;AACrC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;
AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,kBAAkB;AAC9B,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,UAAU;AACtB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,eAAe,GAA6B;AACvD,IAAA,cAAc,EAAE,iBAAiB;AACjC,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iBAAiB;AAC5B,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,QAAQ;AACf,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAA6B;AACjD,IAAA,cAAc,EAAE,WAAW;AAC3B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,OAAO;AACnB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,OAAO;AACnB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,KAAK,GAA6B;AAC7C,IAAA,cAAc,EAAE,OAAO;AACvB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,OAAO;AAClB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,QAAQ,GAA6B;AAChD,IAAA,cAAc,EAAE,UAAU;AAC1B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,UAAU;AACrB,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,WAAW;AACvB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5
B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,SAAS,GAA6B;AACjD,IAAA,cAAc,EAAE,WAAW;AAC3B,IAAA,OAAO,EAAE,WAAW;AACpB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,eAAe,EAAE;AACf,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,OAAO,EAAE,YAAY;AACrB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,GAAG,EAAE;AACH,gBAAA,cAAc,EAAE,KAAK;AACrB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,KAAK;AACd,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,YAAY,GAA6B;AACpD,IAAA,cAAc,EAAE,cAAc;AAC9B,IAAA,OAAO,EAAE,cAAc;AACvB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,cAAc;AACzB,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,aAAa;AACzB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,WAAW,GAA6B;AACnD,IAAA,cAAc,EAAE,aAAa;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,aAAa;AACxB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,CAAC;AACzD,iBAAA;AACF,aAAA;AACD,YAAA,0BAA0B,EAAE;AAC1B,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,4BAA4B;AACxC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,uBAAuB;AACnC,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI
,EAAE,WAAW;AACjB,oBAAA,SAAS,EAAE,oBAAoB;AAChC,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EAAE;AACxB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,KAAK;AACZ,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,OAAO,EAAE,4BAA4B;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,YAAY;AAC5B,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,YAAY;AACrB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,OAAO,EAAE,uBAAuB;AAChC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,OAAO,EAAE,oBAAoB;AAC7B,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,QAAQ;AACxB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;AACP,wBAAA,IAAI,EAAE;AACJ,4BAAA,IAAI,EAAE,WAAW;AACjB,4BAAA,SAAS,EAAE,YAAY;AACxB,yBAAA;AACF,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,UAAU,GAA6B;AAClD,IAAA,cAAc,EAAE,YAAY;AAC5B,IAAA,OAAO,EAAE,OAAO;AAChB,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,YAAY;AACvB,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,QAAQ,EAAE,IAAI;AACd,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,KAAK,EAAE;AACL,gBAAA,cAAc,EAAE,OAAO;AACvB,gBAAA,OAAO,EAAE,OAAO;AAChB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;A
ACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AA
CzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2CAA2C,GAA6B;AACnF,IAAA,cAAc,EAAE,8CAA8C;AAC9D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6CAA6C;AACxD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,8BAA8B,EAAE;AAC9B,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,
SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AA
C1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,gCAAgC;AAChD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,uCAAuC,EAAE;AACvC,gBAAA,cAAc,EAAE,gDAAgD;AAChE,gBAAA,OAAO,EAAE,gDAAgD;AACzD,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sCAAsC,GAA6B;AAC9E,IAAA,cAAc,EAAE,yCAAyC;AACzD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wCAAwC;AACnD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA
,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,MAAM,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OA
AO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACj
B,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAA
A;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,E
AAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,WA
AW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wCAAwC,GAA6B;AAChF,IAAA,cAAc,EAAE,2CAA2C;AAC3D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0CAA0C;AACrD,QAAA,eAAe,EAAE;AACf,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iDAAiD,GAA6B;AACzF,IAAA,cAAc,EAAE,oDAAoD;AACpE,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mDAAmD;AAC9D,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAA
A,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uCAAuC,GAA6B;AAC/E,IAAA,cAAc,EAAE,0CAA0C;AAC1D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yCAAyC;AACpD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,sBAAsB;AACtC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,UAAU;AACnC,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,a
AAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;A
ACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,wBAAwB,GAA6B;AAChE,IAAA,cAAc,EAAE,2BAA2B;AAC3C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,0BAA0B;AACrC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,YAAY;AACrC,aAAA;AACD,YAAA,yBAAyB,EAAE;AACzB,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,SAAS;AACzB,gBAAA,OAAO,EAAE,SAAS;AAClB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACD,gBAAA,sBAAsB,EAAE,UAAU;AACnC,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,gCAAgC;AAChD,gBAAA,OAAO,EAAE,gCAAgC;AACzC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,
aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;AACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,mB
AAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,gBAAgB,EAAE;AAChB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,yBAAyB;AACzC,gBAAA,OAAO,EAAE,yBAAyB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,MAAM,EAAE,UAAU,CAAC;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,2BAA2B,EAAE;AAC3B,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,oCAAoC;AACpD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAA6B;AACzD,IAAA,cAAc,EAAE,oBAAoB;AACpC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mBAAmB;AAC9B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mBAAmB,GAA6B;AAC3D,IAAA,cAAc,EAAE,sBAAsB;AACtC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA
,SAAS,EAAE,qBAAqB;AAChC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oBAAoB,GAA6B;AAC5D,IAAA,cAAc,EAAE,uBAAuB;AACvC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sBAAsB;AACjC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,6BAA6B,GAA6B;AACrE,IAAA,cAAc,EAAE,gCAAgC;AAChD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,+BAA+B;AAC1C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,
EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,wBAAwB,EAAE;AACxB,gBAAA,cAAc,EAAE,qCAAqC;AACrD,gBAAA,OAAO,EAAE,qCAAqC;AAC9C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,sBAAsB,EAAE;AACtB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,QAAQ,CAAC;AACjD,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yCAAyC,GAA6B;AACjF,IAAA,cAAc,EAAE,4CAA4C;AAC5D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2CAA2C;AACtD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4CAA4C,GAA6B;AACpF,IAAA,cAAc,EAAE,+CAA+C;AAC/D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8CAA8C;AACzD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBA
AA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;
AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,uBAAuB,GAA6B;AAC/D,IAAA,cAAc,EAAE,0BAA0B;AAC1C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,yBAAyB;AACpC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,
OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAAA,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAA
A;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,sBAAsB,GAA6B;AAC9D,IAA
A,cAAc,EAAE,yBAAyB;AACzC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,wBAAwB;AACnC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,YAAY,EAAE,SAAS;AACvB,gBAAA,UAAU,EAAE,IAAI;AAChB,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,+BAA+B,GAA6B;AACvE,IAAA,cAAc,EAAE,kCAAkC;AAClD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,iCAAiC;AAC5C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,oCAAoC,GAA6B;AAC5E,IAAA,cAAc,EAAE,uCAAuC;AACvD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,sCAAsC;AACjD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AA
CF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,cAAc;wBACd,cAAc;wBACd,gBAAgB;wBAChB,cAAc;wBACd,aAAa;AACd,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,SAAS;wBACT,aAAa;wBACb,WAAW;wBACX,aAAa;wBACb,kBAAkB;AACnB,qBAAA;AACF,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gBAAgB,GAA6B;AACxD,IAAA,cAAc,EAAE,mBAAmB;AACnC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kBAAkB;AAC7B,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,WAAW;AAC3B,gBAAA,OAAO,EAAE,WAAW;AACpB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,YAAY;oBAClB,KAAK,EAAE,EAAE,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE;AACpC,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,
EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,QAAQ,EAAE;AACR,gBAAA,cAAc,EAAE,gBAAgB;AAChC,gBAAA,OAAO,EAAE,gBAAgB;AACzB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,CAAC;AACvD,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,qBAAqB,EAAE;AACrB,gBAAA,cAAc,EAAE,8BAA8B;AAC9C,gBAAA,OAAO,EAAE,8BAA8B;AACvC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,MAAM,EAAE;AACN,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;oBACZ,aAAa,EAAE,CAAC,SAAS,EAAE,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC;AAC3D,iBAAA;AACF,aAAA;AACD,YAAA,aAAa,EAAE;AACb,gBAAA,cAAc,EAAE,qBAAqB;AACrC,gBAAA,OAAO,EAAE,qBAAqB;AAC9B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,UAAU,EAAE,OAAO,CAAC;AACrC,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,kBAAkB;AAClC,gBAAA,OAAO,EAAE,kBAAkB;AAC3B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE;wBACb,WAAW;wBACX,QAAQ;wBACR,SAAS;wBACT,UAAU;wBACV,QAAQ;AACT,qBAAA;AACF,iBAAA;AACF,aAAA;AACD,YAAA,WAAW,EAAE;AACX,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,aAAa,EAAE,CAAC,QAAQ,EAAE,UAAU,CAAC;AACtC,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,uBAAuB,EAAE;A
ACvB,gBAAA,cAAc,EAAE,iCAAiC;AACjD,gBAAA,OAAO,EAAE,iCAAiC;AAC1C,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,cAAc,EAAE;AACd,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kBAAkB,GAA6B;AAC1D,IAAA,cAAc,EAAE,qBAAqB;AACrC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oBAAoB;AAC/B,QAAA,eAAe,EAAE;AACf,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,2BAA2B,GAA6B;AACnE,IAAA,cAAc,EAAE,8BAA8B;AAC9C,IAAA,IAAI,EAA
E;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,6BAA6B;AACxC,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0BAA0B,GAA6B;AAClE,IAAA,cAAc,EAAE,6BAA6B;AAC7C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4BAA4B;AACvC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBA
AiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yBAAyB,GAA6B;AACjE,IAAA,cAAc,EAAE,4BAA4B;AAC5C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2BAA2B;AACtC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,kCAAkC,GAA6B;AAC1E,IAAA,cAAc,EAAE,qCAAqC;AACrD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,oCAAoC;AAC/C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,iCAAiC,GAA6B;AACzE,IAAA,cAAc,EAAE,oCAAoC;AACpD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,mCAAmC;AAC9C,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;
AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,UAAU,EAAE;AACV,gBAAA,cAAc,EAAE,aAAa;AAC7B,gBAAA,OAAO,EAAE,aAAa;AACtB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,oBAAoB;AACpC,gBAAA,OAAO,EAAE,oBAAoB;AAC7B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,WAAW;AAClB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,+BAA+B;AAC/C,gBAAA,OAAO,EAAE,+BAA+B;AACxC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;AACF,aAAA;AACD,YAAA,mBAAmB,EAAE;AACnB,gBAAA,cAAc,EAAE,4BAA4B;AAC5C,gBAAA,OAAO,EAAE,4BAA4B;AACrC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,uBAAuB;AACvC,gBAAA,OAAO,EAAE,uBAAuB;AAChC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,0CAA0C,GAA6B;AAClF,IAAA,cAAc,EAAE,6CAA6C;AAC7D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,4CAA4C;AACvD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,4BAA4B,GAA6B;AACpE,IAAA,cAAc,EAAE,+BAA+B;AAC/C,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,8BAA8B;AACzC,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qCAAqC,GAA6B;AAC7E,IAAA,cAAc,EAAE,wCAAwC;AACxD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uCAAuC;AAClD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,o
BAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,gCAAgC,GAA6B;AACxE,IAAA,cAAc,EAAE,mCAAmC;AACnD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,kCAAkC;AAC7C,QAAA,eAAe,EAAE;AACf,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,iBAAiB,EAAE;AACjB,gBAAA,cAAc,EAAE,0BAA0B;AAC1C,gBAAA,OAAO,EAAE,0BAA0B;AACnC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,yCAAyC,GAA6B;AACjF,IAAA,cAAc,EAAE,4CAA4C;AAC5D,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,2CAA2C;AACtD,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,qBAAqB,GAA6B;AAC7D,IAAA,cAAc,EAAE,wBAAwB;AACxC,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,uBAAuB;AAClC,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,YAAY,EAAE;AACZ,gBAAA,cAAc,EAAE,eAAe;AAC/B,gBAAA,OAAO,EAAE,eAAe;AACxB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,kBAAkB,EAAE;AAClB,gBAAA,cAAc,EAAE,2BAA2B;AAC3C,gBAAA,OAAO,EAAE,2BAA2B;AACpC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,eAAe,EAAE;AACf,gBAAA,cAAc,EAAE,wBAAwB;AACxC,gBAAA,OAAO,EAAE,wBAAwB;AACjC,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,OAAO,EAAE;AACP,gBAAA,cAAc,EAAE,cAAc;AAC9B,gBAAA,OAAO,EAAE,cAAc;AACvB,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACD,YAAA,IAAI,EAAE;AACJ,gBAAA,cAAc,EAAE,MAAM;AACtB,gBAAA,OAAO,EAAE,MAAM;AACf,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,iBAAiB;AACxB,iBAAA;AACF,aAAA;AACD,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,8BAA8B,GAA6B;AACtE,IAAA,cAAc,EAAE,iCAAiC;AACjD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,gCAAgC;AAC3C,QAAA,eAAe,EAAE;AACf,YAAA,SAAS,EAAE;AACT,gBAAA,cAAc,EAAE,iBAAiB;AACjC,gBAAA,OAAO,EAAE,iBAAiB;AAC1B,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,QAAQ;AACf,iBAAA;AACF,aAAA;AACF,SAAA;AACF,KAAA;CACF,CAAC;AAEK,MAAM,mCAAmC,GAA6B;AAC3E,IAAA,cAAc,EAAE,sCAAsC;AACtD,IAAA,IAAI,EAAE;AACJ,QAAA,IAAI,EAAE,WAAW;AACjB,QAAA,SAAS,EAAE,qCAAqC;AAChD,QAAA,eAAe,EAAE;AACf,YAAA,IAAI,EAAE;AACJ
[base64 VLQ source-map "mappings" payload for a generated bundle under node_modules — machine-readable only]
A,aAAa,EAAE,CAAChC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAC,oBAA+B;AAChC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAE1C,eAAa;CAC1B,CAAC;AACF,MAAMD,0BAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,GAAG;AACT,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE4C,iBAAyB;YACrC,aAAa,EAAEC,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1C,YAAoB;YAChC,aAAa,EAAE2C,kCAA0C;AAC1D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAtC,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAmB,KAAgB;AAChB,QAAAC,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B;;ACraD;;;;;;AAMG;AA4CH;MACa,SAAS,CAAA;AAGpB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,MAAM,CACJ,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBqD,qBAAmB,CACgB,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAA8C,EAAA;AAE9C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAErD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBC,4BAA0B,CACgB,CAAC;KAC9C;AAED;;;;AAIG;AACH,IAAA,MAAM,CACJ,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAED,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBsD,qBAAmB,CACgB,CAAC;KACvC;AAED;;;AAGG;AACH,IAAA,WAAW,CACT,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEtD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBuD,0BAAwB,CACgB,CAAC;KAC5C;AAED;;;;AAIG;AACH,IAAA,eAAe,CACb,OAAgD,EAAA;AAEhD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEvD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;KAChD;AAED;;;;AAIG;AACH,IAAA,eAAe,CACb,OAAgD,EAAA;AAEhD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;KAChD;AAED;;;AAGG;AACH,IAAA,OAAO,CACL,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACgB,CAAC;KACxC;AAED;;;;AAIG;IACH,MAAM,CACJ,mBAA2B,EAC3B,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,mBAAmB;YACnB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;KACvC;AAED;;;;;;;AAOG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,oBAA4B,EAC5B,IAA8B,EAC9B,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,oBAAoB;YACpB,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;KAC5C;AAED;;;;AAIG;AACH,IAAA,WAAW,CACT,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACgB,CAAC;KAC5C;AAED;;;;AAIG;AACH,IAAA,YAAY,CACV,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBwD,2BAAyB,CACgB,CAAC;KAC
7C;AAED;;;;;AAKG;IACH,YAAY,CACV,OAAe,EACf,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAExD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClByD,2BAAyB,CACgB,CAAC;KAC7C;AAED;;;;;AAKG;IACH,UAAU,CACR,OAAe,EACf,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEzD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB0D,yBAAuB,CACgB,CAAC;KAC3C;AAED;;;;AAIG;AACH,IAAA,UAAU,CACR,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE1D,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB2D,yBAAuB,CACgB,CAAC;KAC3C;AAED;;;;;;;;AAQG;AACH,IAAA,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAE3D,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB4D,0BAAwB,CACgB,CAAC;KAC5C;AAED;;;AAGG;AACH,IAAA,mBAAmB,CACjB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAE5D,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,gCAAgC,CACgB,CAAC;KACpD;AAED;;;;;;;AAOG;IACH,wBAAwB,CACtB,SAAiB,EACjB,OAAyD,EAAA;AAEzD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACgB,CAAC;KACzD;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBE,6BAA2B,CACgB,CAAC;KAC/C;AACF,CAAA;AACD;AACA,MAAMG,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AAEzE,MAAMqD,qBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEQ,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtD,YAAoB;YAChC,aAAa,EAAEuD,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClD,gBAA2B,EAAEmD,QAAmB,CAAC;AACnE,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAC,MAAiB;AACjB,QAAAC,sBAAiC;AACjC,QAAAC,8BAAyC;AAC1C,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE9D,eAAa;CAC1B,CAAC;AACF,MAAMJ,4BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmE,6BAAqC;AACrD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7D,YAAoB;YAChC,aAAa,EAAE8D,sCAA8C;AAC9D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzD,gBAA2B,EAAEmD,QAAmB,CAAC;AACnE,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjE,eAAa;CAC1B,CAAC;AACF,MAAMiD,qBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiB,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhE,YAAoB;YAChC,aAAa,EAAEiE,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC5D,gBAA2B,EAAEmD,QAAmB,CAAC;AACnE,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC7B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErE,eAAa;CAC1B,CAAC;AACF,MAAMkD,0BAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoB,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpE
,YAAoB;YAChC,aAAa,EAAEqE,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAhE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAc,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAAChE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEpE,eAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE;AACJ,oBAAA,IAAI,EAAE,UAAU;AAChB,oBAAA,OAAO,EAAE;wBACP,IAAI,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,SAAS,EAAE,kBAAkB,EAAE;AAC3D,qBAAA;AACF,iBAAA;AACD,gBAAA,cAAc,EAAE,mBAAmB;AACnC,gBAAA,OAAO,EAAE,mBAAmB;AAC5B,gBAAA,YAAY,EAAE,IAAI;AAClB,gBAAA,cAAc,EAAE,kBAAkB;AACnC,aAAA;YACD,aAAa,EAAEyE,+BAAuC;AACvD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEvE,YAAoB;YAChC,aAAa,EAAEwE,wCAAgD;AAChE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAnE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAiB,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACnE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjE,eAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE4E,+BAAuC;AACvD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1E,YAAoB;YAChC,aAAa,EAAE2E,wCAAgD;AAChE,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,YAAuB;AACpC,IAAA,eAAe,EAAE;AACf,QAAAvE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAiB,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACnE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAgD,MAAiB;AACjB,QAAAK,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC7B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAErE,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+E,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7E,YAAoB;YAChC,aAAa,EAAE8E,gCAAwC;AACxD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAzE,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAuB,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAACzE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAkE,oBAA+B;AAC/B,QAAAC,uBAAkC;AACnC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnF,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoF,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAElF,YAAoB;YAChC,aAAa,EAAEmF,+BAAuC;AACvD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA9E,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAA4B,KAAgB;AACjB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC9E,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAuE,mBAA8B;AAC9B,QAAAC,aAAwB;AACzB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAExF,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEyF,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEvF,YAAoB;YAChC,aAAa,EAAEwF,oCAA4C;AAC5D,SAAA;AACF,KAAA;IACD,WAAW,EAAEnD,IAAe;AAC5B,IAAA,eAAe,EAAE;AACf,QAAAhC,gBAA2B;AAC3B,QAAAiC,KAAgB;AAChB,QAAAkB,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB
;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAC,oBAA+B;AAChC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAE1C,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE2C,iBAAyB;YACrC,aAAa,EAAEgD,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzF,YAAoB;YAChC,aAAa,EAAE0F,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAArF,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAmB,KAAgB;AAChB,QAAAC,KAAgB;AAChB,QAAAW,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAAClD,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAMmD,2BAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0C,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3F,YAAoB;YAChC,aAAa,EAAE4F,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvF,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA2B,MAAiB;AACjB,QAAAC,QAAmB;AACnB,QAAAC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAElG,eAAa;CAC1B,CAAC;AACF,MAAMoD,2BAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+C,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjG,YAAoB;YAChC,aAAa,EAAEkG,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA7F,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAgC,OAAkB;AAClB,QAAAC,QAAmB;AACpB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtG,eAAa;CAC1B,CAAC;AACF,MAAMqD,yBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkD,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErG,YAAoB;YAChC,aAAa,EAAEsG,mCAA2C;AAC3D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAjG,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAG,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzG,eAAa;CAC1B,CAAC;AACF,MAAMsD,yBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoD,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExG,YAAoB;YAChC,aAAa,EAAEyG,mCAA2C;AAC3D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAApG,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAuC,OAAkB;AAClB,QAAAC,WAAsB;AACvB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE7G,eAAa;CAC1B,CAAC;AACF,MAAMuD,0BAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuD,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE5G,YAAoB;YAChC,aAAa,EAAE6G,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAxG,gBAA2B;AAC3B,QAAAmD,QAAmB;AACnB,QAAAqC,MAAiB;AAClB,KAAA;AACD,IA
AA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAU,OAAkB;AAClB,QAAAC,gBAA2B;AAC5B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjH,eAAa;CAC1B,CAAC;AACF,MAAM,gCAAgC,GAA2B;AAC/D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEkH,4BAAoC;YAChD,aAAa,EAAEC,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjH,YAAoB;YAChC,aAAa,EAAEkH,4CAAoD;AACpE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA7G,gBAA2B;AAC3B,QAAAiB,KAAgB;AAChB,QAAAC,MAAiB;AACjB,QAAAC,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAA+B,QAAmB;AACnB,QAAA2D,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7G,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;AACpE,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEsH,iCAAyC;YACrD,aAAa,EAAEC,wCAAgD;AAChE,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErH,YAAoB;YAChC,aAAa,EAAEsH,iDAAyD;AACzE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAjH,gBAA2B;AAC3B,QAAAiB,KAAgB;AAChB,QAAAC,MAAiB;AACjB,QAAAC,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAA+B,QAAmB;AACnB,QAAA2D,QAAmB;AACnB,QAAAI,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACjH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAMH,6BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,kBAAkB;AACxB,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6H,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExH,YAAoB;YAChC,aAAa,EAAEyH,uCAA+C;AAC/D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACrH,IAAe,EAAE8B,QAAmB,CAAC;AACvD,IAAA,aAAa,EAAE,CAAC5B,GAAc,CAAC;IAC/B,gBAAgB,EAAE,CAACG,OAAkB,EAAEK,OAAkB,CAAC;AAC1D,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B;;AC/5BD;;;;;;AAMG;AA0DH;MACa4H,MAAI,CAAA;AAGf;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,QAAQ,CACN,OAAoC,EAAA;AAEpC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEjI,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;KACpC;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAAyC,EAAA;AAEzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACW,CAAC;KACzC;AAED;;;;;;;;;;;;;;AAcG;AACH,IAAA,MAAM,CAAC,OAAkC,EAAA;AACvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACW,CAAC;KAClC;AAED;;;AAGG;AACH,IAAA,QAAQ,CACN,OAAoC,EAAA;AAEpC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qBAAqB,CACW,CAAC;KACpC;AAED;;;;AAIG;IACH,SAAS,CACP,aAAgC,EAChC,OAAqC,EAAA;AAErC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,sBAAsB,CACW,CAAC;KACrC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;AAGG;AACH,IAAA,qBAAqB,CACnB,OAAiD,EAAA;AAEjD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oC
AAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kCAAkC,CACW,CAAC;KACjD;AAED;;;AAGG;AACH,IAAA,wBAAwB,CACtB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,qCAAqC,CACW,CAAC;KACpD;AAED;;;;AAIG;IACH,YAAY,CACV,SAAkB,EAClB,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;AAIG;AACH,IAAA,WAAW,CACT,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,YAAY,CACV,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;;AAKG;IACH,YAAY,CACV,OAAe,EACf,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACW,CAAC;KACxC;AAED;;;;;AAKG;IACH,UAAU,CACR,OAAe,EACf,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;KACtC;AAED;;;;;;;;AAQG;AACH,IAAA,WAAW,CACT,OAAe,EACf,eAAuB,EACvB,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,eAAe;YACf,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,UAAU,CACR,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACW,CAAC;KACtC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;;;;;AAOG;IACH,gBAAgB,CACd,UAAkB,EAClB,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;KAC5C;AAED;;;;;;;;AAQG;IACH,WAAW,CACT,UAAkB,EAClB,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACW,CAAC;KACvC;AAED;;;;;;AAMG;IACH,gBAAgB,CACd,MAAc,EACd,OAA4C,EAAA;AAE5C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,6BAA6B,CACW,CAAC;KAC5C;AAED;;;;;;;;AAQG;IACH,OAAO,CACL,IAAgB,EAChB,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AAED;;;AAGG;AACH,IAAA,cAAc,CACZ,OAAmC,EAAA;AAEnC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACW,CAAC;KAC1C;AAED;;;;AAIG;AACH,IAAA,KAAK,CAAC,OAAiC,EAAA;AACrC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SA
CtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,kBAAkB,CACW,CAAC;KACjC;AAED;;;AAGG;AACH,IAAA,OAAO,CAAC,OAAmC,EAAA;AACzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AAED;;;AAGG;AACH,IAAA,OAAO,CAAC,OAAmC,EAAA;AACzC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,oBAAoB,CACW,CAAC;KACnC;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AAEzE,MAAM,qBAAqB,GAA2B;AACpD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEkI,mBAA2B;AAC3C,SAAA;AACD,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEA,mBAA2B;AAC3C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3H,YAAoB;YAChC,aAAa,EAAE4H,4BAAoC;AACpD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvH,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAC,kBAA6B;AAC7B,QAAAC,oBAA+B;AAC/B,QAAAC,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0I,wBAAgC;AAChD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExI,YAAoB;YAChC,aAAa,EAAEyI,iCAAyC;AACzD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAApI,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxH,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE4I,iBAAyB;AACzC,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1I,YAAoB;YAChC,aAAa,EAAE2I,0BAAkC;AAClD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAtI,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAAc,cAAyB;AAC1B,KAAA;AACD,IAAA,aAAa,EAAE,CAACtI,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAM,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE/I,eAAa;CAC1B,CAAC;AACF,MAAM,qBAAqB,GAA2B;AACpD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgJ,mBAA2B;AAC3C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE9I,YAAoB;YAChC,aAAa,EAAE+I,4BAAoC;AACpD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC1I,gBAA2B,EAAE0E,KAAgB,CAAC;AAChE,IAAA,aAAa,EAAE,CAACzE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,sBAAsB,GAA2B;AACrD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT
,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkJ,oBAA4B;AAC5C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhJ,YAAoB;YAChC,aAAa,EAAEiJ,6BAAqC;AACrD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC5I,gBAA2B,EAAE6I,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC5I,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAqI,aAAwB;AACxB,QAAAC,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtJ,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuJ,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErJ,YAAoB;YAChC,aAAa,EAAEsJ,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClJ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AAClC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE9J,eAAa;CAC1B,CAAC;AACF,MAAM,kCAAkC,GAA2B;AACjE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+J,gCAAwC;AACxD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7J,YAAoB;YAChC,aAAa,EAAE8J,yCAAiD;AACjE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzJ,gBAA2B,EAAE0J,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACzJ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAqD,iBAA4B;AAC5B,QAAA6F,wBAAmC;AACnC,QAAAC,sBAAiC;AAClC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnK,eAAa;CAC1B,CAAC;AACF,MAAM,qCAAqC,GAA2B;AACpE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,QAAQ;AACpB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEoK,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAElK,YAAoB;YAChC,aAAa,EAAEmK,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC9J,gBAA2B,EAAE0J,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACzJ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AACnB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsK,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpK,YAAoB;YAChC,aAAa,EAAEqK,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAChK,gBAA2B,EAAEiK,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAChK,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyJ,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzK,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0K,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExK,YAAoB;YAChC,aAAa,EAAEyK,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACpK,gBAA2B,EAAEiE,KAAgB,CAAC;AAChE,IAAA,aAAa,EAAE,CAAChE,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5K,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6K,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3K,YAAoB;YAChC,aAAa,EAAE4K,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eA
Ae,EAAE,CAACvK,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA2B,MAAiB;AACjB,QAAAC,QAAmB;AACnB,QAAAC,eAA0B;AAC1B,QAAAqC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE+K,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7K,YAAoB;YAChC,aAAa,EAAE8K,gCAAwC;AACxD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACzK,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAgC,OAAkB;AAClB,QAAAC,QAAmB;AACnB,QAAAiC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE/K,YAAoB;YAChC,aAAa,EAAEgL,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC3K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAG,OAAkB;AAClB,QAAA8B,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmL,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEjL,YAAoB;YAChC,aAAa,EAAEkL,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC7K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAiC,QAAmB;AACnB,QAAAU,OAAkB;AAClB,QAAAC,gBAA2B;AAC3B,QAAAsB,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEqL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEnL,YAAoB;YAChC,aAAa,EAAEoL,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC/K,gBAA2B,EAAEwF,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACvF,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAuC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAA0B,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuL,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErL,YAAoB;YAChC,aAAa,EAAEsL,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjL,gBAA2B,EAAEkL,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACjL,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5K,eAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;AAC5D,IAAA,IAAI,EAAE,y
BAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE0L,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAExL,YAAoB;YAChC,aAAa,EAAEyL,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACpL,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAyB,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAyB,IAAe;AACf,QAAAC,iBAA4B;AAC5B,QAAAC,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAC,YAAuB;AACvB,QAAAC,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAC,QAAmB;AACnB,QAAAC,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEtM,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEuM,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAErM,YAAoB;YAChC,aAAa,EAAEsM,+BAAuC;AACvD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACjM,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAA2C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAyB,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAE,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAE,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAAG,eAA0B;AAC1B,QAAAC,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAC,cAAyB;AAC1B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE5M,eAAa;CAC1B,CAAC;AACF,MAAM,6BAA6B,GAA2B;AAC5D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE6M,2BAAmC;AACnD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3M,YAAoB;YAChC,aAAa,EAAE4M,oCAA4C;AAC5D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvM,gBAA2B;AAC3B,QAAAwM,MAAiB;AACjB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACxM,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAgJ,uBAAkC;AACnC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEjN,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEkN,kBAA0B;AAC1C,SAAA;AACD,QAAA,GAAG,EAAE;YACH,aAAa,EAAEA,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhN,YAAoB;YAChC,aAAa,EAAEiN,2BAAmC;AACnD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA5M,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAAoF,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC5M,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AACjB,QAAAoD,iBAA4B;AAC5B,QAAAwB,KAAgB;AACjB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErN,eAAa;CAC1B,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsN,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpN,YAAoB;YAChC,aAAa,EAAEqN,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjN,IAAe,EAAE8B,QAAmB,CAAC;AACvD,IAAA,aAAa,EAAE,CAAC5B,GAAc,CAAC;IAC/B,gBAAgB,EAAE,CAACG,OAAkB,EAAEK,OAAkB,CAAC;AAC1D,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhB,eAAa;CAC1B,CAAC;AACF,MAAM,kBAAkB,GAA2B;AACjD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,MAAM;AAClB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD
,aAAa,EAAEwN,gBAAwB;AACxC,SAAA;AACD,QAAA,GAAG,EAAE;AACH,YAAA,UAAU,EAAE;AACV,gBAAA,IAAI,EAAE,EAAE,IAAI,EAAE,QAAQ,EAAE;AACxB,gBAAA,cAAc,EAAE,gBAAgB;AACjC,aAAA;YACD,aAAa,EAAEA,gBAAwB;AACxC,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtN,YAAoB;YAChC,aAAa,EAAEuN,yBAAiC;AACjD,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,YAAuB;AACpC,IAAA,eAAe,EAAE;AACf,QAAAnN,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAA4F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACnN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAqD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE4N,QAAgB;YAC5B,aAAa,EAAEC,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3N,YAAoB;YAChC,aAAa,EAAE4N,2BAAmC;AACnD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvN,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAAC,SAAoB;AACpB,QAAA+F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,oBAAoB,GAA2B;AACnD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgO,kBAA0B;AAC1C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE9N,YAAoB;YAChC,aAAa,EAAE+N,2BAAmC;AACnD,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,IAAe;AAC5B,IAAA,eAAe,EAAE;AACf,QAAA3N,gBAA2B;AAC3B,QAAAyH,SAAoB;AACpB,QAAA+F,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvN,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAqD,OAAkB;AAClB,QAAAwE,MAAiB;AACjB,QAAA0F,uBAAkC;AAClC,QAAAC,yBAAoC;AACrC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAEpO,eAAa;CAC1B;;AChwCD;;;;;;AAMG;AA4BH;MACa,QAAQ,CAAA;AAGnB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;;;AAMG;AACH,IAAA,MAAM,CACJ,aAAqB,EACrB,iBAAyB,EACzB,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,iBAAiB;YACjB,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClBqD,qBAAmB,CACe,CAAC;KACtC;AAED;;;;;AAKG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAErD,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACe,CAAC;KAC3C;AAED;;;;AAIG;IACH,UAAU,CACR,aAAqB,EACrB,OAA0C,EAAA;AAE1C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACe,CAAC;KAC1C;AAED;;;;;;;;;;AAUG;IACH,kBAAkB,CAChB,SAAiB,EACjB,WAAmB,EACnB,aAAqB,EACrB,KAAa,EACb,OAAkD,EAAA;AAElD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,WAAW;YACX,aAAa;YACb,KAAK;YACL,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACe,CAAC;KAClD;AAED;;;;AAIG;AACH,IAAA,aAAa,CACX,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,0BAA0B,CACe,CAAC;KAC7C;AAED;;
;;AAIG;AACH,IAAA,iBAAiB,CACf,OAAiD,EAAA;AAEjD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACe,CAAC;KACjD;AAED;;;;;AAKG;IACH,MAAM,CACJ,iBAAyB,EACzB,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,iBAAiB;YACjB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACe,CAAC;KACtC;AAED;;;;;;AAMG;IACH,oBAAoB,CAClB,oBAA8C,EAC9C,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,oBAAoB;YACpB,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iCAAiC,CACe,CAAC;KACpD;AAED;;;;;;;;;;;AAWG;IACH,eAAe,CACb,UAAkB,EAClB,OAA+C,EAAA;AAE/C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACe,CAAC;KAC/C;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM0O,YAAU,GAAG,IAAI1O,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAMqD,qBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsL,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpO,YAAoB;YAChC,aAAa,EAAEqO,8BAAsC;AACtD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAAChO,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAAkC,QAAmB;AACnB,QAAAC,iBAA4B;AAC5B,QAAAC,kBAA6B;AAC9B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE1O,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE2O,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzO,YAAoB;YAChC,aAAa,EAAE0O,mCAA2C;AAC3D,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,KAAgB;IAC7B,eAAe,EAAE,CAACtO,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAG,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAC,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AACnC,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;gBACnBf,YAAU;CACX,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgB,yBAAiC;AACjD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEnP,YAAoB;YAChC,aAAa,EAAEoP,kCAA0C;AAC1D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAC/O,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAG,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QA
AAsE,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AAClC,QAAAG,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEvP,eAAa;CAC1B,CAAC;AACF,MAAM,+BAA+B,GAA2B;AAC9D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEwP,iCAAyC;AACzD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtP,YAAoB;YAChC,aAAa,EAAEuP,0CAAkD;AAClE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAClP,gBAA2B,EAAEuO,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACtO,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAsC,SAAoB;AACpB,QAAAC,iCAA4C;AAC5C,QAAAC,wBAAmC;AACnC,QAAAC,uBAAkC;AAClC,QAAAM,SAAoB;AACpB,QAAAC,WAAsB;AACtB,QAAAC,kBAA6B;AAC7B,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE7P,eAAa;CAC1B,CAAC;AACF,MAAM,0BAA0B,GAA2B;AACzD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE8P,QAAgB;YAC5B,aAAa,EAAEC,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7P,YAAoB;YAChC,aAAa,EAAE8P,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAzP,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAoG,QAAmB;AACnB,QAAAkI,MAAiB;AAClB,KAAA;AACD,IAAA,aAAa,EAAE,CAACzP,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAM,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzI,eAAa;CAC1B,CAAC;AACF,MAAM,8BAA8B,GAA2B;AAC7D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAE8P,QAAgB;YAC5B,aAAa,EAAEI,gCAAwC;AACxD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhQ,YAAoB;YAChC,aAAa,EAAEiQ,yCAAiD;AACjE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAA5P,gBAA2B;AAC3B,QAAAmB,MAAiB;AACjB,QAAAC,WAAsB;AACtB,QAAAoG,QAAmB;AACnB,QAAAkI,MAAiB;AACjB,QAAAG,YAAuB;AACxB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC5P,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA4D,KAAgB;AAChB,QAAAM,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAA4H,eAA0B;AAC3B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErQ,eAAa;CAC1B,CAAC;AACF,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsQ,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpQ,YAAoB;YAChC,aAAa,EAAEqQ,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACjQ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAA6D,iBAA4B;AAC7B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEzO,eAAa;CAC1B,CAAC;AACF,MAAM,iCAAiC,GAA2B;AAChE,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEwQ,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtQ,YAAoB;YAChC,aAAa,EAAEuQ,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACnQ,IAAe,EAAEC,gBAA2B,CAAC;AAC/D,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,
gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAiG,kBAA6B;AAC7B,QAAAgC,oBAA+B;AAChC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE1Q,eAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAE2Q,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEzQ,YAAoB;YAChC,aAAa,EAAE0Q,uCAA+C;AAC/D,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACrQ,gBAA2B,EAAEsQ,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACrQ,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAoD,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAA0D,UAAqB;AACtB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEnM,eAAa;CAC1B;;ACzkBD;;;;;;AAMG;AAiBH;MACa,UAAU,CAAA;AAGrB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;AAIG;IACH,MAAM,CACJ,aAAqB,EACrB,OAAwC,EAAA;AAExC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACiB,CAAC;KACxC;AAED;;;;;;;AAOG;AACH,IAAA,WAAW,CACT,aAAqB,EACrB,IAA8B,EAC9B,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,wBAAwB,CACiB,CAAC;KAC7C;AAED;;;;;;;;AAQG;AACH,IAAA,kBAAkB,CAChB,SAAiB,EACjB,aAAqB,EACrB,OAAoD,EAAA;AAEpD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,SAAS;YACT,aAAa;YACb,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,+BAA+B,CACiB,CAAC;KACpD;AAED;;;;AAIG;AACH,IAAA,IAAI,CACF,OAAsC,EAAA;AAEtC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,iBAAiB,CACiB,CAAC;KACtC;AACF,CAAA;AACD;AACA,MAAMK,eAAa,GAAG,IAAIL,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM0O,YAAU,GAAG,IAAI1O,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEmR,uBAA+B;AAC/C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE5Q,YAAoB;YAChC,aAAa,EAAE6Q,gCAAwC;AACxD,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACxQ,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAwB,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA0E,SAAoB;AACrB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAEhR,eAAa;CAC1B,CAAC;AACF,MAAM,wBAAwB,GAA2B;AACvD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiR,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE/Q,YAAoB;YAChC,aAAa,EAAEgR,qCAA6C;AAC7D,SAAA;AACF,KAAA;IACD,WAAW,EAAErC,KAAgB;IAC7B,eAAe,EAAE,CAACtO,gBAA2B,EAAE4Q,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC3Q,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAA
AC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAAoC,OAAkB;AAClB,QAAAC,cAAyB;AAC1B,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;gBACnBhD,YAAU;CACX,CAAC;AACF,MAAM,+BAA+B,GAA2B;AAC9D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiD,mCAA2C;AAC3D,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpR,YAAoB;YAChC,aAAa,EAAEqR,4CAAoD;AACpE,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAAChR,gBAA2B,EAAE4Q,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAAC3Q,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAmC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAwB,uBAAkC;AAClC,QAAAuB,SAAoB;AACpB,QAAAE,kBAA6B;AAC7B,QAAAwB,OAAkB;AAClB,QAAAC,cAAyB;AACzB,QAAAG,YAAuB;AACxB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAExR,eAAa;CAC1B,CAAC;AACF,MAAM,iBAAiB,GAA2B;AAChD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEyR,qBAA6B;AAC7C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEvR,YAAoB;YAChC,aAAa,EAAEwR,8BAAsC;AACtD,SAAA;AACF,KAAA;IACD,eAAe,EAAE,CAACnR,gBAA2B,EAAEoR,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACnR,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAAkE,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAA6I,cAAyB;AAC1B,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAErR,eAAa;CAC1B;;ACxRD;;;;;;AAMG;AAuBH;MACa,SAAS,CAAA;AAGpB;;;AAGG;AACH,IAAA,WAAA,CAAY,MAA4B,EAAA;AACtC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;KACtB;AAED;;;;;;;;AAQG;AACH,IAAA,MAAM,CACJ,aAAqB,EACrB,IAA8B,EAC9B,OAAuC,EAAA;AAEvC,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,IAAI;YACJ,OAAO,EAAEL,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,mBAAmB,CACgB,CAAC;KACvC;AAED;;;;;;;;;;;;AAYG;AACH,IAAA,cAAc,CACZ,aAAqB,EACrB,UAAkB,EAClB,OAA+C,EAAA;AAE/C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,aAAa;YACb,UAAU;YACV,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,2BAA2B,CACgB,CAAC;KAC/C;AAED;;;;;;;;AAQG;AACH,IAAA,UAAU,CACR,OAAe,EACf,aAAqB,EACrB,IAA8B,EAC9B,OAA2C,EAAA;AAE3C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,IAAI;YACJ,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,uBAAuB,CACgB,CAAC;KAC3C;AAED;;;;;;;;;AASG;AACH,IAAA,iBAAiB,CACf,OAAe,EACf,aAAqB,EACrB,SAAiB,EACjB,OAAkD,EAAA;AAElD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,OAAO;YACP,aAAa;YACb,SAAS;YACT,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,8BAA8B,CACgB,CAAC;KAClD;AAED;;;;;;;;;;AAUG;IACH,eAAe,CACb,MAAuB,EACvB,OAAgD,EAAA;AAEhD,QAAA,MAAM,kBAAkB,GAAgC;YACtD,MAAM;YACN,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,4BAA4B,CACgB,CAAC;KAChD;AAED;;;;;;AAMG;IACH,YAAY,CACV,QAAuB,EACvB,OAA6C,EAAA;AAE7C,QAAA,MAAM,kBAAkB,GAAgC;YACtD,QAAQ;YACR,OAAO,EAAEA,mBAAQ,CAAC,oCAAoC,CAAC,OAAO,IAAI,EAAE,CAAC;SACtE,CAAC;QACF,OAAO,IAAI,CAAC,MAAM,CAAC,oBAAoB,CACrC,kBAAkB,EAClB,yBAAyB,CACgB,CAAC;
KAC7C;AACF,CAAA;AACD;AACA,MAAM,aAAa,GAAG,IAAIA,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,IAAI,CAAC,CAAC;AACzE,MAAM,UAAU,GAAG,IAAIA,mBAAQ,CAAC,UAAU,CAAC,OAAO,cAAc,KAAK,CAAC,CAAC;AAEvE,MAAM,mBAAmB,GAA2B;AAClD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEiS,sBAA8B;AAC9C,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE1R,YAAoB;YAChC,aAAa,EAAE2R,+BAAuC;AACvD,SAAA;AACF,KAAA;IACD,WAAW,EAAEhD,KAAgB;AAC7B,IAAA,eAAe,EAAE,CAACtO,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA6B,uBAAkC;AAClC,QAAAY,YAAuB;AACvB,QAAAC,OAAkB;AAClB,QAAA8C,SAAoB;AACrB,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,2BAA2B,GAA2B;AAC1D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEC,8BAAsC;AACtD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE7R,YAAoB;YAChC,aAAa,EAAE8R,uCAA+C;AAC/D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE,CAACzR,gBAA2B,CAAC;AAC9C,IAAA,aAAa,EAAE,CAACC,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAkB,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAc,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAE,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAC,YAAuB;AACvB,QAAAC,UAAqB;AACrB,QAAAC,cAAyB;AACzB,QAAAM,gBAA2B;AAC3B,QAAAC,uBAAkC;AAClC,QAAAC,cAAyB;AACzB,QAAAuB,uBAAkC;AAClC,QAAA2D,SAAoB;AACpB,QAAAG,wBAAmC;AACpC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,uBAAuB,GAA2B;AACtD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEC,0BAAkC;AAClD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEhS,YAAoB;YAChC,aAAa,EAAEiS,mCAA2C;AAC3D,SAAA;AACF,KAAA;IACD,WAAW,EAAEtD,KAAgB;AAC7B,IAAA,eAAe,EAAE;AACf,QAAAtO,gBAA2B;AAC3B,QAAA6R,MAAiB;AACjB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7R,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA6B,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAmE,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAsC,eAA0B;AAC1B,QAAAuD,uBAAkC;AAClC,QAAAC,yBAAoC;AACpC,QAAAW,YAAuB;AACvB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,SAAS,EAAE,QAAQ;IACnB,UAAU;CACX,CAAC;AACF,MAAM,8BAA8B,GAA2B;AAC7D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEsD,iCAAyC;AACzD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEpS,YAAoB;YAChC,aAAa,EAAEqS,0CAAkD;AAClE,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAhS,gBAA2B;AAC3B,QAAA6R,MAAiB;AACjB,QAAAC,OAAkB;AACnB,KAAA;AACD,IAAA,aAAa,EAAE,CAAC7R,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAyB,aAAwB;AACxB,QAAAwB,OAAkB;AAClB,QAAAmE,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAsC,eAA0B;AAC1B,QAAAkB,qBAAgC;AAChC,QAAAC,uBAAkC;AAClC,QAAAC,aAAwB;AACxB,QAAAC,iBAA4B;AAC5B,QAAAS,gBAA2B;AAC3B,QAAAC,uBAAkC;AAC
lC,QAAA+C,SAAoB;AACpB,QAAAE,kBAA6B;AAC7B,QAAA4B,YAAuB;AACxB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,4BAA4B,GAA2B;AAC3D,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,aAAa,EAAEgB,+BAAuC;AACvD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAEtS,YAAoB;YAChC,aAAa,EAAEuS,wCAAgD;AAChE,SAAA;AACF,KAAA;IACD,WAAW,EAAEC,MAAiB;IAC9B,eAAe,EAAE,CAACnS,gBAA2B,EAAEoS,MAAiB,CAAC;AACjE,IAAA,aAAa,EAAE,CAACnS,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAC,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAA+C,QAAmB;AACnB,QAAAM,OAAkB;AAClB,QAAAG,eAA0B;AAC1B,QAAAC,iBAA4B;AAC5B,QAAA+D,aAAwB;AACxB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,OAAkB;AAClB,QAAAC,WAAsB;AACtB,QAAAC,MAAiB;AACjB,QAAAgB,gBAA2B;AAC3B,QAAAC,eAA0B;AAC1B,QAAAC,cAAyB;AACzB,QAAAC,mBAA8B;AAC9B,QAAAC,mBAA8B;AAC9B,QAAAC,sBAAiC;AACjC,QAAAI,wBAAmC;AACnC,QAAAC,sBAAiC;AACjC,QAAAS,eAA0B;AAC1B,QAAAgB,IAAe;AACf,QAAAQ,cAAyB;AACzB,QAAAE,UAAqB;AACrB,QAAA6B,uBAAkC;AAClC,QAAAC,yBAAoC;AACrC,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,WAAW,EAAE,gCAAgC;AAC7C,IAAA,SAAS,EAAE,KAAK;AAChB,IAAA,UAAU,EAAE,aAAa;CAC1B,CAAC;AACF,MAAM,yBAAyB,GAA2B;AACxD,IAAA,IAAI,EAAE,yBAAyB;AAC/B,IAAA,UAAU,EAAE,KAAK;AACjB,IAAA,SAAS,EAAE;AACT,QAAA,GAAG,EAAE;YACH,UAAU,EAAEwE,SAAiB;YAC7B,aAAa,EAAEC,4BAAoC;AACpD,SAAA;AACD,QAAA,OAAO,EAAE;YACP,UAAU,EAAE3S,YAAoB;YAChC,aAAa,EAAE4S,qCAA6C;AAC7D,SAAA;AACF,KAAA;AACD,IAAA,eAAe,EAAE;AACf,QAAAvS,gBAA2B;AAC3B,QAAAwH,QAAmB;AACnB,QAAA4K,MAAiB;AACjB,QAAAI,QAAmB;AACpB,KAAA;AACD,IAAA,aAAa,EAAE,CAACvS,GAAc,CAAC;AAC/B,IAAA,gBAAgB,EAAE;AAChB,QAAAG,OAAkB;AAClB,QAAAC,SAAoB;AACpB,QAAAI,OAAkB;AAClB,QAAAiD,OAAkB;AAClB,QAAAwE,MAAiB;AAClB,KAAA;AACD,IAAA,KAAK,EAAE,IAAI;AACX,IAAA,UAAU,EAAE,aAAa;CAC1B;;AC3cD;AAKA;;AAEG;MACU,MAAM,GAAGuK,2BAAkB,CAAC,cAAc;;ACRvD;AACA;AAEO,MAAM,WAAW,GAAW,SAAS,CAAC;AACtC,MAAM,eAAe,GAAW,YAAY,CAAC;AAE7C,MAAM,gCAAgC,GAAW,GAAG,GAAG,IAAI,GAAG,IAAI,CAAC;AACnE,MAAM,gCAAgC,GAAW,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC;AACpE,MAAM,qBAAqB,GAAW,KAAK,CAAC;AAC5C,MAAM,+BAA+B,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAChE,MAAM,iCAAiC,GAAW,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClE,MAAM,mCAAmC,GAAW,CAAC,CAAC;AAC7D;;AAEG;AACI,MAAM,kBAAkB,GAAsB,qCAAqC;AAEnF,MAAM,YAAY,GAAG;AAC1B,IAAA,UAAU,EAAE;AACV,QAAA,sBAAsB,EAAE,GAAG;AAC3B,QAAA,SAAS,EAAE,KAAK;AAChB,QAAA,QAAQ,EAAE,UAAU;AACpB,QAAA,SAAS,EAAE,WAAW;AACtB,QAAA,OAAO,EAAE,SAAS;AACnB,KAAA;CACF,CAAC;AAEK,MAAM,iBAAiB,GAAG;AAC/B,IAAA,aAAa,EAAE,GAAG;AAClB,IAAA,aAAa,EAAE,GAAG;AAClB,IAAA,cAAc,EAAE,GAAG;AACnB,IAAA,kBAAkB,EAAE,GAAG;AACvB,IAAA,0BAA0B,EAAE,GAAG;CAChC,CAAC;AAEK,MAAM,eAAe,GAAG;AAC7B,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,oBAAoB,EAAE,QAAQ;AAC9B,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,UAAU,EAAE,YAAY;AACxB,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,cAAc,EAAE,gBAAgB;AAChC,IAAA,WAAW,EAAE,aAAa;AAC1B,IAAA,yBAAyB,EAAE,2BAA2B;AACtD,IAAA,YAAY,EAAE,cAAc;AAC5B,IAAA,MAAM,EAAE,QAAQ;AAChB,IAAA,IAAI,EAAE,MAAM;AACZ,IAAA,QAAQ,EAAE,UAAU;AACpB,IAAA,iBAAiB,EAAE,mBAAmB;AACtC,IAAA,aAAa,EAAE,eAAe;AAC9B,IAAA,mBAAmB,EAAE,qBAAqB;AAC1C,IAAA,kBAAkB,EAAE,OAAO;AAC3B,IAAA,KAAK,EAAE,OAAO;AACd,IAAA,UAAU,EAAE,YAAY;AACxB,IAAA,sBAAsB,EAAE,wBAAwB;AAChD,IAAA,gBAAgB,EAAE,kBAAkB;AACpC,IAAA,SAAS,EAAE,WAAW;AACtB,IAAA,eAAe,EAAE,iBAAiB;AAClC,IAAA,YAAY,EAAE,cAAc;CAC7B,CAAC;AAEK,MAAM,QAAQ,GAAG,EAAE,CAAC;AACpB,MAAM,OAAO,GAAG,GAAG,CAAC;AAEpB,MAAM,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAClC,MAAM,iBAAiB,GAAG,GAAG,CAAC;AAC9B,MAAM,0BAA0B,GAAG,CAAC,GAAG,SAAS,CAAC;AACjD,MAAM,gBAAgB,GAAG,MAAM,CAAC;AAChC,MAAM,gBAAgB,GAAG,UAAU,CAAC;AAEpC,MAAM,wBAAwB,GAAG,QAAQ,CAAC;AAE1C,MAAM,2BAA2B,GAAG,sNAAsN,CAAC;AAE3P,MAAM,oCAAoC,GAAG;IAClD,6B
AA6B;IAC7B,eAAe;IACf,gBAAgB;IAChB,cAAc;IACd,MAAM;IACN,YAAY;IACZ,aAAa;IACb,mBAAmB;IACnB,YAAY;IACZ,wBAAwB;IACxB,WAAW;IACX,iBAAiB;IACjB,iBAAiB;IACjB,+BAA+B;IAC/B,cAAc;IACd,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,kBAAkB;IAClB,aAAa;IACb,eAAe;IACf,MAAM;IACN,eAAe;IACf,QAAQ;IACR,MAAM;IACN,oBAAoB;IACpB,kBAAkB;IAClB,2BAA2B;IAC3B,cAAc;IACd,oBAAoB;IACpB,kBAAkB;IAClB,8BAA8B;IAC9B,qBAAqB;IACrB,kBAAkB;IAClB,mBAAmB;IACnB,YAAY;IACZ,+BAA+B;IAC/B,uBAAuB;IACvB,eAAe;IACf,mBAAmB;IACnB,UAAU;IACV,mBAAmB;IACnB,eAAe;IACf,qBAAqB;IACrB,kBAAkB;IAClB,8BAA8B;IAC9B,2BAA2B;IAC3B,mBAAmB;IACnB,qBAAqB;IACrB,yBAAyB;IACzB,yBAAyB;IACzB,iCAAiC;IACjC,+BAA+B;IAC/B,6BAA6B;IAC7B,+BAA+B;IAC/B,4BAA4B;IAC5B,4BAA4B;IAC5B,0BAA0B;IAC1B,uBAAuB;IACvB,wBAAwB;IACxB,yBAAyB;IACzB,2BAA2B;IAC3B,gBAAgB;IAChB,gCAAgC;IAChC,oBAAoB;IACpB,+BAA+B;IAC/B,uBAAuB;IACvB,4BAA4B;IAC5B,qCAAqC;IACrC,2BAA2B;IAC3B,4BAA4B;IAC5B,4BAA4B;IAC5B,4BAA4B;IAC5B,uBAAuB;IACvB,mBAAmB;IACnB,yBAAyB;IACzB,qBAAqB;IACrB,eAAe;IACf,iBAAiB;IACjB,iBAAiB;IACjB,wBAAwB;IACxB,4BAA4B;IAC5B,yBAAyB;IACzB,6BAA6B;IAC7B,eAAe;IACf,yBAAyB;IACzB,sBAAsB;IACtB,+BAA+B;IAC/B,2BAA2B;IAC3B,iCAAiC;IACjC,gBAAgB;IAChB,4BAA4B;IAC5B,cAAc;IACd,qBAAqB;CACtB,CAAC;AAEK,MAAM,wCAAwC,GAAG;IACtD,MAAM;IACN,YAAY;IACZ,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,MAAM;IACN,IAAI;IACJ,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,KAAK;IACL,IAAI;IACJ,IAAI;IACJ,IAAI;IACJ,SAAS;IACT,QAAQ;IACR,QAAQ;IACR,QAAQ;IACR,SAAS;IACT,SAAS;IACT,eAAe;IACf,WAAW;IACX,cAAc;IACd,KAAK;IACL,OAAO;IACP,KAAK;IACL,KAAK;IACL,OAAO;IACP,KAAK;IACL,UAAU;CACX,CAAC;AAEK,MAAM,sCAAsC,GAAG,qCAAqC,CAAC;AACrF,MAAM,yCAAyC,GACpD,2CAA2C;;ACjN7C;AAmDA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmDG;AACG,SAAU,aAAa,CAAC,GAAW,EAAA;IACvC,MAAM,SAAS,GAAGC,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;AAC/B,IAAA,IAAI,GAAG,IAAI,IAAI,GAAG,CAAC;AAEnB,IAAA,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC;AACpB,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAExB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAWD,SAAS,4BAA4B,CAAC,gBAAwB,EAAA;;;IAG5D,IAAI,QAAQ,GAAG,EAAE,CAAC;IAClB,IAAI,gBAAgB,CAAC,MAAM,CAAC,6BAA6B,CAAC,KAAK,CAAC,CAAC,EAAE;;QAEjE,MAAM,gBAAgB,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACrD,QAAA,KAAK,MAAM,OAAO,IAAI,gBAAgB,EAAE;YACtC,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,6BAA6B,CAAC,EAAE;AAC5D,gBAAA,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,iCAAiC,CAAE,CAAC,CAAC,CAAC,CAAC;AACxE,aAAA;AACF,SAAA;AACF,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAEe,SAAA,oBAAoB,CAClC,gBAAwB,EACxB,QAM2B,EAAA;IAE3B,MAAM,QAAQ,GAAG,gBAAgB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC7C,IAAA,KAAK,MAAM,OAAO,IAAI,QAAQ,EAAE;QAC9B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;AACvC,YAAA,OAAO,OAAO,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,QAAQ,GAAG,OAAO,CAAE,CAAC,CAAC,CAAC,CAAC;AACrD,SAAA;AACF,KAAA;AACD,IAAA,OAAO,EAAE,CAAC;AACZ,CAAC;AAED;;;;;AAKG;AACG,SAAU,4BAA4B,CAAC,gBAAwB,EAAA;IACnE,IAAI,QAAQ,GAAG,EAAE,CAAC;AAElB,IAAA,IAAI,gBAAgB,CAAC,UAAU,CAAC,4BAA4B,CAAC,EAAE;;AAE7D,QAAA,QAAQ,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;QAC1D,gBAAgB,GAAG,2BAA2B,CAAC;AAChD,KAAA;;IAGD,IAAI,YAAY,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,cAAc,CAAC,CAAC;;;IAG1E,YAAY,GAAG,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC;IAErF,IACE,gBAAgB,CAAC,MAAM,CAAC,2BAA2B,CAAC,KAAK,CAAC,CAAC;QAC3D,gBAAgB,CAAC,MAAM,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,EAC7C;;QAGA,IAAI,wBAAwB,GAAG,EAAE,CAAC;QAClC,IAAI,WAAW,GAAG,EAAE,CAAC;QACrB,IAAI,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,YAAY,EAAE,QAAQ,CAAC,CAAC;QACrD,IAAI,cAAc,GAAG,EAAE,CAAC;;AAGxB,QAAA,WAAW,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,aAAa,CAAC,CAAC;AACpE,QAAA,UAAU,GAAG,MAAM,CA
AC,IAAI,CAAC,oBAAoB,CAAC,gBAAgB,EAAE,YAAY,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEzF,IAAI,CAAC,YAAY,EAAE;;;AAIjB,YAAA,wBAAwB,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,0BAA0B,CAAC,CAAC;AAC9F,YAAA,MAAM,QAAQ,GAAG,wBAAyB,CAAC,WAAW,EAAE,CAAC;AACzD,YAAA,IAAI,QAAQ,KAAK,OAAO,IAAI,QAAQ,KAAK,MAAM,EAAE;AAC/C,gBAAA,MAAM,IAAI,KAAK,CACb,iGAAiG,CAClG,CAAC;AACH,aAAA;AAED,YAAA,cAAc,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;YAC1E,IAAI,CAAC,cAAc,EAAE;AACnB,gBAAA,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;AAC7E,aAAA;YACD,YAAY,GAAG,GAAG,wBAAwB,CAAA,GAAA,EAAM,WAAW,CAAS,MAAA,EAAA,cAAc,EAAE,CAAC;AACtF,SAAA;QAED,IAAI,CAAC,WAAW,EAAE;AAChB,YAAA,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;AAC1E,SAAA;AAAM,aAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAClC,YAAA,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;AACzE,SAAA;QAED,OAAO;AACL,YAAA,IAAI,EAAE,mBAAmB;AACzB,YAAA,GAAG,EAAE,YAAY;YACjB,WAAW;YACX,UAAU;YACV,QAAQ;SACT,CAAC;AACH,KAAA;AAAM,SAAA;;QAGL,MAAM,UAAU,GAAG,oBAAoB,CAAC,gBAAgB,EAAE,uBAAuB,CAAC,CAAC;AACnF,QAAA,MAAM,WAAW,GAAG,qBAAqB,CAAC,YAAY,CAAC,CAAC;QACxD,IAAI,CAAC,YAAY,EAAE;AACjB,YAAA,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;AAC/E,SAAA;aAAM,IAAI,CAAC,UAAU,EAAE;AACtB,YAAA,MAAM,IAAI,KAAK,CAAC,qEAAqE,CAAC,CAAC;AACxF,SAAA;AAED,QAAA,OAAO,EAAE,IAAI,EAAE,eAAe,EAAE,GAAG,EAAE,YAAY,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC;AAC9E,KAAA;AACH,CAAC;AAED;;;;AAIG;AACH,SAAS,MAAM,CAAC,IAAY,EAAA;IAC1B,OAAO,kBAAkB,CAAC,IAAI,CAAC;AAC5B,SAAA,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC;AACpB,SAAA,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC;AACpB,SAAA,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC;AACrB,SAAA,OAAO,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAC1B,CAAC;AAED;;;;;;;AAOG;AACa,SAAA,eAAe,CAAC,GAAW,EAAE,IAAY,EAAA;IACvD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,IAAI,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;AAC/B,IAAA,IAAI,GAAG,IAAI,IAAI,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAA,EAAG,IAAI,CAAA,EAAG,IAAI,CAAA,CAAE,GAAG,CAAG,EAAA,IAAI,CAAI,CAAA,EAAA,IAAI,EAAE,IAAI,IAAI,CAAC;AACjF,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAExB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;;AAQG;SACa,eAAe,CAAC,GAAW,EAAE,IAAY,EAAE,KAAc,EAAA;IACvE,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,SAAS,CAAC,iBAAiB,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC;AACzC,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;AAKG;AACa,SAAA,eAAe,CAAC,GAAW,EAAE,IAAY,EAAA;IACvD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,sBAAsB,CAAC,IAAI,CAAC,CAAC;AAChD,CAAC;AAED;;;;;;AAMG;AACa,SAAA,UAAU,CAAC,GAAW,EAAE,IAAY,EAAA;IAClD,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACxB,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;AAIG;AACG,SAAU,UAAU,CAAC,GAAW,EAAA;IACpC,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,OAAO,EAAE,CAAC;AAC7B,CAAC;AAED;;;;AAIG;AACG,SAAU,YAAY,CAAC,GAAW,EAAA;IACtC,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,OAAO,SAAS,CAAC,SAAS,EAAE,CAAC;AAC/B,CAAC;AAED;;;;AAIG;AACG,SAAU,kBAAkB,CAAC,GAAW,EAAA;IAC5C,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACxC,IAAA,MAAM,UAAU,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC;IACvC,IAAI,CAAC,UAAU,EAAE;AACf,QAAA,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAC;AACzD,KAAA;IAED,IAAI,WAAW,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,CAAC;AAC7C,IAAA,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,IAAI,WAAW,KAAK,EAAE,EAAE;AACtB,QAAA,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,WAAW,GAAG,CAAA,CAAA,EAAI,WAAW,CAAE,CAAA,CAAC;AAC7E,KAAA;AAED,IAAA,OAAO,CAAG,EAAA,UAAU,CAAG,EAAA,WAAW,EAAE,CAAC;AACvC,CAAC;AAED;;;;AAIG;AACG,SAAU,aAAa,CAAC,GAAW,EAAA;IACvC,IAAI,WAAW,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,CAAC;IACnD,IAAI,CAAC,WA
AW,EAAE;AAChB,QAAA,OAAO,EAAE,CAAC;AACX,KAAA;AAED,IAAA,WAAW,GAAG,WAAW,CAAC,IAAI,EAAE,CAAC;IACjC,WAAW,GAAG,WAAW,CAAC,UAAU,CAAC,GAAG,CAAC,GAAG,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,WAAW,CAAC;IAEhF,IAAI,eAAe,GAAa,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACvD,eAAe,GAAG,eAAe,CAAC,MAAM,CAAC,CAAC,KAAa,KAAI;QACzD,MAAM,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QACxC,MAAM,gBAAgB,GAAG,KAAK,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;AAChD,QAAA,QACE,YAAY,GAAG,CAAC,IAAI,YAAY,KAAK,gBAAgB,IAAI,gBAAgB,GAAG,KAAK,CAAC,MAAM,GAAG,CAAC,EAC5F;AACJ,KAAC,CAAC,CAAC;IAEH,MAAM,OAAO,GAA8B,EAAE,CAAC;AAC9C,IAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;QAC5C,MAAM,YAAY,GAAG,cAAc,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAC/C,QAAA,MAAM,GAAG,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;AACpC,QAAA,MAAM,KAAK,GAAW,YAAY,CAAC,CAAC,CAAC,CAAC;AACtC,QAAA,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;AACtB,KAAA;AAED,IAAA,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;;;;;AAMG;AACa,SAAA,gBAAgB,CAAC,GAAW,EAAE,UAAkB,EAAA;IAC9D,MAAM,SAAS,GAAGA,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AAExC,IAAA,IAAI,KAAK,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC;AACjC,IAAA,IAAI,KAAK,EAAE;AACT,QAAA,KAAK,IAAI,GAAG,GAAG,UAAU,CAAC;AAC3B,KAAA;AAAM,SAAA;QACL,KAAK,GAAG,UAAU,CAAC;AACpB,KAAA;AAED,IAAA,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;AAC1B,IAAA,OAAO,SAAS,CAAC,QAAQ,EAAE,CAAC;AAC9B,CAAC;AAED;;;;;;;AAOG;SACa,oBAAoB,CAAC,IAAU,EAAE,mBAA4B,IAAI,EAAA;;AAE/E,IAAA,MAAM,UAAU,GAAG,IAAI,CAAC,WAAW,EAAE,CAAC;AAEtC,IAAA,OAAO,gBAAgB;AACrB,UAAE,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,MAAM,GAAG,GAAG;AAC/D,UAAE,UAAU,CAAC,SAAS,CAAC,CAAC,EAAE,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,GAAG,GAAG,CAAC;AAC3D,CAAC;AAED;;;;AAIG;AACG,SAAU,YAAY,CAAC,OAAe,EAAA;IAC1C,OAAO,CAACC,eAAM,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AAC3E,CAAC;AAWD;;;;AAIG;AACa,SAAA,eAAe,CAAC,aAAqB,EAAE,UAAkB,EAAA;;IAEvE,MAAM,qBAAqB,GAAG,EAAE,CAAC;;IAGjC,MAAM,mBAAmB,GAAG,CAAC,CAAC;AAE9B,IAAA,MAAM,6BAA6B,GAAG,qBAAqB,GAAG,mBAAmB,CAAC;AAElF,IAAA,IAAI,aAAa,CAAC,MAAM,GAAG,6BAA6B,EAAE;QACxD,aAAa,GAAG,aAAa,CAAC,KAAK,CAAC,CAAC,EAAE,6BAA6B,CAAC,CAAC;AACvE,KAAA;IACD,MAAM,GAAG,GACP,aAAa;AACb,QAAA,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,EAAE,qBAAqB,GAAG,aAAa,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AACrF,IAAA,OAAO,YAAY,CAAC,GAAG,CAAC,CAAC;AAC3B,CAAC;AAED;;;;;;AAMG;AACI,eAAe,KAAK,CACzB,QAAgB,EAChB,OAAyB,EACzB,UAAkB,EAAA;IAElB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;;AAE3C,QAAA,IAAI,OAAY,CAAC;QAEjB,MAAM,YAAY,GAAG,MAAK;YACxB,IAAI,OAAO,KAAK,SAAS,EAAE;gBACzB,YAAY,CAAC,OAAO,CAAC,CAAC;AACvB,aAAA;YACD,MAAM,CAAC,UAAU,CAAC,CAAC;AACrB,SAAC,CAAC;QAEF,MAAM,cAAc,GAAG,MAAK;YAC1B,IAAI,OAAO,KAAK,SAAS,EAAE;AACzB,gBAAA,OAAO,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACpD,aAAA;AACD,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC;AAEF,QAAA,OAAO,GAAG,UAAU,CAAC,cAAc,EAAE,QAAQ,CAAC,CAAC;QAE/C,IAAI,OAAO,KAAK,SAAS,EAAE;AACzB,YAAA,OAAO,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACjD,SAAA;AACH,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;AAMG;AACG,SAAU,QAAQ,CACtB,aAAqB,EACrB,YAAoB,EACpB,YAAoB,GAAG,EAAA;;AAGvB,IAAA,IAAI,MAAM,CAAC,SAAS,CAAC,QAAQ,EAAE;QAC7B,OAAO,aAAa,CAAC,QAAQ,CAAC,YAAY,EAAE,SAAS,CAAC,CAAC;AACxD,KAAA;AAED,IAAA,SAAS,GAAG,SAAS,IAAI,GAAG,CAAC;AAC7B,IAAA,IAAI,aAAa,CAAC,MAAM,GAAG,YAAY,EAAE;AACvC,QAAA,OAAO,aAAa,CAAC;AACtB,KAAA;AAAM,SAAA;AACL,QAAA,YAAY,GAAG,YAAY,GAAG,aAAa,CAAC,MAAM,CAAC;AACnD,QAAA,IAAI,YAAY,GAAG,SAAS,CAAC,MAAM,EAAE;YACnC,SAAS,IAAI,SAAS,CAAC,MAAM,CAAC,YAAY,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;AAChE,SAAA;QACD,OAAO,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,GAAG,aAAa,CAAC;AACzD,KAAA;AACH,CAAC;AAyBD;;;;;AAKG;AACa,SAAA,MAAM,CAAC,IAAY,EAAE,IAAY,EAAA;IAC/C,OAAO,IAAI,CAAC,iBAAiB,EAAE,KAAK,IAAI,CAAC,iBAAiB,EAAE,CAAC;AAC/D,CAAC
;AAED;;;;AAIG;AACG,SAAU,qBAAqB,CAAC,GAAW,EAAA;IAC/C,MAAM,SAAS,GAAeD,mBAAU,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACpD,IAAA,IAAI,WAAW,CAAC;IAChB,IAAI;AACF,QAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;AAEjD,YAAA,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,SAAA;AAAM,aAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;AAIvC,YAAA,WAAW,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AAClD,SAAA;AAAM,aAAA;;YAEL,WAAW,GAAG,EAAE,CAAC;AAClB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC;AACpB,KAAA;AAAC,IAAA,OAAO,KAAU,EAAE;AACnB,QAAA,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;AAC7E,KAAA;AACH,CAAC;AAEK,SAAU,iBAAiB,CAAC,SAAqB,EAAA;AACrD,IAAA,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,EAAE;AACrC,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;AAED,IAAA,MAAM,IAAI,GACR,SAAS,CAAC,OAAO,EAAG,IAAI,SAAS,CAAC,OAAO,EAAE,KAAK,SAAS,GAAG,EAAE,GAAG,GAAG,GAAG,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC;;;;;AAM9F,IAAA,OAAO,4HAA4H,CAAC,IAAI,CACtI,IAAI,CACL,CAAC;AACJ,CAAC;AAED;;;;AAIG;AACG,SAAU,gBAAgB,CAAC,IAAW,EAAA;IAC1C,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,QAAQ,GAAG,EAAE,CAAC;AACpB,IAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;AACtB,QAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;AACnD,YAAA,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;AACxB,YAAA,QAAQ,CAAC,IAAI,CAAC,CAAA,EAAG,kBAAkB,CAAC,GAAG,CAAC,CAAA,CAAA,EAAI,kBAAkB,CAAC,KAAK,CAAC,CAAA,CAAE,CAAC,CAAC;AAC1E,SAAA;AACF,KAAA;AAED,IAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAC5B,CAAC;AAED;;;;AAIG;AACG,SAAU,UAAU,CAAC,IAAW,EAAA;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;AAED,IAAA,MAAM,GAAG,GAAa;AACpB,QAAA,UAAU,EAAE,EAAE;KACf,CAAC;AAEF,IAAA,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE;AACtB,QAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE;AACnD,YAAA,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC;AACxB,YAAA,GAAG,CAAC,UAAU,CAAC,IAAI,CAAC;gBAClB,GAAG;gBACH,KAAK;AACN,aAAA,CAAC,CAAC;AACJ,SAAA;AACF,KAAA;AACD,IAAA,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;AAIG;AACG,SAAU,MAAM,CAAC,IAAe,EAAA;IACpC,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,GAAG,GAAS,EAAE,CAAC;AACrB,IAAA,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,UAAU,EAAE;QACrC,GAAG,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,OAAO,CAAC,KAAK,CAAC;AAClC,KAAA;AACD,IAAA,OAAO,GAAG,CAAC;AACb,CAAC;AAED;;;;AAIG;AACG,SAAU,oBAAoB,CAClC,iBAIiC,EAAA;IAEjC,IAAI,iBAAiB,KAAK,SAAS,EAAE;AACnC,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,QAAQ,iBAAiB,CAAC,IAAI;AAC5B,QAAA,KAAK,KAAK;YACR,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,WAAW;AACjB,oBAAA,0BAA0B,EAAE;AAC1B,wBAAA,eAAe,EAAE,iBAAiB,CAAC,eAAe,IAAI,GAAG;AACzD,wBAAA,UAAU,EAAE,iBAAiB,CAAC,UAAU,IAAI,EAAE;wBAC9C,eAAe,EAAE,iBAAiB,CAAC,eAAe;AAClD,wBAAA,UAAU,EAAE,iBAAiB,CAAC,eAAe,IAAI,EAAE;AACnD,wBAAA,cAAc,EAAE,iBAAiB,CAAC,UAAU,IAAI,KAAK;AACtD,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,MAAM;YACT,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,MAAM;AACZ,oBAAA,qBAAqB,EAAE;wBACrB,eAAe,EAAE,iBAAiB,CAAC,eAAe;AACnD,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,OAAO;YACV,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,OAAO;AACb,oBAAA,kBAAkB,EAAE;wBAClB,MAAM,EAAE,iBAAiB,CAAC,MAAM;AACjC,qBAAA;AACF,iBAAA;aACF,CAAC;AACJ,QAAA,KAAK,SAAS;YACZ,OAAO;AACL,gBAAA,MAAM,EAAE;AACN,oBAAA,IAAI,EAAE,SAAS;AAChB,iBAAA;aACF,CAAC;AAEJ,QAAA;AACE,YAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACtD,KAAA;AACH,CAAC;AAEK,SAAU,4BAA4B,CAC1C,uBAAgD,EAAA;IAEhD,IAAI,CAAC,uBAAuB,EAAE;AAC5B,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,IAAI,WAAW,IAAI,uBAAuB,EAAE;;;AAG1C,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,YAAY,GAA8B,EAAE,CAAC;AACnD,IAAA,KAAK,MAAM,GAAG,IAAI,uBAAuB,EAAE;QACzC,MAAM,GAAG,GAAG,GA
AG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QAC3B,MAAM,YAAY,GAAG,KAAK,CAAC;QAC3B,IAAI,GAAG,CAAC,CAAC,CAAC,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;AACnC,YAAA,GAAG,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;AAChD,SAAA;AACD,QAAA,MAAM,IAAI,GAA0B;AAClC,YAAA,MAAM,EAAE,GAAG,CAAC,CAAC,CAAC;AACd,YAAA,iBAAiB,EAAE,uBAAuB,CAAC,GAAG,CAA4B;SAC3E,CAAC;QACF,MAAM,WAAW,GAAG,YAAY,CAAC,SAAS,CAAC,CAAC,MAAM,KAAK,MAAM,CAAC,QAAQ,KAAK,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACnF,QAAA,IAAI,WAAW,GAAG,CAAC,CAAC,EAAE;YACpB,YAAY,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5C,SAAA;AAAM,aAAA;YACL,YAAY,CAAC,IAAI,CAAC;AAChB,gBAAA,QAAQ,EAAE,GAAG,CAAC,CAAC,CAAC;gBAChB,KAAK,EAAE,CAAC,IAAI,CAAC;AACd,aAAA,CAAC,CAAC;AACJ,SAAA;AACF,KAAA;AACD,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAED;;;;;AAKG;AACa,SAAA,gBAAgB,CAAI,KAAQ,EAAE,UAA2B,EAAA;AACtE,IAAA,KAAa,CAAC,UAAU,GAAG,UAAU,CAAC;AACvC,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAEK,SAAU,yBAAyB,CACvC,iBAAqC,EAAA;AAErC,IAAA,OAAO,iBAAiB,GAAG,iBAAiB,CAAC,MAAM,GAAG,GAAG,GAAG,iBAAiB,CAAC,KAAK,GAAG,SAAS,CAAC;AAClG,CAAC;AAEK,SAAU,gBAAgB,CAAC,IAAc,EAAA;IAC7C,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,QAAA,OAAO,kBAAkB,CAAC,IAAI,CAAC,OAAQ,CAAC,CAAC;AAC1C,KAAA;AAAM,SAAA;QACL,OAAO,IAAI,CAAC,OAAQ,CAAC;AACtB,KAAA;AACH,CAAC;AAEK,SAAU,qCAAqC,CACnD,gBAA8C,EAAA;IAE9C,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,gBAAgB,CACnB,EAAA,EAAA,OAAO,EAAE;AACP,YAAA,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AACpE,gBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;AACF,gBAAA,OAAO,QAAQ,CAAC;AAClB,aAAC,CAAC;SACH,EACD,CAAA,CAAA;AACJ,CAAC;AAEK,SAAU,0CAA0C,CACxD,gBAAmD,EAAA;;IAEnD,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,gBAAgB,CACnB,EAAA,EAAA,OAAO,EAAE;AACP,YAAA,YAAY,EAAE,CAAA,EAAA,GAAA,gBAAgB,CAAC,OAAO,CAAC,YAAY,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,GAAG,CAAC,CAAC,kBAAkB,KAAI;AAC9E,gBAAA,MAAM,UAAU,GAAoB;AAClC,oBAAA,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;iBAChD,CAAC;AACF,gBAAA,OAAO,UAAU,CAAC;AACpB,aAAC,CAAC;AACF,YAAA,SAAS,EAAE,gBAAgB,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AACpE,gBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,GAC7C,CAAC;AACF,gBAAA,OAAO,QAAQ,CAAC;AAClB,aAAC,CAAC;SACH,EACD,CAAA,CAAA;AACJ,CAAC;AAED,SAAS,kBAAkB,CAAC,KAAa,EAAA;AACvC,IAAA,IAAIC,eAAM,EAAE;QACV,OAAO,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACrC,KAAA;AAAM,SAAA;AACL,QAAA,MAAM,UAAU,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAC/B,MAAM,GAAG,GAAG,IAAI,UAAU,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;AAC9C,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,GAAG,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC;AACnC,SAAA;AACD,QAAA,OAAO,GAAG,CAAC;AACZ,KAAA;AACH,CAAC;AAED,SAAS,YAAY,CAAC,OAAY,EAAA;IAChC,IAAI,OAAO,KAAK,SAAS;AAAE,QAAA,OAAO,SAAS,CAAC;IAC5C,IAAI,OAAO,KAAK,MAAM;AAAE,QAAA,OAAO,IAAI,CAAC;IACpC,IAAI,OAAO,KAAK,OAAO;AAAE,QAAA,OAAO,KAAK,CAAC;AACtC,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB,EAAA;AACvC,IAAA,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,aAAa,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;QACxE,OAAO;YACL,OAAO,EAAE,YAAY,CAAC,aAAa,CAAC,GAAG,CAAC,CAAC,SAAS,CAAC,CAAC;AACpD,YAAA,OAAO,EAAE,aAAa,CAAC,GAAG,CAAW;SACtC,CAAC;AACH,KAAA;AAAM,SAAA;QACL,OAAO;AACL,YAAA,OAAO,EAAE,KAAK;AACd,YAAA,OAAO,EAAE,aAAuB;SACjC,CAAC;AACH,KAAA;AACH,CAAC;AAED,SAAS,mBAAmB,CAAC,mBAAwB,EAAA;IACnD,MAAM,cAAc,GAAG,mBAAmB,CAAC;AAC3C,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC
,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;AACpF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;QACxC,cAAc,CAAC,YAAY,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,eAAe,CAAW,CAAC,CAAC;AACvF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,MAAM,CAAC,EAAE;AAC/B,QAAA,cAAc,CAAC,IAAI,GAAG,mBAAmB,CAAC,MAAM,CAAW,CAAC;AAC5D,QAAA,OAAO,cAAc,CAAC,MAAM,CAAC,CAAC;AAC/B,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,aAAa,GAAG,UAAU,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;AAC3F,QAAA,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACzC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;AACvC,QAAA,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;AAC3E,QAAA,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;AACvC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;AAC3C,QAAA,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;AACnF,QAAA,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;AAC3C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,kBAAkB,CAAC,EAAE;AAC3C,QAAA,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,kBAAkB,CAAW,CAAC;AACnF,QAAA,OAAO,cAAc,CAAC,kBAAkB,CAAC,CAAC;AAC3C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,UAAU,GAAG,kBAAkB,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;AAC7F,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;AAC9C,QAAA,cAAc,CAAC,kBAAkB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;AACzF,QAAA,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;AAC9C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;AACxC,QAAA,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,eAAe,CAAW,CAAC;AAC7E,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,2BAA2B,CAAC,EAAE;QACpD,cAAc,CAAC,kBAAkB,GAAG,UAAU,CAC5C,mBAAmB,CAAC,2BAA2B,CAAW,CAC3D,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;AACpD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;AACnC,QAAA,cAAc,CAAC,QAAQ,GAAG,mBAAmB,CAAC,UAAU,CAAa,CAAC;AACtE,QAAA,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;AACnC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;AACtC,QAAA,cAAc,CAAC,WAAW,GAAG,mBAAmB,CAAC,aAAa,CAAoB,CAAC;AACnF,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;AAChF,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;AACxC,QAAA,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAsB,CAAC;AACzF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;AACjC,QAAA,cAAc,CAAC,MAAM,GAAG,mBAAmB,CAAC,QAAQ,CAAW,CAAC;AAChE,QAAA,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;AACjC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAmB,CAAC;AAChF,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAW,CAAC;AACxE,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,cAAc,CAAC,EAAE;AACvC,QAAA,cAAc,CAAC,YAAY,GAAG,mBAAmB,CAAC,cAAc,CAAW,CAAC;AAC5E,QAAA,OAAO,cAAc,CAAC,cAAc,CAAC,CAAC;AACvC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,eAAe,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,oBAAoB,CAAW,CAAC,CAAC;AAC/F,QAAA,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;AAC7C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,uBAAuB,CAAC,EAAE;AAChD,QAAA,cAAc,CAAC,qBAAqB,GAAG,mBAAmB,CAAC,uBAAuB,CAAW,CAAC;AAC9F,QAAA,OAAO,cAAc,CAAC,uBAAuB,CAAC,CAAC;AAChD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACtF,QAAA,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAC1C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;QAC1C,cAAc,CAAC,eAAe,GAAG,YAAY,CAAC,mBAAmB,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACtF,QAAA,OAAO,cAAc,CAAC,iBAAiB,
CAAC,CAAC;AAC1C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,qBAAqB,CAAC,EAAE;AAC9C,QAAA,cAAc,CAAC,mBAAmB,GAAG,mBAAmB,CAAC,qBAAqB,CAAW,CAAC;AAC1F,QAAA,OAAO,cAAc,CAAC,qBAAqB,CAAC,CAAC;AAC9C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;AAClF,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;QACjD,cAAc,CAAC,sBAAsB,GAAG,UAAU,CAChD,mBAAmB,CAAC,wBAAwB,CAAW,CACxD,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;AACjD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,YAAY,CAAC,EAAE;AACrC,QAAA,cAAc,CAAC,UAAU,GAAG,mBAAmB,CAAC,YAAY,CAAe,CAAC;AAC5E,QAAA,OAAO,cAAc,CAAC,YAAY,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,oBAAoB,CAAC,EAAE;QAC7C,cAAc,CAAC,kBAAkB,GAAG,YAAY,CAAC,mBAAmB,CAAC,oBAAoB,CAAC,CAAC,CAAC;AAC5F,QAAA,OAAO,cAAc,CAAC,oBAAoB,CAAC,CAAC;AAC7C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,eAAe,CAAC,EAAE;AACxC,QAAA,cAAc,CAAC,aAAa,GAAG,mBAAmB,CAAC,eAAe,CAAkB,CAAC;AACrF,QAAA,OAAO,cAAc,CAAC,eAAe,CAAC,CAAC;AACxC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,2BAA2B,CAAC,EAAE;AACpD,QAAA,cAAc,CAAC,yBAAyB,GAAG,mBAAmB,CAC5D,2BAA2B,CAClB,CAAC;AACZ,QAAA,OAAO,cAAc,CAAC,2BAA2B,CAAC,CAAC;AACpD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,iBAAiB,CAAC,EAAE;AAC1C,QAAA,cAAc,CAAC,eAAe,GAAG,mBAAmB,CAAC,iBAAiB,CAAW,CAAC;AAClF,QAAA,OAAO,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAC1C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,sBAAsB,CAAC,EAAE;QAC/C,cAAc,CAAC,mBAAmB,GAAG,IAAI,IAAI,CAC3C,mBAAmB,CAAC,sBAAsB,CAAW,CACtD,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,sBAAsB,CAAC,CAAC;AAC/C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,UAAU,CAAC,EAAE;QACnC,cAAc,CAAC,QAAQ,GAAG,UAAU,CAAC,mBAAmB,CAAC,UAAU,CAAW,CAAC,CAAC;AAChF,QAAA,OAAO,cAAc,CAAC,UAAU,CAAC,CAAC;AACnC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,aAAa,CAAC,EAAE;QACtC,cAAc,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,aAAa,CAAW,CAAC,CAAC;AAClF,QAAA,OAAO,cAAc,CAAC,aAAa,CAAC,CAAC;AACtC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,QAAQ,CAAC,EAAE;QACjC,cAAc,CAAC,QAAQ,GAAG,YAAY,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC,CAAC;AACtE,QAAA,OAAO,cAAc,CAAC,QAAQ,CAAC,CAAC;AACjC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,mBAAmB,CAAC,EAAE;AAC5C,QAAA,cAAc,CAAC,iBAAiB,GAAG,mBAAmB,CACpD,mBAAmB,CACC,CAAC;AACvB,QAAA,OAAO,cAAc,CAAC,mBAAmB,CAAC,CAAC;AAC5C,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,gBAAgB,CAAC,EAAE;QACzC,cAAc,CAAC,cAAc,GAAG,IAAI,IAAI,CAAC,mBAAmB,CAAC,gBAAgB,CAAW,CAAC,CAAC;AAC1F,QAAA,OAAO,cAAc,CAAC,gBAAgB,CAAC,CAAC;AACzC,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,6BAA6B,CAAC,EAAE;QACtD,cAAc,CAAC,2BAA2B,GAAG,IAAI,IAAI,CACnD,mBAAmB,CAAC,6BAA6B,CAAW,CAC7D,CAAC;AACF,QAAA,OAAO,cAAc,CAAC,6BAA6B,CAAC,CAAC;AACtD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,wBAAwB,CAAC,EAAE;AACjD,QAAA,cAAc,CAAC,sBAAsB,GAAG,mBAAmB,CACzD,wBAAwB,CACK,CAAC;AAChC,QAAA,OAAO,cAAc,CAAC,wBAAwB,CAAC,CAAC;AACjD,KAAA;AAED,IAAA,IAAI,mBAAmB,CAAC,WAAW,CAAC,EAAE;QACpC,cAAc,CAAC,SAAS,GAAG,YAAY,CAAC,mBAAmB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC1E,QAAA,OAAO,cAAc,CAAC,WAAW,CAAC,CAAC;AACpC,KAAA;AAED,IAAA,OAAO,cAAc,CAAC;AACxB,CAAC;AAED,SAAS,aAAa,CAAC,SAAc,EAAA;IACnC,MAAM,QAAQ,GAAG,SAAS,CAAC;IAC3B,QAAQ,CAAC,UAAU,GAAG,mBAAmB,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC,CAAC;AACnE,IAAA,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;IAE9B,QAAQ,CAAC,IAAI,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;AACjD,IAAA,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;IACxB,QAAQ,CAAC,OAAO,GAAG,YAAY,CAAC,SAAS,CAAC,SAAS,CAAC,CAAE,CAAC;AACvD,IAAA,OAAO,QAAQ,CAAC,SAAS,CAAC,CAAC;AAE3B,IAAA,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;AACzB,QAAA,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAW,CAAC;AACpD,QAAA,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;AAC7B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,WAAW,CAAC,EAAE;AAC1B,QAAA,QAAQ,CAAC,SAAS,GAAG,SAAS,CAAC,WAAW,CAAW,CAAC;AACtD,QAAA,OAAO,QAAQ,CAAC,WAAW,CAAC,CAAC;AAC9B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,
kBAAkB,CAAC,EAAE;QACjC,QAAQ,CAAC,gBAAgB,GAAG,YAAY,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC,CAAC;AACxE,QAAA,OAAO,QAAQ,CAAC,kBAAkB,CAAC,CAAC;AACrC,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,UAAU,CAAC,EAAE;AACzB,QAAA,QAAQ,CAAC,QAAQ,GAAG,SAAS,CAAC,UAAU,CAAC,CAAC;AAC1C,QAAA,OAAO,QAAQ,CAAC,UAAU,CAAC,CAAC;AAC7B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,MAAM,CAAC,EAAE;QACrB,QAAQ,CAAC,QAAQ,GAAG,aAAa,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC;AACrD,QAAA,OAAO,QAAQ,CAAC,MAAM,CAAC,CAAC;AACzB,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;AAC3B,QAAA,QAAQ,CAAC,yBAAyB,GAAG,SAAS,CAAC,YAAY,CAAC,CAAC;AAC7D,QAAA,OAAO,QAAQ,CAAC,YAAY,CAAC,CAAC;AAC/B,KAAA;AAED,IAAA,IAAI,SAAS,CAAC,iBAAiB,CAAC,EAAE;QAChC,QAAQ,CAAC,eAAe,GAAG,YAAY,CAAC,SAAS,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACtE,QAAA,OAAO,QAAQ,CAAC,iBAAiB,CAAC,CAAC;AACpC,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC;AAClB,CAAC;AAED,SAAS,eAAe,CAAC,eAAoB,EAAA;IAC3C,OAAO;AACL,QAAA,IAAI,EAAE,aAAa,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;KAC7C,CAAC;AACJ,CAAC;AAED,SAAS,YAAY,CAAC,YAAiB,EAAA;IACrC,OAAO;AACL,QAAA,GAAG,EAAE,YAAY,CAAC,KAAK,CAAC;AACxB,QAAA,KAAK,EAAE,YAAY,CAAC,OAAO,CAAC;KAC7B,CAAC;AACJ,CAAC;AAED,SAAS,aAAa,CAAC,aAAkB,EAAA;IACvC,IACE,aAAa,KAAK,SAAS;AAC3B,QAAA,aAAa,CAAC,QAAQ,CAAC,KAAK,SAAS;QACrC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,KAAK,SAAS,EAC5C;AACA,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,MAAM,UAAU,GAAG,EAAE,CAAC;IACtB,IAAI,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,YAAY,KAAK,EAAE;AACnD,QAAA,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,OAAO,CAAC,CAAC,YAAiB,KAAI;YAC3D,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,YAAY,CAAC,CAAC,CAAC;AAC9C,SAAC,CAAC,CAAC;AACJ,KAAA;AAAM,SAAA;AACL,QAAA,UAAU,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAC/D,KAAA;AAED,IAAA,OAAO,EAAE,UAAU,EAAE,UAAU,EAAE,CAAC;AACpC,CAAC;AAEK,SAAU,gBAAgB,CAAC,cAAqB,EAAA;IACpD,MAAM,SAAS,GAAG,EAAE,CAAC;IAErB,IAAI,cAAc,YAAY,KAAK,EAAE;AACnC,QAAA,cAAc,CAAC,OAAO,CAAC,CAAC,SAAc,KAAI;YACxC,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;AACJ,KAAA;AAAM,SAAA;QACL,SAAS,CAAC,IAAI,CAAC,aAAa,CAAC,cAAc,CAAC,CAAC,CAAC;AAC/C,KAAA;AAED,IAAA,OAAO,SAAS,CAAC;AACnB,CAAC;AAEK,SAAU,mBAAmB,CAAC,iBAAwB,EAAA;IAC1D,MAAM,YAAY,GAAG,EAAE,CAAC;IAExB,IAAI,iBAAiB,YAAY,KAAK,EAAE;AACtC,QAAA,iBAAiB,CAAC,OAAO,CAAC,CAAC,eAAoB,KAAI;YACjD,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,CAAC,CAAC;AACtD,SAAC,CAAC,CAAC;AACJ,KAAA;AAAM,SAAA;QACL,YAAY,CAAC,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAC,CAAC,CAAC;AACvD,KAAA;AAED,IAAA,OAAO,YAAY,CAAC;AACtB,CAAC;AAEc,UAAE,yBAAyB,CACxC,oBAA4D,EAAA;IAE5D,IAAI,SAAS,GAAgB,EAAE,CAAC;IAChC,IAAI,UAAU,GAAiB,EAAE,CAAC;IAElC,IAAI,oBAAoB,CAAC,SAAS;AAAE,QAAA,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;IAC/E,IAAI,oBAAoB,CAAC,UAAU;AAAE,QAAA,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;IAElF,IAAI,cAAc,GAAG,CAAC,CAAC;IACvB,IAAI,eAAe,GAAG,CAAC,CAAC;IAExB,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,IAAI,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE;AAC/E,QAAA,IAAI,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,GAAG,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK,EAAE;YACvE,MAAM;AACJ,gBAAA,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;AACtC,gBAAA,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;AAClC,gBAAA,OAAO,EAAE,KAAK;aACf,CAAC;AACF,YAAA,EAAE,cAAc,CAAC;AAClB,SAAA;AAAM,aAAA;YACL,MAAM;AACJ,gBAAA,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;AACxC,gBAAA,GAAG,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;AACpC,gBAAA,OAAO,EAAE,IAAI;aACd,CAAC;AACF,YAAA,EAAE,eAAe,CAAC;AACnB,SAAA;AACF,KAAA;IAED,OAAO,cAAc,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,cAAc,EAAE;QAC1D,MAAM;AACJ,YAAA,KAAK,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK;AACtC,YAAA,GAAG,EAAE,SAAS,CAAC,cAAc,CAAC,CAAC,GAAG;AAClC,YAAA,OAAO,EAAE,KAAK;SACf,CAAC;AACH,KAAA;IAED,OAAO,eAAe,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,eAAe,EAAE;QAC7D,MAAM;AACJ,YAAA,KAAK,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,KAAK;AACxC,YAAA,GAAG
,EAAE,UAAU,CAAC,eAAe,CAAC,CAAC,GAAG;AACpC,YAAA,OAAO,EAAE,IAAI;SACd,CAAC;AACH,KAAA;AACH;;AC3tCA;AAeA;;;;;;;;;;AAUG;AACG,MAAO,oBAAqB,SAAQC,0BAAiB,CAAA;AACzD;;;;AAIG;;;IAGH,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,IAAID,eAAM,EAAE;YACV,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;AAC9C,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE,KAAK,MAAM,EAAE;YACrF,OAAO,CAAC,GAAG,GAAG,eAAe,CAC3B,OAAO,CAAC,GAAG,EACX,YAAY,CAAC,UAAU,CAAC,sBAAsB,EAC9C,IAAI,IAAI,EAAE,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAChC,CAAC;AACH,SAAA;QAED,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;;QAG/C,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,eAAe,CAAC,cAAc,CAAC,CAAC;QAEvD,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;AC/DD;AAOA;;AAEG;MACU,2BAA2B,CAAA;AACtC;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;AACpE,QAAA,OAAO,IAAI,oBAAoB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KACtD;AACF;;ACpBD;AAkCA;;AAEG;AACSE,wCASX;AATD,CAAA,UAAY,sBAAsB,EAAA;AAChC;;AAEG;AACH,IAAA,sBAAA,CAAA,sBAAA,CAAA,aAAA,CAAA,GAAA,CAAA,CAAA,GAAA,aAAW,CAAA;AACX;;AAEG;AACH,IAAA,sBAAA,CAAA,sBAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AACP,CAAC,EATWA,8BAAsB,KAAtBA,8BAAsB,GASjC,EAAA,CAAA,CAAA,CAAA;AAED;AACA,MAAM,qBAAqB,GAAwB;IACjD,iBAAiB,EAAE,GAAG,GAAG,IAAI;AAC7B,IAAA,QAAQ,EAAE,CAAC;IACX,cAAc,EAAE,CAAC,GAAG,IAAI;IACxB,eAAe,EAAEA,8BAAsB,CAAC,WAAW;AACnD,IAAA,aAAa,EAAE,EAAE;IACjB,cAAc,EAAE,SAAS;CAC1B,CAAC;AAEF,MAAM,iBAAiB,GAAG,IAAIC,0BAAU,CAAC,4BAA4B,CAAC,CAAC;AAEvE;;AAEG;AACG,MAAO,kBAAmB,SAAQF,0BAAiB,CAAA;AAMvD;;;;;;AAMG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,eAAoC,qBAAqB,EAAA;AAEzD,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;;QAG3B,IAAI,CAAC,YAAY,GAAG;YAClB,eAAe,EAAE,YAAY,CAAC,eAAe;kBACzC,YAAY,CAAC,eAAe;kBAC5B,qBAAqB,CAAC,eAAe;YAEzC,QAAQ,EACN,YAAY,CAAC,QAAQ,IAAI,YAAY,CAAC,QAAQ,IAAI,CAAC;kBAC/C,IAAI,CAAC,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC;kBACjC,qBAAqB,CAAC,QAAQ;YAEpC,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;kBAC3D,YAAY,CAAC,cAAc;kBAC3B,qBAAqB,CAAC,cAAc;YAE1C,cAAc,EACZ,YAAY,CAAC,cAAc,IAAI,YAAY,CAAC,cAAc,IAAI,CAAC;kBAC3D,IAAI,CAAC,GAAG,CACN,YAAY,CAAC,cAAc,EAC3B,YAAY,CAAC,iBAAiB;sBAC1B,YAAY,CAAC,iBAAiB;AAChC,sBAAE,qBAAqB,CAAC,iBAAkB,CAC7C;kBACD,qBAAqB,CAAC,cAAc;YAE1C,iBAAiB,EACf,YAAY,CAAC,iBAAiB,IAAI,YAAY,CAAC,iBAAiB,IAAI,CAAC;kBACjE,YAAY,CAAC,iBAAiB;kBAC9B,qBAAqB,CAAC,iBAAiB;YAE7C,aAAa,EAAE,YAAY,CAAC,aAAa;kBACrC,YAAY,CAAC,aAAa;kBAC1B,qBAAqB,CAAC,aAAa;SACxC,CAAC;KACH;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,KAAK,EAAE,CAAC,CAAC,CAAC;KACnD;AAED;;;;;;;;;AASG;AACO,IAAA,MAAM,kBAAkB,CAChC,OAAoB,EACpB,eAAwB,EACxB,OAAe,EAAA;AAEf,QAAA,MAAM,UAAU,GAAgB,OAAO,CAAC,KAAK,EAAE,CAAC;QAEhD,MAAM,cAAc,GAClB,eAAe;AACf,YAAA,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa;AAChC,YAAA,EAAE,OAAO,CAAC,MAAM,KAAK,KAAK,IAAI,OAAO,CAAC,MAAM,KAAK,MAAM,IAAI,OAAO,CAAC,MAAM,KAAK,SAAS,CAAC;AACxF,YAAA,OAAO,GAAG,CAAC,KAAK,CAAC,CAAC;QAEpB,IAAI,CAAC,cAAc,EAAE;AACnB,YAAA,UAAU,CAAC,GAAG,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,aAAc,CAAC,CAAC;AAC/E,SAAA;;AAGD,QAAA,IAAI,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE;AACpC,YAAA,UAAU,CAAC,GAAG,GAAG,eAAe,CAC9B,UAAU,CAAC,GAAG,EACd,YAAY,CAAC,UAAU,CAAC,OAAO,EAC/B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,YAAY,CAAC,cAAe,GAAG,IAAI,CAAC,CAAC,QAAQ,EAAE,CAChE,CAAC;AACH,SAAA;AAED,QAAA,IAAI,QAA2C,CAAC;QAChD,IAAI;AACF,YAAA,MAAM,CAAC,IAAI,CAAC,CAA2B,wBAAA,EAAA,OAAO,IAAI,cAAc,GAAG,SAAS,GAAG,WAAW,CAAA,CAAE,CAAC,CAAC;YAC9F,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,CAAC,CAAC;YAC1D,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC
,cAAc,EAAE,OAAO,EAAE,QAAQ,CAAC,EAAE;AACxD,gBAAA,OAAO,QAAQ,CAAC;AACjB,aAAA;AAED,YAAA,eAAe,GAAG,eAAe,KAAK,CAAC,cAAc,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,CAAC,CAAC;AACnF,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,MAAM,CAAC,KAAK,CAAC,CAAA,oCAAA,EAAuC,GAAG,CAAC,OAAO,CAAA,QAAA,EAAW,GAAG,CAAC,IAAI,CAAA,CAAE,CAAC,CAAC;AACtF,YAAA,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,cAAc,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,CAAC,EAAE;AAC7D,gBAAA,MAAM,GAAG,CAAC;AACX,aAAA;AACF,SAAA;AAED,QAAA,MAAM,IAAI,CAAC,KAAK,CAAC,cAAc,EAAE,OAAO,EAAE,OAAO,CAAC,WAAW,CAAC,CAAC;QAC/D,OAAO,IAAI,CAAC,kBAAkB,CAAC,OAAO,EAAE,eAAe,EAAE,EAAE,OAAO,CAAC,CAAC;KACrE;AAED;;;;;;;AAOG;AACO,IAAA,WAAW,CACnB,cAAuB,EACvB,OAAe,EACf,QAAgC,EAChC,GAAe,EAAA;AAEf,QAAA,IAAI,OAAO,IAAI,IAAI,CAAC,YAAY,CAAC,QAAS,EAAE;YAC1C,MAAM,CAAC,IAAI,CACT,CAAA,wBAAA,EAA2B,OAAO,CAAgB,aAAA,EAAA,IAAI,CAAC,YAAY;iBAChE,QAAS,CAAA,iBAAA,CAAmB,CAChC,CAAC;AACF,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;;;AAID,QAAA,MAAM,eAAe,GAAG;YACtB,WAAW;YACX,iBAAiB;YACjB,cAAc;YACd,YAAY;YACZ,QAAQ;YACR,WAAW;YACX,SAAS;YACT,OAAO;AACP,YAAA,oBAAoB;SACrB,CAAC;AACF,QAAA,IAAI,GAAG,EAAE;AACP,YAAA,KAAK,MAAM,cAAc,IAAI,eAAe,EAAE;gBAC5C,IACE,GAAG,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;oBAC/C,GAAG,CAAC,OAAO,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,cAAc,CAAC;AAClD,qBAAC,GAAG,CAAC,IAAI,IAAI,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,WAAW,EAAE,KAAK,cAAc,CAAC,EAClE;AACA,oBAAA,MAAM,CAAC,IAAI,CAAC,8BAA8B,cAAc,CAAA,mBAAA,CAAqB,CAAC,CAAC;AAC/E,oBAAA,OAAO,IAAI,CAAC;AACb,iBAAA;AACF,aAAA;AACF,SAAA;;;;QAKD,IAAI,QAAQ,IAAI,GAAG,EAAE;YACnB,MAAM,UAAU,GAAG,QAAQ,GAAG,QAAQ,CAAC,MAAM,GAAG,GAAG,GAAG,GAAG,CAAC,UAAU,GAAG,CAAC,CAAC;AACzE,YAAA,IAAI,CAAC,cAAc,IAAI,UAAU,KAAK,GAAG,EAAE;AACzC,gBAAA,MAAM,CAAC,IAAI,CAAC,CAAA,mDAAA,CAAqD,CAAC,CAAC;AACnE,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;;AAGD,YAAA,IAAI,UAAU,KAAK,GAAG,IAAI,UAAU,KAAK,GAAG,EAAE;AAC5C,gBAAA,MAAM,CAAC,IAAI,CAAC,2CAA2C,UAAU,CAAA,CAAA,CAAG,CAAC,CAAC;AACtE,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;QAED,IAAI,CAAA,GAAG,KAAA,IAAA,IAAH,GAAG,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAH,GAAG,CAAE,IAAI,MAAK,aAAa,KAAI,GAAG,aAAH,GAAG,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAH,GAAG,CAAE,OAAO,CAAC,UAAU,CAAC,CAAA,+BAAA,CAAiC,CAAC,CAAA,EAAE;AAC7F,YAAA,MAAM,CAAC,IAAI,CACT,iFAAiF,CAClF,CAAC;AACF,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAED,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;;AAMG;AACK,IAAA,MAAM,KAAK,CAAC,cAAuB,EAAE,OAAe,EAAE,WAA6B,EAAA;QACzF,IAAI,aAAa,GAAW,CAAC,CAAC;AAE9B,QAAA,IAAI,cAAc,EAAE;AAClB,YAAA,QAAQ,IAAI,CAAC,YAAY,CAAC,eAAe;gBACvC,KAAKC,8BAAsB,CAAC,WAAW;AACrC,oBAAA,aAAa,GAAG,IAAI,CAAC,GAAG,CACtB,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,OAAO,GAAG,CAAC,CAAC,GAAG,CAAC,IAAI,IAAI,CAAC,YAAY,CAAC,cAAe,EAClE,IAAI,CAAC,YAAY,CAAC,iBAAkB,CACrC,CAAC;oBACF,MAAM;gBACR,KAAKA,8BAAsB,CAAC,KAAK;AAC/B,oBAAA,aAAa,GAAG,IAAI,CAAC,YAAY,CAAC,cAAe,CAAC;oBAClD,MAAM;AACT,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,aAAa,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,IAAI,CAAC;AACtC,SAAA;AAED,QAAA,MAAM,CAAC,IAAI,CAAC,0BAA0B,aAAa,CAAA,EAAA,CAAI,CAAC,CAAC;QACzD,OAAO,KAAK,CAAC,aAAa,EAAE,WAAW,EAAE,iBAAiB,CAAC,CAAC;KAC7D;AACF;;ACjSD;AAyDA;;AAEG;MACU,yBAAyB,CAAA;AAGpC;;;AAGG;AACH,IAAA,WAAA,CAAY,YAAkC,EAAA;AAC5C,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;AAED;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;QACpE,OAAO,IAAI,kBAAkB,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;KACvE;AACF;;AChFD;AAKA;;;AAGG;AACG,MAAgB,gBAAiB,SAAQD,0BAAiB,CAAA;AAC9D;;;;AAIG;AACI,IAAA,WAAW,CAAC,OAAoB,EAAA;AACrC,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC,CAAC;KAChE;AAED;;;;;AAKG;AACO,IAAA,WAAW,CAAC,OAAoB,EAAA;;;AAGxC,QAAA,OAAO,OAAO,CAAC;KAChB;AACF;;AC9BD;AAOA;;;AAGG;AACG,MAAO,yBAA0B,SAAQ,gBAAgB,CAAA;AAC7D;;;;AAIG;;;IAGH,WAAY,CA
AA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;AACF;;ACtBD;AACA;AAKA;;;AAGG;MACmB,UAAU,CAAA;AAC9B;;;;;AAKG;IACI,MAAM,CAAC,WAA0B,EAAE,QAA8B,EAAA;AACtE,QAAA,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC;KACtE;AACF;;ACpBD;AAQA;;;;;AAKG;AACG,MAAO,mBAAoB,SAAQ,UAAU,CAAA;AACjD;;;;;AAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;AAE7B,QAAA,OAAO,IAAI,yBAAyB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC3D;AACF;;AC3BD;AAeA;;AAEG;AACG,MAAO,eAAgB,SAAQA,0BAAiB,CAAA;AAMpD;;;;;AAKG;AACH,IAAA,WAAA,CAAY,UAAyB,EAAE,OAA6B,EAAE,SAAiB,EAAA;AACrF,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;KAC5B;AAED;;;;AAIG;IACI,MAAM,WAAW,CAAC,OAAoB,EAAA;AAC3C,QAAA,IAAID,eAAM,EAAE;AACV,YAAA,IAAI,CAAC,OAAO,CAAC,OAAO,EAAE;AACpB,gBAAA,OAAO,CAAC,OAAO,GAAG,IAAII,oBAAW,EAAE,CAAC;AACrC,aAAA;YACD,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;AACpD,gBAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;AACjE,aAAA;AACF,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF;;ACpDD;AAeA;;AAEG;MACU,sBAAsB,CAAA;AAMjC;;;AAGG;AACH,IAAA,WAAA,CAAY,SAA4B,EAAA;QACtC,MAAM,aAAa,GAAa,EAAE,CAAC;AAEnC,QAAA,IAAIJ,eAAM,EAAE;AACV,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,MAAM,eAAe,GAAG,SAAS,CAAC,eAAe,IAAI,EAAE,CAAC;AACxD,gBAAA,IAAI,eAAe,CAAC,MAAM,GAAG,CAAC,IAAI,aAAa,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,EAAE;AAC/E,oBAAA,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;AACrC,iBAAA;AACF,aAAA;;AAGD,YAAA,MAAM,OAAO,GAAG,CAAwB,qBAAA,EAAA,WAAW,EAAE,CAAC;YACtD,IAAI,aAAa,CAAC,OAAO,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE;AACzC,gBAAA,aAAa,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AAC7B,aAAA;;AAGD,YAAA,IAAI,WAAW,GAAG,CAAA,cAAA,EAAiB,OAAO,CAAC,OAAO,GAAG,CAAC;AACtD,YAAA,IAAIK,aAAE,EAAE;AACN,gBAAA,WAAW,GAAG,CAAiB,cAAA,EAAA,OAAO,CAAC,OAAO,KAAKA,aAAE,CAAC,IAAI,EAAE,IAAIA,aAAE,CAAC,OAAO,EAAE,GAAG,CAAC;AACjF,aAAA;YACD,IAAI,aAAa,CAAC,OAAO,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC,EAAE;AAC7C,gBAAA,aAAa,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AACjC,aAAA;AACF,SAAA;QAED,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAChD;AAED;;;;;AAKG;IACI,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;QACpE,OAAO,IAAI,eAAe,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;KACvE;AACF;;ACnED;AAMA,MAAM,kBAAkB,GAAG,IAAIC,0BAAiB,EAAE,CAAC;SAEnC,0BAA0B,GAAA;AACxC,IAAA,OAAO,kBAAkB,CAAC;AAC5B;;ACVA;AAcA;;AAEG;AACH,MAAM,SAAS,GAAG;AAChB,IAAA,YAAY,EAAE,WAAW;AACzB;;AAEG;AACH,IAAA,eAAe,EAAE;AACf;;AAEG;AACH,QAAA,aAAa,EAAE,eAAe;AAC/B,KAAA;CACF,CAAC;AAiCF;AACA,MAAM,sBAAsB,GAAuB;AACjD,IAAA,uBAAuB,EAAE,IAAI;AAC7B,IAAA,iBAAiB,EAAE,IAAI;AACvB,IAAA,iBAAiB,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;CACjC,CAAC;AAEF;;;;;;;;;;;;AAYG;AACH,eAAe,YAAY,CACzB,cAAiD,EACjD,iBAAyB,EACzB,WAAmB,EAAA;;;AAInB,IAAA,eAAe,iBAAiB,GAAA;AAC9B,QAAA,IAAI,IAAI,CAAC,GAAG,EAAE,GAAG,WAAW,EAAE;YAC5B,IAAI;gBACF,OAAO,MAAM,cAAc,EAAE,CAAC;AAC/B,aAAA;YAAC,OAAM,EAAA,EAAA;AACN,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,UAAU,GAAG,MAAM,cAAc,EAAE,CAAC;;YAG1C,IAAI,UAAU,KAAK,IAAI,EAAE;AACvB,gBAAA,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,OAAO,UAAU,CAAC;AACnB,SAAA;KACF;AAED,IAAA,IAAI,KAAK,GAAuB,MAAM,iBAAiB,EAAE,CAAC;IAE1D,OAAO,KAAK,KAAK,IAAI,EAAE;AACrB,QAAA,MAAMC,cAAK,CAAC,iBAAiB,CAAC,CAAC;AAE/B,QAAA,KAAK,GAAG,MAAM,iBAAiB,EAAE,CAAC;AACnC,KAAA;AAED,IAAA,OAAO,KAAK,CAAC;AACf,CAAC;AAED;;;;;;;;;;;;;;AAcG;AACH,SAAS,iBAAiB,CACxB,UAA2B,EAC3B,MAAyB,EACzB,kBAAgD,EAAA;IAEhD,IAAI,aAAa,GAAgC,IAAI,CAAC;IACtD,IAAI,KAAK,GAAuB,IAAI,CAAC;AAErC,IAAA,MAAM,OAAO,GACR,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,sBAAsB,CACtB,EAAA,kBAAkB,CACtB,CAAC;AAEF;;;AAGG;AACH,IAAA,MAAM,MAAM,GAAG;AACb;;AAEG;AACH,QAAA,IAAI,YAAY,GAA
A;YACd,OAAO,aAAa,KAAK,IAAI,CAAC;SAC/B;AACD;;;AAGG;AACH,QAAA,IAAI,aAAa,GAAA;;AACf,YAAA,QACE,CAAC,MAAM,CAAC,YAAY;gBACpB,CAAC,CAAA,EAAA,GAAA,KAAK,KAAL,IAAA,IAAA,KAAK,uBAAL,KAAK,CAAE,kBAAkB,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,CAAC,IAAI,OAAO,CAAC,iBAAiB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzE;SACH;AACD;;;AAGG;AACH,QAAA,IAAI,WAAW,GAAA;AACb,YAAA,QACE,KAAK,KAAK,IAAI,IAAI,KAAK,CAAC,kBAAkB,GAAG,OAAO,CAAC,uBAAuB,GAAG,IAAI,CAAC,GAAG,EAAE,EACzF;SACH;KACF,CAAC;AAEF;;;AAGG;IACH,SAAS,OAAO,CAAC,eAAgC,EAAA;;AAC/C,QAAA,IAAI,CAAC,MAAM,CAAC,YAAY,EAAE;;AAExB,YAAA,MAAM,iBAAiB,GAAG,MACxB,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,eAAe,CAAC,CAAC;;;AAI/C,YAAA,aAAa,GAAG,YAAY,CAC1B,iBAAiB,EACjB,OAAO,CAAC,iBAAiB;;AAEzB,YAAA,CAAA,EAAA,GAAA,KAAK,KAAA,IAAA,IAAL,KAAK,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAL,KAAK,CAAE,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAI,IAAI,CAAC,GAAG,EAAE,CACxC;AACE,iBAAA,IAAI,CAAC,CAAC,MAAM,KAAI;gBACf,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,MAAM,CAAC;AACf,gBAAA,OAAO,KAAK,CAAC;AACf,aAAC,CAAC;AACD,iBAAA,KAAK,CAAC,CAAC,MAAM,KAAI;;;;gBAIhB,aAAa,GAAG,IAAI,CAAC;gBACrB,KAAK,GAAG,IAAI,CAAC;AACb,gBAAA,MAAM,MAAM,CAAC;AACf,aAAC,CAAC,CAAC;AACN,SAAA;AAED,QAAA,OAAO,aAAqC,CAAC;KAC9C;AAED,IAAA,OAAO,OAAO,YAA6B,KAA0B;;;;;;;;;;QAWnE,IAAI,MAAM,CAAC,WAAW;AAAE,YAAA,OAAO,OAAO,CAAC,YAAY,CAAC,CAAC;QAErD,IAAI,MAAM,CAAC,aAAa,EAAE;YACxB,OAAO,CAAC,YAAY,CAAC,CAAC;AACvB,SAAA;AAED,QAAA,OAAO,KAAoB,CAAC;AAC9B,KAAC,CAAC;AACJ,CAAC;AACD;;;AAGG;AACH,SAAS,YAAY,CAAC,QAA+B,EAAA;IACnD,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;AAC3D,IAAA,IAAI,QAAQ,CAAC,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE;AACxC,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IACD,OAAO;AACT,CAAC;AAUD;;;;;AAKG;AACH,SAAS,cAAc,CAAC,SAAiB,EAAA;IACvC,MAAM,eAAe,GAAG,SAAS,CAAC,KAAK,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC;IAC1D,MAAM,cAAc,GAAG,CAAG,EAAA,eAAe,CAAC,IAAI,EAAE,CAAG,CAAA,CAAA,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;AAChF,IAAA,MAAM,aAAa,GAAG,cAAc,CAAC,GAAG,CAAC,CAAC,QAAQ,KAChD,CAAC,CAAC,CAAC,GAAG,EAAE,KAAK,CAAC,MAAM,EAAE,CAAC,GAAG,GAAG,KAAK,EAAE,CAAC,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CACnE,CAAC;;AAEF,IAAA,OAAO,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,MAAK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAM,CAAC,CAAK,EAAA,CAAC,EAAG,EAAE,EAAE,CAAC,CAAC;AAC9D,CAAC;AAED;AAEA;;;;;;AAMG;AAEa,SAAA,+CAA+C,CAC7D,UAA2B,EAC3B,MAAyB,EAAA;;IAGzB,IAAI,QAAQ,GAAG,iBAAiB,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;IAErD,MAAM,+CAAgD,SAAQN,0BAAiB,CAAA;QAC7E,WAAmB,CAAA,UAAyB,EAAE,OAA6B,EAAA;AACzE,YAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SAC5B;QAEM,MAAM,WAAW,CAAC,WAA4B,EAAA;AACnD,YAAA,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,UAAU,CAAC,EAAE;AACzD,gBAAA,MAAM,IAAI,KAAK,CACb,sFAAsF,CACvF,CAAC;AACH,aAAA;YAED,MAAM,gBAAgB,GAAG,QAAQ,CAAC;AAClC,YAAA,MAAM,KAAK,GAAG,CACZ,MAAM,gBAAgB,CAAC;gBACrB,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,gBAAA,cAAc,EAAE;oBACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,iBAAA;aACF,CAAC,EACF,KAAK,CAAC;AACR,YAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CAAC,SAAS,CAAC,eAAe,CAAC,aAAa,EAAE,CAAA,OAAA,EAAU,KAAK,CAAA,CAAE,CAAC,CAAC;YAEpF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;YAEjE,IAAI,CAAA,QAAQ,KAAA,IAAA,IAAR,QAAQ,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAR,QAAQ,CAAE,MAAM,MAAK,GAAG,EAAE;AAC5B,gBAAA,MAAM,SAAS,GAAG,YAAY,CAAC,QAAQ,CAAC,CAAC;AACzC,gBAAA,IAAI,SAAS,EAAE;AACb,oBAAA,MAAM,aAAa,GAAc,cAAc,CAAC,SAAS,CAAC,CAAC;oBAC3D,MAAM,eAAe,GAAG,aAAa,CAAC,WAAW,GAAG,SAAS,CAAC,YAAY,CAAC;oBAC3E,MAAM,aAAa,GAAGF,mBAAU,CAAC,KAAK,CAAC,aAAa,CAAC,iBAAiB,CAAC,CAAC;oBACxE,MAAM,YAAY,GAAG,aAAa,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;AACzD,oBAAA,MAAM,QAAQ,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;oBACjC
,MAAM,oBAAoB,GAAG,iBAAiB,CAAC,UAAU,EAAE,eAAe,CAAC,CAAC;AAE5E,oBAAA,MAAM,iBAAiB,GAAG,CACxB,MAAM,oBAAoB,CAAC;wBACzB,WAAW,EAAE,WAAW,CAAC,WAAW;AACpC,wBAAA,cAAc,EAAE;4BACd,cAAc,EAAE,WAAW,CAAC,cAAc;AAC3C,yBAAA;AACD,wBAAA,QAAQ,EAAE,QAAQ;qBACnB,CAAC,EACF,KAAK,CAAC;oBAER,QAAQ,GAAG,oBAAoB,CAAC;AAChC,oBAAA,WAAW,CAAC,OAAO,CAAC,GAAG,CACrB,SAAS,CAAC,eAAe,CAAC,aAAa,EACvC,CAAA,OAAA,EAAU,iBAAiB,CAAA,CAAE,CAC9B,CAAC;oBACF,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAClD,iBAAA;AACF,aAAA;AAED,YAAA,OAAO,QAAQ,CAAC;SACjB;AACF,KAAA;IAED,OAAO;AACL,QAAA,MAAM,EAAE,CAAC,UAAyB,EAAE,OAA6B,KAAI;AACnE,YAAA,OAAO,IAAI,+CAA+C,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;SACjF;KACF,CAAC;AACJ;;AC9VA;AAgGA;;;;AAIG;AACG,SAAU,cAAc,CAAC,QAAiB,EAAA;AAC9C,IAAA,IAAI,CAAC,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAC7C,QAAA,OAAO,KAAK,CAAC;AACd,KAAA;IAED,MAAM,YAAY,GAAG,QAAwB,CAAC;IAE9C,QACE,KAAK,CAAC,OAAO,CAAC,YAAY,CAAC,SAAS,CAAC;AACrC,QAAA,OAAO,YAAY,CAAC,OAAO,KAAK,QAAQ;AACxC,QAAA,OAAO,YAAY,CAAC,sBAAsB,KAAK,UAAU,EACzD;AACJ,CAAC;AAED;;;;;;;AAOG;MACU,QAAQ,CAAA;AAUnB;;;;;AAKG;IACH,WAAY,CAAA,SAAiC,EAAE,OAAA,GAA2B,EAAE,EAAA;AAC1E,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;;;AAG3B,QAAA,IAAI,CAAC,OAAO,GACP,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,KACV,UAAU,EAAE,OAAO,CAAC,UAAU,IAAI,0BAA0B,EAAE,GAC/D,CAAC;KACH;AAED;;;;;AAKG;IACI,sBAAsB,GAAA;QAC3B,OAAO;AACL,YAAA,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,UAAU;YACnC,sBAAsB,EAAE,IAAI,CAAC,SAAS;SACvC,CAAC;KACH;AACF,CAAA;AAgCD;;;;;;AAMG;SACa,WAAW,CACzB,UAA+E,EAC/E,kBAA0C,EAAE,EAAA;;IAE5C,IAAI,UAAU,KAAK,SAAS,EAAE;AAC5B,QAAA,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;AACxC,KAAA;;;;IAMD,MAAM,eAAe,GAAG,IAAI,sBAAsB,CAAC,eAAe,CAAC,gBAAgB,CAAC,CAAC;AACrF,IAAA,MAAM,SAAS,GAA2B;QACxCS,sBAAa,CAAC,EAAE,SAAS,EAAE,eAAe,CAAC,eAAe,EAAE,CAAC;AAC7D,QAAAC,wBAAe,CAAC,eAAe,CAAC,gBAAgB,CAAC;QACjD,eAAe;AACf,QAAAC,sCAA6B,EAAE;AAC/B,QAAA,IAAI,2BAA2B,EAAE;AACjC,QAAA,IAAI,yBAAyB,CAAC,eAAe,CAAC,YAAY,CAAC;;;;QAI3DC,8BAAqB,CAAC,SAAS,EAAE,EAAE,UAAU,EAAE,GAAG,EAAE,CAAC;AACrD,QAAAC,kBAAS,CAAC;YACR,MAAM,EAAE,MAAM,CAAC,IAAI;AACnB,YAAA,kBAAkB,EAAE,oCAAoC;AACxD,YAAA,sBAAsB,EAAE,wCAAwC;SACjE,CAAC;KACH,CAAC;AAEF,IAAA,IAAIZ,eAAM,EAAE;;QAEV,SAAS,CAAC,IAAI,CAACa,oBAAW,CAAC,eAAe,CAAC,YAAY,CAAC,CAAC,CAAC;AAC1D,QAAA,SAAS,CAAC,IAAI,CAACC,2CAAkC,EAAE,CAAC,CAAC;AACtD,KAAA;AACD,IAAA,SAAS,CAAC,IAAI,CACZC,0BAAiB,CAAC,UAAU,CAAC;AAC3B,UAAE,gBAAgB,CACd,+CAA+C,CAC7C,UAAU,EACV,CAAA,EAAA,GAAA,eAAe,CAAC,QAAQ,MAAI,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,EAAA,GAAA,kBAAkB,CAC/C,EACD,UAAU,CACX;UACD,UAAU,CACf,CAAC;AAEF,IAAA,OAAO,IAAI,QAAQ,CAAC,SAAS,EAAE,eAAe,CAAC,CAAC;AAClD;;ACzPA;AASA;;AAEG;AACG,MAAO,gCAAiC,SAAQ,gBAAgB,CAAA;AAMpE;;;;;AAKG;AACH,IAAA,WAAA,CACE,UAAyB,EACzB,OAA6B,EAC7B,OAAmC,EAAA;AAEnC,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;KACxB;AAED;;;;AAIG;AACO,IAAA,WAAW,CAAC,OAAoB,EAAA;AACxC,QAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,SAAS,EAAE,IAAI,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC;QAEzE,IACE,OAAO,CAAC,IAAI;AACZ,aAAC,OAAO,OAAO,CAAC,IAAI,KAAK,QAAQ,IAAK,OAAO,CAAC,IAAe,KAAK,SAAS,CAAC;AAC5E,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EACvB;AACA,YAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,eAAe,CAAC,cAAc,EAAE,MAAM,CAAC,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;AACtF,SAAA;AAED,QAAA,MAAM,YAAY,GAChB;AACE,YAAA,OAAO,CAAC,MAAM,CAAC,WAAW,EAAE;YAC5B,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,gBAAgB,CAAC;YACpE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,cAAc,CAAC;YAClE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,WAAW,CAAC;YAC/D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,YAAY,CAAC;YAChE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,IA
AI,CAAC;YACxD,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,iBAAiB,CAAC;YACrE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,QAAQ,CAAC;YAC5D,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,aAAa,CAAC;YACjE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,mBAAmB,CAAC;YACvE,IAAI,CAAC,oBAAoB,CAAC,OAAO,EAAE,eAAe,CAAC,KAAK,CAAC;SAC1D,CAAC,IAAI,CAAC,IAAI,CAAC;YACZ,IAAI;AACJ,YAAA,IAAI,CAAC,6BAA6B,CAAC,OAAO,CAAC;AAC3C,YAAA,IAAI,CAAC,8BAA8B,CAAC,OAAO,CAAC,CAAC;QAE/C,MAAM,SAAS,GAAW,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AACvE,QAAA,OAAO,CAAC,OAAO,CAAC,GAAG,CACjB,eAAe,CAAC,aAAa,EAC7B,aAAa,IAAI,CAAC,OAAO,CAAC,WAAW,IAAI,SAAS,CAAA,CAAE,CACrD,CAAC;;;;;AAMF,QAAA,OAAO,OAAO,CAAC;KAChB;AAED;;;;;;AAMG;IACK,oBAAoB,CAAC,OAAoB,EAAE,UAAkB,EAAA;QACnE,MAAM,KAAK,GAAG,OAAO,CAAC,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;QAE9C,IAAI,CAAC,KAAK,EAAE;AACV,YAAA,OAAO,EAAE,CAAC;AACX,SAAA;;;;QAKD,IAAI,UAAU,KAAK,eAAe,CAAC,cAAc,IAAI,KAAK,KAAK,GAAG,EAAE;AAClE,YAAA,OAAO,EAAE,CAAC;AACX,SAAA;AAED,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;;;;;;;;AAYG;AACK,IAAA,6BAA6B,CAAC,OAAoB,EAAA;AACxD,QAAA,IAAI,YAAY,GAAG,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,CAAC,KAAK,KAAI;AACjE,YAAA,OAAO,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,UAAU,CAAC,eAAe,CAAC,kBAAkB,CAAC,CAAC;AACjF,SAAC,CAAC,CAAC;QAEH,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAY;AACjC,YAAA,OAAO,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,aAAa,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,CAAC;AAClE,SAAC,CAAC,CAAC;;AAGH,QAAA,YAAY,GAAG,YAAY,CAAC,MAAM,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,KAAI;YACzD,IAAI,KAAK,GAAG,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,WAAW,EAAE,KAAK,KAAK,CAAC,KAAK,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;AACjF,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AACD,YAAA,OAAO,IAAI,CAAC;AACd,SAAC,CAAC,CAAC;QAEH,IAAI,gCAAgC,GAAW,EAAE,CAAC;AAClD,QAAA,YAAY,CAAC,OAAO,CAAC,CAAC,MAAM,KAAI;AAC9B,YAAA,gCAAgC,IAAI,CAAA,EAAG,MAAM,CAAC,IAAI;AAC/C,iBAAA,WAAW,EAAE;iBACb,SAAS,EAAE,IAAI,MAAM,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAA,EAAA,CAAI,CAAC;AAChD,SAAC,CAAC,CAAC;AAEH,QAAA,OAAO,gCAAgC,CAAC;KACzC;AAED;;;;AAIG;AACK,IAAA,8BAA8B,CAAC,OAAoB,EAAA;QACzD,MAAM,IAAI,GAAG,UAAU,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC;QAE5C,IAAI,2BAA2B,GAAW,EAAE,CAAC;QAC7C,2BAA2B,IAAI,CAAI,CAAA,EAAA,IAAI,CAAC,OAAO,CAAC,WAAW,CAAA,EAAG,IAAI,CAAA,CAAE,CAAC;QAErE,MAAM,OAAO,GAAG,aAAa,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAC3C,MAAM,gBAAgB,GAA8B,EAAE,CAAC;AACvD,QAAA,IAAI,OAAO,EAAE;YACX,MAAM,SAAS,GAAa,EAAE,CAAC;AAC/B,YAAA,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE;AACzB,gBAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;AACtD,oBAAA,MAAM,YAAY,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC;oBACvC,gBAAgB,CAAC,YAAY,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC;AAC9C,oBAAA,SAAS,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;AAC9B,iBAAA;AACF,aAAA;YAED,SAAS,CAAC,IAAI,EAAE,CAAC;AACjB,YAAA,KAAK,MAAM,GAAG,IAAI,SAAS,EAAE;AAC3B,gBAAA,2BAA2B,IAAI,CAAA,EAAA,EAAK,GAAG,CAAA,CAAA,EAAI,kBAAkB,CAAC,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAA,CAAE,CAAC;AACxF,aAAA;AACF,SAAA;AAED,QAAA,OAAO,2BAA2B,CAAC;KACpC;AACF;;AChLD;AASA;;;;AAIG;AACG,MAAO,0BAA2B,SAAQ,UAAU,CAAA;AAWxD;;;;AAIG;IACH,WAAY,CAAA,WAAmB,EAAE,UAAkB,EAAA;AACjD,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;QAC/B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;KACrD;AAED;;;;;AAKG;IACI,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;QAE7B,OAAO,IAAI,gCAAgC,CAAC,UAAU,EAAE,OAAO,EAAE,IAAI,CAAC,CAAC;KACxE;AAED;;;;AAIG;AACI,IAAA,iBAAiB,CAAC,YAAoB,EAAA;QAC3C,OAAOC,iBAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;KAC5F;AACF;;ACzDD;;;;;;AAMG;AAKH,MAAM,WAAW,GAAG,oBAAoB,CAAC;AACzC,MAAM,cAAc,GAAG,SAAS,CAAC;AAEpB,MAAA,oBAAqB,SAAQvU,mBAAQ,CAAC,aAAa,CAAA;AAI9D;;;;;AAKG
;IACH,WAAY,CAAA,GAAW,EAAE,OAAqC,EAAA;QAC5D,IAAI,GAAG,KAAK,SAAS,EAAE;AACrB,YAAA,MAAM,IAAI,KAAK,CAAC,sBAAsB,CAAC,CAAC;AACzC,SAAA;;QAGD,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,YAAA,MAAM,gBAAgB,GAAGA,mBAAQ,CAAC,wBAAwB,EAAE,CAAC;YAC7D,OAAO,CAAC,SAAS,GAAG,CAAG,EAAA,WAAW,IAAI,cAAc,CAAA,CAAA,EAAI,gBAAgB,CAAA,CAAE,CAAC;AAC5E,SAAA;AAED,QAAA,KAAK,CAAC,SAAS,EAAE,OAAO,CAAC,CAAC;AAE1B,QAAA,IAAI,CAAC,kBAAkB,GAAG,iCAAiC,CAAC;QAE5D,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,QAAQ,IAAI,OAAO,CAAC;;AAG3C,QAAA,IAAI,CAAC,GAAG,GAAG,GAAG,CAAC;;QAGf,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,YAAY,CAAC;KAChD;AACF;;ACnDD;AAqBA;;;AAGG;MACmB,aAAa,CAAA;AAyBjC;;;;AAIG;IACH,WAAsB,CAAA,GAAW,EAAE,QAAsB,EAAA;;AAEvD,QAAA,IAAI,CAAC,GAAG,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC;AAC9B,QAAA,IAAI,CAAC,WAAW,GAAG,qBAAqB,CAAC,GAAG,CAAC,CAAC;AAC9C,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,oBAAoB,GAAG,IAAI,oBAAoB,CAClD,IAAI,CAAC,GAAG,EACR,QAAQ,CAAC,sBAAsB,EAAE,CAClC,CAAC;AAEF,QAAA,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,OAAO,CAAC,CAAC;AAE7D,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAC5C,KAAK,MAAM,OAAO,IAAI,IAAI,CAAC,QAAQ,CAAC,SAAS,EAAE;AAC7C,YAAA,IACE,CAACuT,eAAM,IAAI,OAAO,YAAY,0BAA0B;gBACxD,OAAO,YAAY,mBAAmB,EACtC;AACA,gBAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC;AAC3B,aAAA;AAAM,iBAAA,IAAIe,0BAAiB,CAAE,OAAe,CAAC,UAAU,CAAC,EAAE;;;AAGzD,gBAAA,IAAI,CAAC,UAAU,GAAI,OAAe,CAAC,UAAU,CAAC;AAC/C,aAAA;AACF,SAAA;;AAGD,QAAA,MAAM,oBAAoB,GAAG,IAAI,CAAC,oBAA2B,CAAC;AAC9D,QAAA,oBAAoB,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AACF;;ACrFD;AAMA;;;AAGG;AACI,MAAM,UAAU,GAAGE,8BAAkB,CAAC;AAC3C,IAAA,aAAa,EAAE,oBAAoB;AACnC,IAAA,SAAS,EAAE,mBAAmB;AAC/B,CAAA,CAAC,CAAC;AAEH;;;;;;AAMG;AACG,SAAU,kCAAkC,CAChD,OAA0B,EAAA;;IAE1B,OAAO;;QAEL,WAAW,EAAE,CAAC,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,cAAsB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,WAAW;QAC1D,cAAc,EAAE,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,cAAc,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,cAAc;KACxD,CAAC;AACJ;;AC9BA;AACA;AAEA;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAA/B,IAAA,WAAA,GAAA;AAiGE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;KA6CzC;AAlMC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEpD,QAAA,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;AAC9B,YAAA,QAAQ,IAAI;AACV,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;oBACxC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;oBAC9B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,qBAAqB,GAAG,
IAAI,CAAC;oBAChD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC1C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,uBAAuB,IAAI,CAAA,CAAE,CAAC,CAAC;AACvD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAAsC,EAAA;AACvD,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;QACpD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;AAChC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;AAC/B,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;AACjC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,kBAAkB,CAAC,MAAM,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,kBAAkB,CAAC,aAAa,GAAG,IAAI,CAAC;AACzC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,kBAAkB,CAAC,GAAG,GAAG,IAAI,CAAC;AAC/B,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;AAChC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,kBAAkB,CAAC,OAAO,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,kBAAkB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACjD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,kBAAkB,CAAC,eAAe,GAAG,IAAI,CAAC;AAC3C,SAAA;AACD,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAyDD;;;;;AAKG;IACI,QAAQ,GAAA;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;AC/MD;AACA;AAEA;;;;;;AAMG;MACU,uBAAuB,CAAA;AAApC,IAAA,WAAA,GAAA;AA6GE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;AAExC;;AAEG;QACI,IAAY,CAAA,YAAA,GAAY,KAAK,CAAC;KAqDtC;AAhOC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAE9D,QAAA,KAAK,MAAM,IAAI,IAAI,WAAW,EAAE;AAC9B,YAAA,QAAQ,IAAI;AACV,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACrC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GA
AG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC7C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACrD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC/C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;oBAC5C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,sBAAsB,IAAI,CAAA,CAAE,CAAC,CAAC;AACtD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAA2C,EAAA;AAC5D,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;QAC9D,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;AACvC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,uBAAuB,CAAC,KAAK,GAAG,IAAI,CAAC;AACtC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;AACvC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,uBAAuB,CAAC,aAAa,GAAG,IAAI,CAAC;AAC9C,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,uBAAuB,CAAC,GAAG,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,uBAAuB,CAAC,IAAI,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;AACxC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,uBAAuB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACtD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,uBAAuB,CAAC,eAAe,GAAG,IAAI,CAAC;AAChD,SAAA;QACD,IAAI,cAAc,CAAC,YAAY,EAAE;AAC/B,YAAA,uBAAuB,CAAC,YAAY,GAAG,IAAI,CAAC;AAC7C,SAAA;AACD,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAmED;;;;;;;AAOG;IACI,QAAQ,GAAA;QACb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,YAAY,EAAE;AACrB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;AC3OD;AAMA;;;;;AAKG;MACU,2BAA2B,CAAA;AAgBtC;;;;AAIG;IACH,WAAY,CAAA,WAAmB,EAAE,iBAAoC,EAAA;AACnE,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,iBAAiB,GAAG,iBAAiB,CAAC;AAC3C,QAAA,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;KAC3D;AAED;;;;AAIG;AACI,IAAA,iBAAiB,CAAC,YAAoB,EAAA;;
QAG3C,OAAOD,iBAAU,CAAC,QAAQ,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;KACrF;AACF;;ACjDD;AACA;AAkBA;;;;;;AAMG;AACG,SAAU,eAAe,CAAC,OAAmB,EAAA;IACjD,OAAO,OAAO,CAAC,GAAG,GAAG,CAAA,EAAG,OAAO,CAAC,KAAK,IAAI,OAAO,CAAC,GAAG,CAAE,CAAA,GAAG,OAAO,CAAC,KAAK,CAAC;AACzE;;AC5BA;AAOA;;AAEG;AACSE,6BAUX;AAVD,CAAA,UAAY,WAAW,EAAA;AACrB;;AAEG;AACH,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AAEf;;AAEG;AACH,IAAA,WAAA,CAAA,cAAA,CAAA,GAAA,YAA2B,CAAA;AAC7B,CAAC,EAVWA,mBAAW,KAAXA,mBAAW,GAUtB,EAAA,CAAA,CAAA,CAAA;AA4FD;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAsN7B,IAAA,WAAA,CACE,OAAe,EACf,SAAiB,EACjB,oBAAyD,EACzD,QAAiB,EACjB,aAAsB,EACtB,QAAsB,EACtB,QAAe,EACf,SAAgB,EAChB,OAAoB,EACpB,UAAmB,EACnB,QAAiB,EACjB,YAAqB,EACrB,kBAA2B,EAC3B,eAAwB,EACxB,eAAwB,EACxB,WAAoB,EACpB,iBAAqC,EACrC,0BAAmC,EACnC,aAAsB,EACtB,eAAwB,EAAA;AAExB,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;AACvB,QAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;QAE3B,IAAI,oBAAoB,KAAK,SAAS,IAAI,OAAO,oBAAoB,KAAK,QAAQ,EAAE;;AAElF,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;AACpD,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;AACxD,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,SAAS,CAAC;AAChD,YAAA,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,OAAO,CAAC;AACjD,YAAA,IAAI,CAAC,UAAU,GAAG,oBAAoB,CAAC,UAAU,CAAC;AAClD,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,QAAQ,CAAC;AAC9C,YAAA,IAAI,CAAC,YAAY,GAAG,oBAAoB,CAAC,YAAY,CAAC;AACtD,YAAA,IAAI,CAAC,kBAAkB,GAAG,oBAAoB,CAAC,kBAAkB,CAAC;AAClE,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,eAAe,CAAC;AAC5D,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC,WAAW,CAAC;YAEpD,IAAI,oBAAoB,CAAC,iBAAiB,EAAE;gBAC1C,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBACvE,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,cAAc,CAAC;gBAC5E,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,eAAe,CAAC;gBAC9E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;gBAC1E,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,iBAAiB,CAAC,aAAa,CAAC;AAE1E,gBAAA,IAAI,CAAC,0BAA0B,GAAG,oBAAoB,CAAC,0BAA0B,CAAC;AAClF,gBAAA,IAAI,CAAC,aAAa,GAAG,oBAAoB,CAAC,aAAa,CAAC;AACzD,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,YAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC3B,YAAA,IAAI,CAAC,WAAW,GAAG,oBAAoB,CAAC;AACxC,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC;AAC5B,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,YAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,YAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;AACjC,YAAA,IAAI,CAAC,kBAAkB,GAAG,kBAAkB,CAAC;AAC7C,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,YAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAE/B,YAAA,IAAI,iBAAiB,EAAE;AACrB,gBAAA,IAAI,CAAC,SAAS,GAAG,iBAAiB,CAAC,cAAc,CAAC;AAClD,gBAAA,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;AACvD,gBAAA,IAAI,CAAC,cAAc,GAAG,iBAAiB,CAAC,cAAc,CAAC;AACvD,gBAAA,IAAI,CAAC,eAAe,GAAG,iBAAiB,CAAC,eAAe,CAAC;AACzD,gBAAA,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;AACrD,gBAAA,IAAI,CAAC,aAAa,GAAG,iBAAiB,CAAC,aAAa,CAAC;AAErD,gBAAA,IAAI,CAAC,0BAA0B,GAAG,0BAA0B,CAAC;AAC7D,gBAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACpC,aAAA;AACF,SAAA;KACF;AA1JD;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;QAChB,IAAI,IAAI,CAAC,YAAY,EAAE;YACrB,OAAO;AACL,gBAAA,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,GAAG;AAC1B,gBAAA,KAAK,EAAE,IAA
I,CAAC,YAAY,CAAC,KAAK;aAC/B,CAAC;AACH,SAAA;AACD,QAAA,OAAO,SAAS,CAAC;KAClB;AA+ID;;;AAGG;IACI,QAAQ,GAAA;AACb,QAAA,MAAM,MAAM,GAAa;YACvB,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,IAAI;YACJ,KAAK;YACL,OAAO;YACP,OAAO;YACP,KAAK;YACL,KAAK;YACL,KAAK;YACL,KAAK;YACL,IAAI;YACJ,IAAI;YACJ,KAAK;YACL,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,MAAM;YACN,OAAO;YACP,MAAM;SACP,CAAC;QACF,MAAM,OAAO,GAAa,EAAE,CAAC;AAE7B,QAAA,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE;AAC1B,YAAA,QAAQ,KAAK;AACX,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;oBAC3D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,QAAQ,GAAG,oBAAoB,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,CAAC,GAAG,SAAS,CACvE,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,SAAS,GAAG,oBAAoB,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,GAAG,SAAS,CACzE,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,OAAO,GAAG,eAAe,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,SAAS,CACzD,CAAC;oBACF,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;oBAC9D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;gBACR,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,cAAc,CAAC,CAAC;oBAClE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,cAAc,GAAG,oBAAoB,CAAC,IAAI,CAAC,cAAc,EAAE,KAAK,CAAC,GAAG,SAAS,CACnF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAC1B,OAAO,EACP,KAAK,EACL,IAAI,CAAC,eAAe,GAAG,oBAAoB,CAAC,IAAI,CAAC,eAAe,EAAE,KAAK,CAAC,GAAG,SAAS,CACrF,CAAC;oBACF,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;gBACR,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;oBAC5D,MAAM;AACR,gBAAA,KAAK,IAAI;oBACP,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;AACR,gBAAA,KAAK,KAAK;oBACR,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;oBAC7D,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;oBAChE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,kBAAkB,CAAC,CAAC;oBACtE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,eAAe,CAAC,CAAC;oBACnE,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC;oBAC/D,MAAM;AACR,gBAAA,KAAK,OAAO;oBACV,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,0BAA0B,CAAC,CAAC;oBAC9E,MAAM;AACR,gBAAA,KAAK,MAAM;oBACT,IAAI,CAAC,uBAAuB,CAAC,OAAO,EAAE,KAAK,EAAE,IAAI,CAAC,aAAa,CAAC,CAAC;oBACjE,MAAM;AACT,aAAA;AACF,SAAA;AACD,QAAA,OAAO,OAAO,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;KAC1B;AAED;;;;;;AAMG;AACK,IAAA,uBAAuB,CAAC,OAAiB,EAAE,GAAW,EAAE,KAAc,EAAA;QAC5E,IAAI,CAAC,KAAK,EAAE;YAC
V,OAAO;AACR,SAAA;AAED,QAAA,GAAG,GAAG,kBAAkB,CAAC,GAAG,CAAC,CAAC;AAC9B,QAAA,KAAK,GAAG,kBAAkB,CAAC,KAAK,CAAC,CAAC;QAClC,IAAI,GAAG,CAAC,MAAM,GAAG,CAAC,IAAI,KAAK,CAAC,MAAM,GAAG,CAAC,EAAE;YACtC,OAAO,CAAC,IAAI,CAAC,CAAA,EAAG,GAAG,CAAI,CAAA,EAAA,KAAK,CAAE,CAAA,CAAC,CAAC;AACjC,SAAA;KACF;AACF;;AC9jBD;SAsPgB,8BAA8B,CAC5C,sBAA8C,EAC9C,sCAAsF,EACtF,WAAoB,EAAA;AAEpB,IAAA,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC;AAElG,IAAA,MAAM,mBAAmB,GACvB,sCAAsC,YAAY,0BAA0B;AAC1E,UAAE,sCAAsC;UACtC,SAAS,CAAC;AAChB,IAAA,IAAI,2BAAoE,CAAC;AAEzE,IAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,WAAW,KAAK,SAAS,EAAE;QAClE,2BAA2B,GAAG,IAAI,2BAA2B,CAC3D,WAAW,EACX,sCAA2D,CAC5D,CAAC;AACH,KAAA;AAED,IAAA,IAAI,mBAAmB,KAAK,SAAS,IAAI,2BAA2B,KAAK,SAAS,EAAE;AAClF,QAAA,MAAM,SAAS,CAAC,gEAAgE,CAAC,CAAC;AACnF,KAAA;;IAGD,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,SAAA;AACF,KAAA;;;;IAKD,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;;YAEL,IAAI,OAAO,IAAI,YAAY,EAAE;AAC3B,gBAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,aAAA;AAAM,iBAAA;AACL,gBAAA,OAAO,yCAAyC,CAC9C,sBAAsB,EACtB,2BAA4B,CAC7B,CAAC;AACH,aAAA;AACF,SAAA;AACF,KAAA;IAED,IAAI,OAAO,IAAI,YAAY,EAAE;QAC3B,IAAI,mBAAmB,KAAK,SAAS,EAAE;AACrC,YAAA,OAAO,sCAAsC,CAAC,sBAAsB,EAAE,mBAAmB,CAAC,CAAC;AAC5F,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,kGAAkG,CACnG,CAAC;AACH,SAAA;AACF,KAAA;AAED,IAAA,MAAM,IAAI,UAAU,CAAC,oCAAoC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;IAC3B,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;AAChB,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,
EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;QACT,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAEtE,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,CACnC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;;AAeG;AACH,SAAS,sCAAsC,CAC7C,sBAA8C,EAC9C,mBAA+C,EAAA;AAE/C,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;IAE1F,IACE,CAAC,sBAAsB,CAAC,UAAU;QAClC,EAAE,sBAAsB,CAAC,WAAW,IAAI,sBAAsB,CAAC,SAAS,CAAC,EACzE;AACA,QAAA,MAAM,IAAI,UAAU,CAClB,uGAAuG,CACxG,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,mBAAmB,CAAC,WAAW,EAC/B,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,C
AChC;AACD,QAAA,sBAAsB,CAAC,UAAU;AACjC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,eAAe;QACtC,sBAAsB,CAAC,YAAY,GAAG,sBAAsB,CAAC,YAAY,GAAG,EAAE;QAC9E,sBAAsB,CAAC,kBAAkB,GAAG,sBAAsB,CAAC,kBAAkB,GAAG,EAAE;QAC1F,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,eAAe,GAAG,sBAAsB,CAAC,eAAe,GAAG,EAAE;QACpF,sBAAsB,CAAC,WAAW,GAAG,sBAAsB,CAAC,WAAW,GAAG,EAAE;AAC7E,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAEtE,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,SAAS,EACT,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,CAC9C,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CA
AC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAoB,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;AACT,QAAA,sBAAsB,CAAC,aAAa;AACpC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,CACrC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;AAaG;AACH,SAAS,yCAAyC,CAChD,sBAA8C,EAC9C,2BAAwD,EAAA;AAExD,IAAA,sBAAsB,GAAG,wCAAwC,CAAC,sBAAsB,CAAC,CAAC;;IAG1F,IAAI,CAAC,sBAAsB,CAAC,WAAW,IAAI,CAAC,sBAAsB,CAAC,SAAS,EAAE;AAC5E,QAAA,MAAM,IAAI,UAAU,CAClB,yGAAyG,CAC1G,CAAC;AACH,KAAA;IAED,IAAI,QAAQ,GAAW,GAAG,CAAC;AAC3B,IAAA,IAAI,SAAS,GAAG,sBAAsB,CAAC,YAAY,CAAC;IACpD,IAAI,sBAAsB,CAAC,QAAQ,EAAE;QACnC,QAAQ,GAAG,GAAG,CAAC;QACf,IAAI,sBAAsB,CAAC,YAAY,EAAE;YACvC,QAAQ,GAAG,IAAI,CAAC;AACjB,SAAA;aAAM,IAAI,sBAAsB,CAAC,SAAS,EAAE;YAC3C,QAAQ,GAAG,IAAI,CAAC;AAChB,YAAA,SAAS,GAAG,sBAAsB,CAAC,SAAS,CAAC;AAC9C,SAAA;AACF,KAAA;;AAGD,IAAA,IAAI,mBAAuC,CAAC;IAC5C,IAAI,sBAAsB,CAAC,WAAW,EAAE;QACtC,IAAI,sBAAsB,CAAC,QAAQ,EAAE;AACnC,YAAA,mBAAmB,GAAG,kBAAkB,CAAC,KAAK,CAC5C,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACjD,sBAAsB,CAAC,WAAW,CAAC,QAAQ,EAAE,CAC9C,CAAC,QAAQ,EAAE,CAAC;AACd,SAAA;AACF,KAAA;;AAGD,IAAA,MAAM,YAAY,GAAG;AACnB,QAAA,mBAAmB,GAAG,mBAAmB,GAAG,EAAE;AAC9C,QAAA,sBAAsB,CAAC,QAAQ;cAC3B,oBAAoB,CAAC,sBAAsB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC9D,cAAE,EAAE;AACN,QAAA,sBAAsB,CAAC,SAAS;cAC5B,oBAAo
B,CAAC,sBAAsB,CAAC,SAAS,EAAE,KAAK,CAAC;AAC/D,cAAE,EAAE;AACN,QAAA,gBAAgB,CACd,2BAA2B,CAAC,WAAW,EACvC,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,QAAQ,CAChC;QACD,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;QAC5D,2BAA2B,CAAC,iBAAiB,CAAC,cAAc;cACxD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,cAAc,EAAE,KAAK,CAAC;AAC3F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,eAAe;cACzD,oBAAoB,CAAC,2BAA2B,CAAC,iBAAiB,CAAC,eAAe,EAAE,KAAK,CAAC;AAC5F,cAAE,EAAE;QACN,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;QAC3D,2BAA2B,CAAC,iBAAiB,CAAC,aAAa;AAC3D,QAAA,sBAAsB,CAAC,0BAA0B;QACjD,SAAS;AACT,QAAA,sBAAsB,CAAC,aAAa;AACpC,QAAA,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC,sBAAsB,CAAC,OAAO,CAAC,GAAG,EAAE;QACrF,sBAAsB,CAAC,QAAQ,GAAG,sBAAsB,CAAC,QAAQ,GAAG,EAAE;AACtE,QAAA,sBAAsB,CAAC,OAAO;QAC9B,QAAQ;QACR,SAAS;AACT,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,YAAY;AACnC,QAAA,sBAAsB,CAAC,kBAAkB;AACzC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,eAAe;AACtC,QAAA,sBAAsB,CAAC,WAAW;AACnC,KAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAEb,MAAM,SAAS,GAAG,2BAA2B,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;AAC9E,IAAA,OAAO,IAAI,kBAAkB,CAC3B,sBAAsB,CAAC,OAAQ,EAC/B,SAAS,EACT,mBAAmB,EACnB,SAAS,EACT,SAAS,EACT,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,QAAQ,EAC/B,sBAAsB,CAAC,SAAS,EAChC,sBAAsB,CAAC,OAAO,EAC9B,sBAAsB,CAAC,UAAU,EACjC,QAAQ,EACR,sBAAsB,CAAC,YAAY,EACnC,sBAAsB,CAAC,kBAAkB,EACzC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,eAAe,EACtC,sBAAsB,CAAC,WAAW,EAClC,2BAA2B,CAAC,iBAAiB,EAC7C,sBAAsB,CAAC,0BAA0B,EACjD,sBAAsB,CAAC,aAAa,EACpC,sBAAsB,CAAC,eAAe,CACvC,CAAC;AACJ,CAAC;AAED,SAAS,gBAAgB,CAAC,WAAmB,EAAE,aAAqB,EAAE,QAAiB,EAAA;;;IAGrF,MAAM,QAAQ,GAAa,CAAC,CAAA,MAAA,EAAS,WAAW,CAAI,CAAA,EAAA,aAAa,CAAE,CAAA,CAAC,CAAC;AACrE,IAAA,IAAI,QAAQ,EAAE;AACZ,QAAA,QAAQ,CAAC,IAAI,CAAC,IAAI,QAAQ,CAAA,CAAE,CAAC,CAAC;AAC/B,KAAA;AACD,IAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AAC3B,CAAC;AAED,SAAS,wCAAwC,CAC/C,sBAA8C,EAAA;AAE9C,IAAA,MAAM,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,sBAAsB,CAAC,OAAO,GAAG,eAAe,CAAC;AAClG,IAAA,IAAI,sBAAsB,CAAC,YAAY,IAAI,OAAO,GAAG,YAAY,EAAE;AACjE,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,YAAY,EAAE;AACxF,QAAA,MAAM,UAAU,CAAC,wDAAwD,CAAC,CAAC;AAC5E,KAAA;AAED,IAAA,IAAI,sBAAsB,CAAC,SAAS,IAAI,OAAO,GAAG,YAAY,EAAE;AAC9D,QAAA,MAAM,UAAU,CAAC,+DAA+D,CAAC,CAAC;AACnF,KAAA;IACD,IAAI,sBAAsB,CAAC,QAAQ,KAAK,SAAS,IAAI,sBAAsB,CAAC,SAAS,EAAE;AACrF,QAAA,MAAM,UAAU,CAAC,qDAAqD,CAAC,CAAC;AACzE,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,qBAAqB;QACxD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,aAAa;QAChD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,eAAe;QAClD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,sBAAsB,CAAC,WAAW;QAClC,sBAAsB,CAAC,WAAW,CAAC,GAAG;QACtC,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,kEAAkE,CAAC,CAAC;AACtF,KAAA;IAED,IACE,OAAO,GAAG,YAAY;AACtB,QAAA,sBAAsB,CAAC,WAAW;AAClC,SAAC,sBAAsB,CAAC,WAAW,CAAC,IAAI,IAAI,sBAAsB,CAAC,WAAW,CAAC,OAAO,CAAC,EACvF;AACA,QAAA,MAAM,UAAU,CAAC,6EAA6E,CAAC,CAAC;AACjG,KAAA;IAED,IACE,OAAO,GAAG,YAAY;AACtB,QAAA,sBAAsB,CAAC,WAAW;AACjC,QAAA,sBAAsB,CAAC,WAAuC,CAAC,YAAY,EAC5E;AACA,QAAA,MAAM,UAAU,CAAC,sEAAsE,CAAC,CAAC;AAC1F,KAAA;IAED,IACE,OAAO,GAAG,YAAY;SACrB,sBAAsB,CAAC,0BAA0B,IAAI,sBAAsB,CAAC,aAAa,CAAC,EAC3F;AACA,QAAA,MAAM,UAAU,CACd,mGAAmG,CACpG,CAAC;AACH,KAAA;AAED,IAAA,IAAI,sBAAsB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;AACpE,QAAA,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;AAC/F,KAAA;AAED,IAAA,sBAAsB,CAAC,OAAO,GAAG,OAAO,CAAC;AACzC,IAAA,OA
AO,sBAAsB,CAAC;AAChC;;AC3iCA;AAgGA;;AAEG;MACU,eAAe,CAAA;AAwB1B;;;;AAIG;IACH,WAAY,CAAA,MAAoC,EAAE,OAAgB,EAAA;AAChE,QAAA,MAAM,aAAa,GAAG,IAAI,oBAAoB,CAC5C,MAAM,CAAC,GAAG,EACT,MAAc,CAAC,QAAQ,CAAC,sBAAsB,EAAE,CAClD,CAAC;AACF,QAAA,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC;AAEvB,QAAA,IAAK,MAAqB,CAAC,IAAI,KAAK,SAAS,EAAE;AAC7C,YAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;YACzB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,aAAa,CAAC,CAAC;AAC/D,SAAA;AAAM,aAAA;AACL,YAAA,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;YAC1B,IAAI,CAAC,yBAAyB,GAAG,IAAIC,MAAW,CAAC,aAAa,CAAC,CAAC;AACjE,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAGC,qBAAY,EAAE,CAAC;AAC1B,SAAA;AACD,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;KACzB;AA1CD;;;;AAIG;AACH,IAAA,IAAW,OAAO,GAAA;QAChB,OAAO,IAAI,CAAC,QAAQ,CAAC;KACtB;AAED;;;;AAIG;AACH,IAAA,IAAW,GAAG,GAAA;QACZ,OAAO,IAAI,CAAC,IAAI,CAAC;KAClB;AA4BD;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,YAAY,CACvB,QAAgB,EAChB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,iBACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EACR,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAE3C,CAAA,EAAA,eAAe,EAAE,IAAI,CAAC,QAAQ,EAC3B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,WAAW,CACtB,eAAuB,EACvB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC/D,IAAI,CAAC,QAAQ,EACb,eAAe,kBAEb,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EAExD,CAAC;AACF,YAAA,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC;AAChC,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,YAAY,CAAC,OAAA,GAAiC,EAAE,EAAA;;AAC3D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAA
W,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,kBACpE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,UAAU,CAAC,OAAA,GAAiC,EAAE,EAAA;;AACzD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,IAAI,CAAC,QAAQ,kBAClE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,UAAU,CACrB,WAAmB,EACnB,UAAiC,EAAE,EAAA;;AAEnC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QAEnF,IACE,IAAI,CAAC,YAAY;AACjB,aAAC,CAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,OAAO,MAAK,QAAQ;AACvE,iBAAC,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,KAAI,CAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,WAAW,MAAK,QAAQ,CAAC;AACjF,iBAAA,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,CAAA,CAAC,EACpC;AACA,YAAA,MAAM,IAAI,UAAU,CAClB,yJAAyJ,CAC1J,CAAC;AACH,SAAA;QAED,IAAI;AACF,YAAA,MAAM,gBAAgB,GAAA,MAAA,CAAA,MAAA,CAAA,EACpB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EACX,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CACtD,CAAC;YACF,OAAO,MAAM,IAAI,CAAC,yBAAyB,CAAC,UAAU,CAAC,gBAAgB,CAAC,CAAC;AAC1E,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACrYD;AAoCA;;;;AAIG;AACG,MAAO,uBAAwB,SAAQC,eAAQ,CAAA;AAWnD;;;;;;;;;AASG;IACH,WACE,CAAA,MAA6B,EAC7B,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,OAAA,GAA0C,EAAE,EAAA;QAE5C,KAAK,CAAC,EAAE,aAAa,EAAE,OAAO,CAAC,aAAa,EAAE,CAAC,CAAC;QAtB1C,IAAO,CAAA,OAAA,GAAW,CAAC,CAAC;AAoDpB,QAAA,IAAA,CAAA,iBAAiB,GAAG,CAAC,IAAY,KAAI;AAC3C,YAAA,IAAI,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;AAClC,gBAAA,IAAI,CAAC,OAAO,CAAC,iBAA
iB,GAAG,SAAS,CAAC;AAC3C,gBAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;AACpB,gBAAA,IAAI,CAAC,MAAM,CAAC,kBAAkB,CAAC,MAAM,CAAC,CAAC;AACvC,gBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;gBACxB,OAAO;AACR,aAAA;;;;AAKD,YAAA,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,MAAM,CAAC;YAC3B,IAAI,IAAI,CAAC,UAAU,EAAE;AACnB,gBAAA,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,CAAC;AAC5D,aAAA;AACD,YAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;AACpB,gBAAA,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAC;AACrB,aAAA;AACH,SAAC,CAAC;AAEM,QAAA,IAAA,CAAA,uBAAuB,GAAG,CAAC,GAAW,KAAI;AAChD,YAAA,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,YAAY,EAAE;AACpC,gBAAA,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;gBAClB,OAAO;AACR,aAAA;;;;;;YAOD,IAAI,CAAC,yBAAyB,EAAE,CAAC;YACjC,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC,KAAK,IAAI,CAAC,GAAG,EAAE;AAChC,gBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjB,aAAA;AAAM,iBAAA,IAAI,IAAI,CAAC,MAAM,IAAI,IAAI,CAAC,GAAG,EAAE;;;;AAIlC,gBAAA,IAAI,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,gBAAgB,EAAE;AACxC,oBAAA,IAAI,CAAC,OAAO,IAAI,CAAC,CAAC;AAClB,oBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC;AACrB,yBAAA,IAAI,CAAC,CAAC,SAAS,KAAI;AAClB,wBAAA,IAAI,CAAC,MAAM,GAAG,SAAS,CAAC;wBACxB,IAAI,CAAC,sBAAsB,EAAE,CAAC;wBAC9B,OAAO;AACT,qBAAC,CAAC;AACD,yBAAA,KAAK,CAAC,CAAC,KAAK,KAAI;AACf,wBAAA,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;AACtB,qBAAC,CAAC,CAAC;AACN,iBAAA;AAAM,qBAAA;oBACL,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,CAAA,mHAAA,EACE,IAAI,CAAC,MAAM,GAAG,CAChB,CAAA,sBAAA,EAAyB,IAAI,CAAC,GAAG,CAAA,WAAA,EAAc,IAAI,CAAC,OAAO,CAAA,eAAA,EACzD,IAAI,CAAC,gBACP,CAAA,CAAE,CACH,CACF,CAAC;AACH,iBAAA;AACF,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,OAAO,CACV,IAAI,KAAK,CACP,4FACE,IAAI,CAAC,GACP,CAAsB,mBAAA,EAAA,IAAI,CAAC,MAAM,GAAG,CAAC,CAAE,CAAA,CACxC,CACF,CAAC;AACH,aAAA;AACH,SAAC,CAAC;AAnGA,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC;AACpB,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,GAAG,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,CAAC;AAC9B,QAAA,IAAI,CAAC,gBAAgB;AACnB,YAAA,OAAO,CAAC,gBAAgB,IAAI,OAAO,CAAC,gBAAgB,IAAI,CAAC,GAAG,OAAO,CAAC,gBAAgB,GAAG,CAAC,CAAC;AAC3F,QAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;AACrC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC;QAEvB,IAAI,CAAC,sBAAsB,EAAE,CAAC;KAC/B;IAEM,KAAK,GAAA;AACV,QAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC;KACtB;IAEO,sBAAsB,GAAA;QAC5B,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;KACvD;IAEO,yBAAyB,GAAA;QAC/B,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,MAAM,EAAE,IAAI,CAAC,iBAAiB,CAAC,CAAC;QAC3D,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;QAChE,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,IAAI,CAAC,uBAAuB,CAAC,CAAC;KACnE;IA0ED,QAAQ,CAAC,KAAmB,EAAE,QAAiC,EAAA;;QAE7D,IAAI,CAAC,yBAAyB,EAAE,CAAC;AAChC,QAAA,IAAI,CAAC,MAAmB,CAAC,OAAO,EAAE,CAAC;AAEpC,QAAA,QAAQ,CAAC,KAAK,KAAK,IAAI,GAAG,SAAS,GAAG,KAAK,CAAC,CAAC;KAC9C;AACF;;AClLD;AAoBA;;;;;;;;;AASG;MACU,oBAAoB,CAAA;AA6d/B;;;;;;;;AAQG;IACH,WACE,CAAA,gBAA4C,EAC5C,MAA4B,EAC5B,MAAc,EACd,KAAa,EACb,OAAA,GAA0C,EAAE,EAAA;AAE5C,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QACzC,IAAI,CAAC,kBAAkB,GAAG,IAAI,uBAAuB,CACnD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,MAAM,EACN,MAAM,EACN,KAAK,EACL,OAAO,CACR,CAAC;KACH;AApfD;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;
;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;;;;;;;AAWG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;KACrC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,qBAAqB,GAAA;AAC9B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;KACpD;AAED;;;;;;AAMG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;AAKG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;;AAKG;AACH,IAAA,IAAW,uBAAuB,GAAA;AAChC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;KACtD;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;AAIG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,iBAAiB,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;KAChD;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,cAAc,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;KAC7C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;KACtC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;AAIG;AACH,IAAA,IAAW,gBAAgB,GAAA;AACzB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,gBAAgB,CAAC;KAC/C;AAED;;;;;AAKG;AACH,IAAA,IAAW,mBAAmB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;KAClD;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;AAIG;AACH,IAAA,IAAW,oCAAoC,GAAA;AAC7C,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,oCAAoC,CAAC;KACnE;AAED;;;;AAIG;AACH,IAAA,IAAW,iCAAiC,GAAA;AAC1C,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iCAAiC,CAAC;KAChE;AAED;;;;AAIG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;AAIG;AACH,IAAA,IAAW,2BAA2B,GAAA;AACpC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAA
C,2BAA2B,CAAC;KAC1D;AAED;;;;AAIG;AACH,IAAA,IAAW,sBAAsB,GAAA;AAC/B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,sBAAsB,CAAC;KACrD;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,kBAAkB,GAAA;QAC3B,OAAOtB,eAAM,GAAG,IAAI,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AAED;;AAEG;AACH,IAAA,IAAW,SAAS,GAAA;AAGlB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AA8BF;;ACphBD;AACA;AAEO,MAAM,qBAAqB,GAAW,EAAE,CAAC;AACzC,MAAM,eAAe,GAAe,IAAI,UAAU,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,CAAC;AACrE,MAAM,cAAc,GAAW,YAAY,CAAC;AAC5C,MAAM,eAAe,GAAW,aAAa;;ACNpD;AACA;MAqBa,UAAU,CAAA;AACrB;;;;;;AAMG;IACI,aAAa,cAAc,CAChC,MAAoB,EACpB,MAAc,EACd,OAAA,GAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAC9E,QAAA,IAAI,KAAK,CAAC,MAAM,KAAK,MAAM,EAAE;AAC3B,YAAA,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;AACpC,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AAED;;;;;AAKG;IACK,aAAa,QAAQ,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAChE,QAAA,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC;KACf;;;;IAKO,aAAa,cAAc,CACjC,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,IAAI,iBAAiB,GAAG,CAAC,CAAC;AAC1B,QAAA,IAAI,IAAI,EAAE,YAAY,EAAE,mBAAmB,CAAC;QAE5C,GAAG;YACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAClD,YAAA,YAAY,GAAG,IAAI,GAAG,IAAI,CAAC;YAC3B,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,KAAK,iBAAiB,CAAC;YACpD,iBAAiB,IAAI,CAAC,CAAC;AACxB,SAAA,QAAQ,YAAY,IAAI,iBAAiB,GAAG,EAAE,EAAE;AAEjD,QAAA,IAAI,YAAY,EAAE;;;YAGhB,aAAa,GAAG,aAAa,CAAC;AAC9B,YAAA,mBAAmB,GAAG,SAAS,CAAC;YAChC,GAAG;gBACD,IAAI,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAClD,aAAa,IAAI,CAAC,IAAI,GAAG,IAAI,IAAI,mBAAmB,CAAC;AACrD,gBAAA,mBAAmB,IAAI,GAAG,CAAC;aAC5B,QAAQ,IAAI,GAAG,IAAI,EAAE;YAEtB,MAAM,GAAG,GAAG,CAAC,aAAa,GAAG,CAAC,GAAG,EAAE,aAAa,GAAG,CAAC,CAAC,GAAG,aAAa,IAAI,CAAC,CAAC;YAC3E,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,IAAI,GAAG,GAAG,MAAM,CAAC,gBAAgB,EAAE;AAClE,gBAAA,MAAM,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;AACtC,aAAA;AACD,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAED,QAAA,OAAO,CAAC,aAAa,IAAI,CAAC,IAAI,EAAE,aAAa,GAAG,CAAC,CAAC,CAAC;KACpD;IAEM,aAAa,QAAQ,CAC1B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACnD;IAEM,aAAa,OAAO,CACzB,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,OAAO,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACnD;IAEM,aAAa,QAAQ,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC;KACb;IAEM,aAAa,WAAW,CAC7B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,CAAC,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACrD,IAAI,CAAC,KAAK,CAAC,EAAE;AACX,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;aAAM,IAAI,CAAC,KAAK,CAAC,EAAE;AAClB,YAAA,OAAO,KAAK,CAAC;AACd,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,yBAAyB,CAAC,CAAC;AAC5C,SAAA;KACF;IAEM,aAAa,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAClE,QAAA,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;KACjC;IAEM,aAAa,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE,EAAA;AAEnC,QAAA,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;AAClE,QAAA,MAAM,IAAI,GAAG,IAAI,QAAQ,CAAC,KAAK,CAAC,MAAM,EAAE,KAAK,CAAC,UAAU,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;QAC5E,OAAO,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;KACjC;IAEM,aAAa,SAAS,CAC3B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,IAAI,GAAG,MAAM,UAA
U,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QACxD,IAAI,IAAI,GAAG,CAAC,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC7C,SAAA;AAED,QAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;KAChE;IAEM,aAAa,UAAU,CAC5B,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC1D,QAAA,MAAM,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;AACtC,QAAA,OAAO,WAAW,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;KAClC;IAEO,aAAa,WAAW,CAC9B,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,GAAG,GAAG,MAAM,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;;QAEzD,MAAM,KAAK,GAAG,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACpD,QAAA,OAAO,EAAE,GAAG,EAAE,KAAK,EAAE,CAAC;KACvB;IAEM,aAAa,OAAO,CACzB,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA8B,GAAA,EAAE,KACJ;YAC5B,OAAO,UAAU,CAAC,WAAW,CAAC,CAAC,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;AACzD,SAAC,CAAC;AAEF,QAAA,MAAM,KAAK,GAAsB,MAAM,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;QAE7F,MAAM,IAAI,GAAsB,EAAE,CAAC;AACnC,QAAA,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE;YACxB,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;AAC7B,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;IAEO,aAAa,SAAS,CAC5B,MAAoB,EACpB,cAAgF,EAChF,OAAA,GAAiC,EAAE,EAAA;QAEnC,MAAM,KAAK,GAAQ,EAAE,CAAC;AACtB,QAAA,KACE,IAAI,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EACtD,KAAK,KAAK,CAAC,EACX,KAAK,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,EAClD;YACA,IAAI,KAAK,GAAG,CAAC,EAAE;;gBAEb,MAAM,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAC3C,KAAK,GAAG,CAAC,KAAK,CAAC;AAChB,aAAA;YAED,OAAO,KAAK,EAAE,EAAE;gBACd,MAAM,IAAI,GAAM,MAAM,cAAc,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACtD,gBAAA,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAClB,aAAA;AACF,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;KACd;AACF,CAAA;AAOD,IAAK,WAOJ,CAAA;AAPD,CAAA,UAAK,WAAW,EAAA;AACd,IAAA,WAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACjB,IAAA,WAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,WAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,WAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACjB,CAAC,EAPI,WAAW,KAAX,WAAW,GAOf,EAAA,CAAA,CAAA,CAAA;AAYD,IAAK,aASJ,CAAA;AATD,CAAA,UAAK,aAAa,EAAA;AAChB,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,aAAA,CAAA,SAAA,CAAA,GAAA,SAAmB,CAAA;AACnB,IAAA,aAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,aAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACjB,IAAA,aAAA,CAAA,OAAA,CAAA,GAAA,OAAe,CAAA;AACf,IAAA,aAAA,CAAA,QAAA,CAAA,GAAA,QAAiB,CAAA;AACnB,CAAC,EATI,aAAa,KAAb,aAAa,GASjB,EAAA,CAAA,CAAA,CAAA;MAEqB,QAAQ,CAAA;AAS5B;;AAEG;IACI,OAAO,UAAU,CAAC,MAAuB,EAAA;AAC9C,QAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;AAC1C,SAAA;AAAM,aAAA,IAAI,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE;AAChC,YAAA,OAAO,QAAQ,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC;AACzC,SAAA;AAAM,aAAA;AACL,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,MAAsB,CAAC,CAAC;AAC1D,SAAA;KACF;IAEO,OAAO,gBAAgB,CAAC,MAAc,EAAA;AAC5C,QAAA,QAAQ,MAAM;YACZ,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,OAAO,CAAC;YAC3B,KAAK,aAAa,CAAC,GAAG,CAAC;YACvB,KAAK,aAAa,CAAC,IAAI,CAAC;YACxB,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM,CAAC;YAC1B,KAAK,aAAa,CAAC,KAAK,CAAC;YACzB,KAAK,aAAa,CAAC,MAAM;AACvB,gBAAA,OAAO,IAAI,iBAAiB,CAAC,MAAuB,CAAC,CAAC;AACxD,YAAA;AACE,gBAAA,MAAM,IAAI,KAAK,CAAC,wBAAwB,MAAM,CAAA,CAAE,CAAC,CAAC;AACrD,SAAA;KACF;IAEO,OAAO,eAAe,CAAC,MAAa,EAAA;AAC1C,QAAA,OAAO,IAAI,aAAa,CAAC,MAAM,CAAC,GAAG,CAAC,QAAQ,CAAC,UAAU,CAAC,CAAC,CAAC;KAC3D;IAEO,OAAO,gBAAgB,CAAC,MAAoB,EAAA;AAClD,QAAA,MAAM,IAAI,G
AAG,MAAM,CAAC,IAAI,CAAC;;QAEzB,IAAI;AACF,YAAA,OAAO,QAAQ,CAAC,gBAAgB,CAAC,IAAI,CAAC,CAAC;AACxC,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;;AAElB,SAAA;AAED,QAAA,QAAQ,IAAI;YACV,KAAK,WAAW,CAAC,MAAM;gBACrB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,CAAA,CAAE,CAAC,CAAC;AAC1E,iBAAA;AACD,gBAAA,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE;AAChB,oBAAA,MAAM,IAAI,KAAK,CAAC,sDAAsD,MAAM,CAAA,CAAE,CAAC,CAAC;AACjF,iBAAA;;gBAGD,MAAM,MAAM,GAA6B,EAAE,CAAC;AAC5C,gBAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,CAAA,CAAE,CAAC,CAAC;AACnF,iBAAA;AACD,gBAAA,KAAK,MAAM,KAAK,IAAI,MAAM,CAAC,MAAM,EAAE;AACjC,oBAAA,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;AACtD,iBAAA;gBACD,OAAO,IAAI,cAAc,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC;YACjD,KAAK,WAAW,CAAC,IAAI;gBACnB,IAAI,MAAM,CAAC,OAAO,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,+CAA+C,MAAM,CAAA,CAAE,CAAC,CAAC;AAC1E,iBAAA;AACD,gBAAA,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE;AACnB,oBAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,MAAM,CAAA,CAAE,CAAC,CAAC;AACpF,iBAAA;AACD,gBAAA,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC;YAC1C,KAAK,WAAW,CAAC,GAAG;AAClB,gBAAA,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE;AAClB,oBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,MAAM,CAAA,CAAE,CAAC,CAAC;AACnF,iBAAA;AACD,gBAAA,OAAO,IAAI,WAAW,CAAC,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;AAC7D,YAAA,KAAK,WAAW,CAAC,KAAK,CAAC;AACvB,YAAA,KAAK,WAAW,CAAC,KAAK,CAAC;AACvB,YAAA;gBACE,MAAM,IAAI,KAAK,CAAC,CAAA,qBAAA,EAAwB,IAAI,CAAO,IAAA,EAAA,MAAM,CAAE,CAAA,CAAC,CAAC;AAChE,SAAA;KACF;AACF,CAAA;AAED,MAAM,iBAAkB,SAAQ,QAAQ,CAAA;AAGtC,IAAA,WAAA,CAAY,SAAwB,EAAA;AAClC,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,UAAU,GAAG,SAAS,CAAC;KAC7B;AAEM,IAAA,IAAI,CACT,MAAoB,EACpB,OAAA,GAAiC,EAAE,EAAA;QAEnC,QAAQ,IAAI,CAAC,UAAU;YACrB,KAAK,aAAa,CAAC,IAAI;AACrB,gBAAA,OAAO,UAAU,CAAC,QAAQ,EAAE,CAAC;YAC/B,KAAK,aAAa,CAAC,OAAO;gBACxB,OAAO,UAAU,CAAC,WAAW,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YACjD,KAAK,aAAa,CAAC,GAAG;gBACpB,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC7C,KAAK,aAAa,CAAC,IAAI;gBACrB,OAAO,UAAU,CAAC,QAAQ,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC9C,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAChD,KAAK,aAAa,CAAC,KAAK;gBACtB,OAAO,UAAU,CAAC,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;YAC/C,KAAK,aAAa,CAAC,MAAM;gBACvB,OAAO,UAAU,CAAC,UAAU,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAChD,YAAA;AACE,gBAAA,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;AAC7C,SAAA;KACF;AACF,CAAA;AAED,MAAM,YAAa,SAAQ,QAAQ,CAAA;AAGjC,IAAA,WAAA,CAAY,OAAiB,EAAA;AAC3B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC;KACzB;AAEM,IAAA,MAAM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE,EAAA;QACzE,MAAM,KAAK,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AACxD,QAAA,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC;KAC7B;AACF,CAAA;AAED,MAAM,aAAc,SAAQ,QAAQ,CAAA;AAGlC,IAAA,WAAA,CAAY,KAAiB,EAAA;AAC3B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,MAAM,GAAG,KAAK,CAAC;KACrB;AAEM,IAAA,MAAM,IAAI,CACf,MAAoB,EACpB,UAAiC,EAAE,EAAA;QAEnC,MAAM,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC5D,QAAA,OAAO,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACrD;AACF,CAAA;AAED,MAAM,WAAY,SAAQ,QAAQ,CAAA;AAGhC,IAAA,WAAA,CAAY,QAAkB,EAAA;AAC5B,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;KAC3B;AAEM,IAAA,IAAI,CAAC,MAAoB,EAAE,OAAA,GAAiC,EAAE,EAAA;AACnE,QAAA,MAAM,cAAc,GAAG,CACrB,CAAe,EACf,IAA4B,KACF;YAC1B,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC;AACtC,SAAC,CAAC;QACF,OAAO,UAAU,CAAC,OAAO,CAAC,MAAM,EAAE,cAAc,EAAE,OAAO,CAAC,CAAC;KAC5D;AACF,CAAA;AAED,MAAM,cAAe,SAA
Q,QAAQ,CAAA;IAInC,WAAY,CAAA,MAAgC,EAAE,IAAY,EAAA;AACxD,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,OAAO,GAAG,MAAM,CAAC;AACtB,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC;KACnB;AAEM,IAAA,MAAM,IAAI,CAAC,MAAoB,EAAE,UAAiC,EAAE,EAAA;QACzE,MAAM,MAAM,GAAkC,EAAE,CAAC;AACjD,QAAA,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC;AAC/B,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;AAC9B,YAAA,IAAI,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,EAAE;AAC3D,gBAAA,MAAM,CAAC,GAAG,CAAC,GAAG,MAAM,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAC7D,aAAA;AACF,SAAA;AACD,QAAA,OAAO,MAAM,CAAC;KACf;AACF;;ACtcD;AACA;AAOgB,SAAA,WAAW,CAAC,CAAa,EAAE,CAAa,EAAA;IACtD,IAAI,CAAC,KAAK,CAAC;AAAE,QAAA,OAAO,IAAI,CAAC;;AAEzB,IAAA,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,IAAI;AAAE,QAAA,OAAO,KAAK,CAAC;AACzC,IAAA,IAAI,CAAC,CAAC,MAAM,KAAK,CAAC,CAAC,MAAM;AAAE,QAAA,OAAO,KAAK,CAAC;AAExC,IAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACjC,IAAI,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAAE,YAAA,OAAO,KAAK,CAAC;AACjC,KAAA;AACD,IAAA,OAAO,IAAI,CAAC;AACd;;AClBA;MA6Ba,UAAU,CAAA;AAuCrB,IAAA,WAAA,CACE,UAAwB,EACxB,YAA2B,EAC3B,kBAA2B,EAC3B,uBAAgC,EAAA;AAEhC,QAAA,IAAI,CAAC,WAAW,GAAG,UAAU,CAAC;AAC9B,QAAA,IAAI,CAAC,aAAa,GAAG,YAAY,IAAI,UAAU,CAAC;AAChD,QAAA,IAAI,CAAC,YAAY,GAAG,KAAK,CAAC;AAC1B,QAAA,IAAI,CAAC,YAAY,GAAG,kBAAkB,IAAI,CAAC,CAAC;AAC5C,QAAA,IAAI,CAAC,YAAY,GAAG,uBAAuB,IAAI,CAAC,CAAC;AACjD,QAAA,IAAI,CAAC,mBAAmB,GAAG,kBAAkB,IAAI,CAAC,CAAC;KACpD;AAhCD,IAAA,IAAW,WAAW,GAAA;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;AAGD,IAAA,IAAW,WAAW,GAAA;QACpB,OAAO,IAAI,CAAC,YAAY,CAAC;KAC1B;AA2BO,IAAA,MAAM,UAAU,CAAC,OAAA,GAA4B,EAAE,EAAA;AACrD,QAAA,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,eAAe,CAAC,MAAM,EAAE;YACzF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;AACH,QAAA,IAAI,CAAC,WAAW,CAAC,MAAM,EAAE,eAAe,CAAC,EAAE;AACzC,YAAA,MAAM,IAAI,KAAK,CAAC,6BAA6B,CAAC,CAAC;AAChD,SAAA;;;AAID,QAAA,IAAI,CAAC,SAAS,GAAG,MAAM,UAAU,CAAC,OAAO,CAAC,IAAI,CAAC,aAAa,EAAE,UAAU,CAAC,UAAU,EAAE;YACnF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;QAGH,MAAM,KAAK,GAAG,IAAI,CAAC,SAAU,CAAC,cAAc,CAAC,CAAC;AAC9C,QAAA,IAAI,EAAE,KAAK,KAAK,SAAS,IAAI,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,EAAE;AAChE,YAAA,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC7C,SAAA;;AAGD,QAAA,IAAI,CAAC,WAAW,GAAG,MAAM,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,aAAa,EAAE,qBAAqB,EAAE;YAC5F,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;AAGH,QAAA,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,SAAU,CAAC,eAAe,CAAC,CAAC,CAAC;QAC5D,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;AAE7C,QAAA,IAAI,IAAI,CAAC,YAAY,KAAK,CAAC,EAAE;AAC3B,YAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;AAC1E,SAAA;QAED,IAAI,CAAC,sBAAsB,GAAG,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;YACxE,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,SAAA,CAAC,CAAC;;AAEH,QAAA,MAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAElF,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC;QACzB,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;AAC9C,YAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,EAAE;AAC1C,gBAAA,MAAM,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;gBAClF,IAAI,CAAC,sBAAuB,EAAE,CAAC;AAChC,aAAA;AACF,SAAA;KACF;IAEM,OAAO,GAAA;QACZ,OAAO,CAAC,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,CAAC;KAC/D;IAEa,YAAY,CACxB,UAA4B,EAAE,EAAA;;AAE9B,YAAA,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE;AACtB,gBAAA,MAAAuB,aAAA,CAAM,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAA,CAAC;AAChC,aAAA;AAED,YAAA,OAAO,IAAI,CAAC,OAAO,EAAE,EAAE;AACrB,gBAAA
,MAAM,MAAM,GAAG,MAAMA,aAAA,CAAA,IAAI,CAAC,SAAU,CAAC,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;oBAC1D,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,iBAAA,CAAC,CAAA,CAAC;gBAEH,IAAI,CAAC,sBAAuB,EAAE,CAAC;gBAC/B,IAAI,CAAC,YAAa,EAAE,CAAC;AAErB,gBAAA,IAAI,IAAI,CAAC,sBAAsB,KAAK,CAAC,EAAE;AACrC,oBAAA,MAAM,MAAM,GAAG,MAAMA,aAAA,CAAA,UAAU,CAAC,cAAc,CAAC,IAAI,CAAC,WAAW,EAAE,qBAAqB,EAAE;wBACtF,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,qBAAA,CAAC,CAAA,CAAC;AAEH,oBAAA,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,WAAW,CAAC,QAAQ,CAAC;AACzE,oBAAA,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC;oBAEtB,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,WAAY,EAAE,MAAM,CAAC,EAAE;AAC3C,wBAAA,MAAM,IAAI,KAAK,CAAC,kCAAkC,CAAC,CAAC;AACrD,qBAAA;oBAED,IAAI;wBACF,IAAI,CAAC,sBAAsB,GAAG,MAAMA,aAAA,CAAA,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE;4BACxE,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,yBAAA,CAAC,CAAA,CAAC;AACJ,qBAAA;AAAC,oBAAA,OAAO,GAAQ,EAAE;;AAEjB,wBAAA,IAAI,CAAC,sBAAsB,GAAG,CAAC,CAAC;AACjC,qBAAA;AAED,oBAAA,IAAI,IAAI,CAAC,sBAAuB,GAAG,CAAC,EAAE;;AAEpC,wBAAA,MAAAA,aAAA,CAAM,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAA,CAAC;AACnF,qBAAA;AACF,iBAAA;gBACD,MAAM,MAAAA,aAAA,CAAA,MAAM,CAAA,CAAC;AACd,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AACF;;AClLD;AACA;MAesB,YAAY,CAAA;AAGjC;;ACnBD;AAMA,MAAM,WAAW,GAAG,IAAIpB,0BAAU,CAAC,2CAA2C,CAAC,CAAC;AAE1E,MAAO,sBAAuB,SAAQ,YAAY,CAAA;AAWtD,IAAA,WAAA,CAAY,QAA+B,EAAA;AACzC,QAAA,KAAK,EAAE,CAAC;AACR,QAAA,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;AAC1B,QAAA,IAAI,CAAC,SAAS,GAAG,CAAC,CAAC;KACpB;AAXO,IAAA,YAAY,CAAC,IAAqB,EAAA;AACxC,QAAA,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAC5B,YAAA,OAAO,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAOD,IAAA,IAAW,QAAQ,GAAA;QACjB,OAAO,IAAI,CAAC,SAAS,CAAC;KACvB;AACM,IAAA,MAAM,IAAI,CAAC,IAAY,EAAE,UAAmC,EAAE,EAAA;;AACnE,QAAA,IAAI,MAAA,OAAO,CAAC,WAAW,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,OAAO,EAAE;AAChC,YAAA,MAAM,WAAW,CAAC;AACnB,SAAA;QAED,IAAI,IAAI,GAAG,CAAC,EAAE;AACZ,YAAA,MAAM,IAAI,KAAK,CAAC,sCAAsC,IAAI,CAAA,CAAE,CAAC,CAAC;AAC/D,SAAA;QAED,IAAI,IAAI,KAAK,CAAC,EAAE;YACd,OAAO,IAAI,UAAU,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE;AAC5B,YAAA,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;AAC/C,SAAA;;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACxC,QAAA,IAAI,KAAK,EAAE;AACT,YAAA,IAAI,CAAC,SAAS,IAAI,KAAK,CAAC,MAAM,CAAC;;AAE/B,YAAA,OAAO,IAAI,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;AACjC,SAAA;AAAM,aAAA;;YAEL,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,KAAI;;gBAErC,MAAM,OAAO,GAAe,MAAK;oBAC/B,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;oBAC5D,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBACvD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;oBACrD,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;oBAEvD,IAAI,OAAO,CAAC,WAAW,EAAE;wBACvB,OAAO,CAAC,WAAY,CAAC,mBAAmB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AACjE,qBAAA;AACH,iBAAC,CAAC;gBAEF,MAAM,gBAAgB,GAAe,MAAK;oBACxC,MAAM,aAAa,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAChD,oBAAA,IAAI,aAAa,EAAE;AACjB,wBAAA,IAAI,CAAC,SAAS,IAAI,aAAa,CAAC,MAAM,CAAC;AACvC,wBAAA,OAAO,EAAE,CAAC;;wBAEV,OAAO,CAAC,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,CAAC,CAAC;AAC3C,qBAAA;AACH,iBAAC,CAAC;gBAEF,MAAM,cAAc,GAAe,MAAK;AACtC,oBAAA,OAAO,EAAE,CAAC;AACV,oBAAA,MAAM,EAAE,CAAC;AACX,iBAAC,CAAC;gBAEF,MAAM,YAAY,GAAe,MAAK;AACpC,oBAAA,OAAO,EAAE,CAAC;oBACV,MAAM,CAAC,WAAW,CAAC,CAAC;AACtB,iBAAC,CAAC;gBAEF,IAAI,CAAC,SAAS,CAAC,EAAE,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAChD,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;gBAC7C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,KAAK,EAAE,cAAc,CAAC,CAAC;gBAC3C,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,OAAO,EAAE,cAAc,CAAC,C
AAC;gBAC7C,IAAI,OAAO,CAAC,WAAW,EAAE;oBACvB,OAAO,CAAC,WAAY,CAAC,gBAAgB,CAAC,OAAO,EAAE,YAAY,CAAC,CAAC;AAC9D,iBAAA;;AAEH,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;AACF;;AC/FD;AA6BA;;;;AAIG;AACG,MAAO,oBAAqB,SAAQmB,eAAQ,CAAA;AAQhD;;;;;AAKG;IACH,WAAmB,CAAA,MAA6B,EAAE,OAAA,GAAuC,EAAE,EAAA;AACzF,QAAA,KAAK,EAAE,CAAC;QAXF,IAAU,CAAA,UAAA,GAAY,IAAI,CAAC;AAYjC,QAAA,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;AACrB,QAAA,IAAI,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,CAAC;AACrC,QAAA,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC;AAC/B,QAAA,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,IAAI,sBAAsB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC;AAC1E,QAAA,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;KACpF;IAEM,KAAK,GAAA;QACV,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,YAAY,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,KAAI;AAChC,gBAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;AAC1B,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;AAEO,IAAA,MAAM,YAAY,GAAA;AACxB,QAAA,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC;AACxB,QAAA,IAAI,QAAQ,CAAC;QACb,GAAG;YACD,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,CAAC;YACtC,IAAI,QAAQ,CAAC,IAAI,EAAE;gBACjB,MAAM;AACP,aAAA;AACD,YAAA,MAAM,GAAG,GAAG,QAAQ,CAAC,KAAK,CAAC;AAC3B,YAAA,MAAM,MAAM,GAAI,GAAW,CAAC,OAAO,CAAC;AACpC,YAAA,IAAI,OAAO,MAAM,KAAK,QAAQ,EAAE;AAC9B,gBAAA,MAAM,KAAK,CAAC,gCAAgC,CAAC,CAAC;AAC/C,aAAA;AAED,YAAA,QAAQ,MAAM;AACZ,gBAAA,KAAK,0DAA0D;AAC7D,oBAAA;AACE,wBAAA,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;AAC/B,wBAAA,IAAI,IAAI,YAAY,UAAU,KAAK,KAAK,EAAE;AACxC,4BAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACpD,yBAAA;AACD,wBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EAAE;AACjC,4BAAA,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC;AACxB,yBAAA;AACF,qBAAA;oBACD,MAAM;AACR,gBAAA,KAAK,wDAAwD;AAC3D,oBAAA;AACE,wBAAA,MAAM,YAAY,GAAI,GAAW,CAAC,YAAY,CAAC;AAC/C,wBAAA,IAAI,OAAO,YAAY,KAAK,QAAQ,EAAE;AACpC,4BAAA,MAAM,KAAK,CAAC,+CAA+C,CAAC,CAAC;AAC9D,yBAAA;wBACD,IAAI,IAAI,CAAC,UAAU,EAAE;4BACnB,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,YAAY,EAAE,CAAC,CAAC;AAChD,yBAAA;AACF,qBAAA;oBACD,MAAM;AACR,gBAAA,KAAK,mDAAmD;oBACtD,IAAI,IAAI,CAAC,UAAU,EAAE;AACnB,wBAAA,MAAM,UAAU,GAAI,GAAW,CAAC,UAAU,CAAC;AAC3C,wBAAA,IAAI,OAAO,UAAU,KAAK,QAAQ,EAAE;AAClC,4BAAA,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;AACvD,yBAAA;wBACD,IAAI,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,UAAU,EAAE,CAAC,CAAC;AAC9C,qBAAA;AACD,oBAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;oBAChB,MAAM;AACR,gBAAA,KAAK,qDAAqD;oBACxD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,wBAAA,MAAM,KAAK,GAAI,GAAW,CAAC,KAAK,CAAC;AACjC,wBAAA,IAAI,OAAO,KAAK,KAAK,SAAS,EAAE;AAC9B,4BAAA,MAAM,KAAK,CAAC,qCAAqC,CAAC,CAAC;AACpD,yBAAA;AACD,wBAAA,MAAM,IAAI,GAAI,GAAW,CAAC,IAAI,CAAC;AAC/B,wBAAA,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;AAC5B,4BAAA,MAAM,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACnD,yBAAA;AACD,wBAAA,MAAM,WAAW,GAAI,GAAW,CAAC,WAAW,CAAC;AAC7C,wBAAA,IAAI,OAAO,WAAW,KAAK,QAAQ,EAAE;AACnC,4BAAA,MAAM,KAAK,CAAC,2CAA2C,CAAC,CAAC;AAC1D,yBAAA;AACD,wBAAA,MAAM,QAAQ,GAAI,GAAW,CAAC,QAAQ,CAAC;AACvC,wBAAA,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;AAChC,4BAAA,MAAM,KAAK,CAAC,wCAAwC,CAAC,CAAC;AACvD,yBAAA;wBACD,IAAI,CAAC,OAAO,CAAC;4BACX,QAAQ;4BACR,IAAI;AACJ,4BAAA,OAAO,EAAE,KAAK;4BACd,WAAW;AACZ,yBAAA,CAAC,CAAC;AACJ,qBAAA;oBACD,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,KAAK,CAAC,CAAA,eAAA,EAAkB,MAAM,CAAA,yBAAA,CAA2B,CAAC,CAAC;AACpE,aAAA;SACF,QAAQ,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;KAC9C;AACF;;AC/ID;AAkBA;;;;;AAKG;MACU,iBAAiB,CAAA;AAkY5B;;;;;AAKG;IACH,WACE,CAAA,gBAAwC,EACxC,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,IAAI,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;AACzC,QAAA,IAAI,CAAC,kBAAkB,GAAG,IAAI,oBAAoB,CAChD,IAAI,CAAC,gBAAgB,CAAC,kBAAmB,EACzC,OAAO,CACR,CAAC;KACH;AAhZD;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;A
AKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,kBAAkB,GAAA;AAC3B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,kBAAkB,CAAC;KACjD;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;;;;;;;AAWG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,SAAS,CAAC;KAClB;AAED;;;;;AAKG;AACH,IAAA,IAAW,MAAM,GAAA;AACf,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC;KACrC;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;;AAMG;AACH,IAAA,IAAW,qBAAqB,GAAA;AAC9B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,qBAAqB,CAAC;KACpD;AAED;;;;;;AAMG;AACH,IAAA,IAAW,aAAa,GAAA;AACtB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAC;KAC5C;AAED;;;;;AAKG;AACH,IAAA,IAAW,UAAU,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC;KACzC;AAED;;;;;AAKG;AACH,IAAA,IAAW,WAAW,GAAA;AACpB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAC;KAC1C;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;;AAKG;AACH,IAAA,IAAW,uBAAuB,GAAA;AAChC,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,uBAAuB,CAAC;KACtD;AAED;;;;;AAKG;AACH,IAAA,IAAW,IAAI,GAAA;AACb,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,IAAI,CAAC;KACnC;AAED;;;;AAIG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,iBAAiB,GAAA;AAC1B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC;KAChD;AAED;;;;;;;;AAQG;AACH,IAAA,IAAW,cAAc,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC;KAC7C;AAED;;;;;;AAMG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,QAAQ,CAAC;KACvC;AAED;;;;;AAKG;AACH,IAAA,IAAW,SAAS,GAAA;AAClB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAED;;;;;AAKG;AACH,IAAA,IAAW,eAAe,GAAA;AACxB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAC;KAC9C;AAED;;;;;AAKG;AACH,IAAA,IAAW,OAAO,GAAA;AAChB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC;KACtC;AAED;;;;;AAKG;AACH,IAAA,IAAW,mBAAmB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAC;KAClD;AAED;;;;;AAKG;AACH,IAAA,IAAW,YAAY,GAAA;AACrB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC;KAC3C;AAED;;;;;AAKG;AACH,IAAA,IAAW,QAAQ,GAAA;AACjB,QAAA,OAAO,SAAS,CAAC;KAClB;AAED;;;;;;;AAOG;AACH,IAAA,IAAW,kBAAkB,GAAA;QAC3B,OAAOtB,eAAM,GAAG,IAAI,CAAC,kBAAkB,GAAG,SAAS,CAAC;KACrD;AAED;;AAEG;AACH,IAAA,IAAW,SAAS,GAAA;AAGlB,QAAA,OAAO,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;KACxC;AAqBF;;AC1aD;AAqGA;;;AAGG;AACSwB,+BAcX;AAdD,CAAA,UAAY,aAAa,EAAA;AACvB;;AAEG;AACH,IAAA,aAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,aAAA,CAAA,MAAA,CAAA,GAAA,MAAa,CAAA;AACb;;;AAGG;AACH,IAAA,aAAA,CAAA,SAAA,CAAA,GAAA,SAAmB,CAAA;AACrB,CAAC,EAdWA,qBAAa,KAAbA,qBAAa,GAcxB,EAAA,CAAA,CAAA,CAAA;AAED;;;;
AAIG;AACSC,qCA6CX;AA7CD,CAAA,UAAY,mBAAmB,EAAA;AAC7B;;AAEG;AACH,IAAA,mBAAA,CAAA,IAAA,CAAA,GAAA,IAAS,CAAA;AACT;;AAEG;AACH,IAAA,mBAAA,CAAA,IAAA,CAAA,GAAA,IAAS,CAAA;AACT;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACX;;AAEG;AACH,IAAA,mBAAA,CAAA,KAAA,CAAA,GAAA,KAAW,CAAA;AACb,CAAC,EA7CWA,2BAAmB,KAAnBA,2BAAmB,GA6C9B,EAAA,CAAA,CAAA,CAAA;AAEK,SAAU,YAAY,CAC1B,IAA8D,EAAA;IAE9D,IAAI,IAAI,KAAK,SAAS,EAAE;AACtB,QAAA,OAAO,SAAS,CAAC;AAClB,KAAA;IAED,OAAO,IAAkB,CAAC;AAC5B,CAAC;AAEe,SAAA,oBAAoB,CAAC,GAAwB,EAAE,OAAgB,EAAA;AAC7E,IAAA,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE;AACnB,QAAA,MAAM,IAAI,UAAU,CAAC,2DAA2D,CAAC,CAAC;AACnF,KAAA;AAED,IAAA,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC,mBAAmB,EAAE;AACnC,QAAA,GAAG,CAAC,mBAAmB,GAAG,wBAAwB,CAAC;AACpD,KAAA;AACH,CAAC;AA2HD;;AAEG;AACSC,qCASX;AATD,CAAA,UAAY,mBAAmB,EAAA;AAC7B;;AAEG;AACH,IAAA,mBAAA,CAAA,oBAAA,CAAA,GAAA,oCAAyD,CAAA;AACzD;;AAEG;AACH,IAAA,mBAAA,CAAA,wBAAA,CAAA,GAAA,yCAAkE,CAAA;AACpE,CAAC,EATWA,2BAAmB,KAAnBA,2BAAmB,GAS9B,EAAA,CAAA,CAAA;;ACtUD;AACA;AA8EA;;;;;AAKG;AACG,SAAU,sBAAsB,CACpC,QAAqF,EAAA;IAErF,MAAM,SAAS,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,SAAS,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,MAAM;QAC5E,MAAM,EAAE,CAAC,CAAC,KAAK;AACf,QAAA,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;AACvB,KAAA,CAAC,CAAC,CAAC;IAEJ,MAAM,UAAU,GAAG,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,UAAU,IAAI,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,MAAM;QAC9E,MAAM,EAAE,CAAC,CAAC,KAAK;AACf,QAAA,KAAK,EAAE,CAAC,CAAC,GAAG,GAAG,CAAC,CAAC,KAAK;AACvB,KAAA,CAAC,CAAC,CAAC;IAEJ,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS;QACT,UAAU,EACV,SAAS,EACJ,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CAAC,SAAS,CAAA,EAAA,EACrB,UAAU,EAAE;gBACV,SAAS;gBACT,UAAU;aACX,EAEH,CAAA,EAAA,CAAA,CAAA;AACJ;;AC9GA;AA0EA;;;;;AAKG;AACG,MAAO,0BAA2B,SAAQC,cAG/C,CAAA;AAGC,IAAA,WAAA,CAAY,OAA0C,EAAA;AACpD,QAAA,MAAM,EACJ,UAAU,EACV,UAAU,EACV,YAAY,GAAG,KAAK,EACpB,UAAU,EACV,UAAU,EACV,uBAAuB,GACxB,GAAG,OAAO,CAAC;AAEZ,QAAA,IAAI,KAAgD,CAAC;AAErD,QAAA,IAAI,UAAU,EAAE;YACd,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,KAAK,CAAC;AACtC,SAAA;AAED,QAAA,MAAM,SAAS,GAAG,qCAAqC,CAClD,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,KAAK,KACR,UAAU;YACV,UAAU;AACV,YAAA,uBAAuB,IACvB,CAAC;QAEH,KAAK,CAAC,SAAS,CAAC,CAAC;AAEjB,QAAA,IAAI,OAAO,UAAU,KAAK,UAAU,EAAE;AACpC,YAAA,IAAI,CAAC,UAAU,CAAC,UAAU,CAAC,CAAC;AAC7B,SAAA;AAED,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,KAAK,GAAA;AACV,QAAA,OAAOpB,cAAK,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;KACjC;AACF,CAAA;AAED;;;;;AAKG;AACH,MAAM,MAAM,GAAgD,eAAe,MAAM,CAE/E,OAAO,GAAG,EAAE,EAAA;AAEZ,IAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;AACzB,IAAA,MAAM,EAAE,MAAM,EAAE,GAAG,KAAK,CAAC;IACzB,IAAI,KAAK,CAAC,WAAW,EAAE;AACrB,QAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACrD,KAAA;IAED,IAAI,CAAC,MAAM,EAAE;AACX,QAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AACzB,QAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACrD,KAAA;;AAGD,IAAA,MAAM,KAAK,CAAC,UAAU,CAAC,gBAAgB,CAAC,MAAM,EAAE;QAC9C,WAAW,EAAE,OAAO,CAAC,WAAW;AACjC,KAAA,CAAC,CAAC;AACH,IAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAEzB,IAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;AAKG;AACH,MAAM,MAAM,GAAgD,eAAe,MAAM,CAE/E,OAAO,GAAG,EAAE,EAAA;AAEZ,IAAA,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC;IACzB,MAAM,EAAE,UAAU,EAAE,UAAU,EAAE,uBAAuB,EAAE
,GAAG,KAAK,CAAC;AAElE,IAAA,IAAI,CAAC,KAAK,CAAC,SAAS,EAAE;AACpB,QAAA,KAAK,CAAC,SAAS,GAAG,IAAI,CAAC;QACvB,MAAM,MAAM,GAAG,MAAM,UAAU,CAAC,gBAAgB,CAAC,UAAU,EAAE,uBAAuB,CAAC,CAAC;;AAGtF,QAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC;AAC7B,QAAA,IAAI,MAAM,CAAC,UAAU,KAAK,SAAS,EAAE;AACnC,YAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;AACtB,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACF,KAAA;AAAM,SAAA,IAAI,CAAC,KAAK,CAAC,WAAW,EAAE;QAC7B,IAAI;AACF,YAAA,MAAM,MAAM,GAAG,MAAM,KAAK,CAAC,UAAU,CAAC,aAAa,CAAC,EAAE,WAAW,EAAE,OAAO,CAAC,WAAW,EAAE,CAAC,CAAC;AAC1F,YAAA,MAAM,EAAE,UAAU,EAAE,YAAY,EAAE,GAAG,MAAM,CAAC;AAC5C,YAAA,MAAM,gBAAgB,GAAG,KAAK,CAAC,YAAY,CAAC;AAC5C,YAAA,IAAI,YAAY,EAAE;AAChB,gBAAA,KAAK,CAAC,YAAY,GAAG,YAAY,CAAC;AACnC,aAAA;YACD,IACE,UAAU,KAAK,SAAS;AACxB,gBAAA,YAAY,KAAK,gBAAgB;AACjC,gBAAA,OAAO,OAAO,CAAC,YAAY,KAAK,UAAU,EAC1C;;AAEA,gBAAA,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;AAC7B,aAAA;iBAAM,IAAI,UAAU,KAAK,SAAS,EAAE;AACnC,gBAAA,KAAK,CAAC,MAAM,GAAG,MAAM,CAAC;AACtB,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;iBAAM,IAAI,UAAU,KAAK,QAAQ,EAAE;AAClC,gBAAA,KAAK,CAAC,KAAK,GAAG,IAAI,KAAK,CACrB,CAAA,+BAAA,EAAkC,MAAM,CAAC,qBAAqB,IAAI,SAAS,CAAA,CAAA,CAAG,CAC/E,CAAC;AACF,gBAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;AACjB,YAAA,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC;AAClB,YAAA,KAAK,CAAC,WAAW,GAAG,IAAI,CAAC;AAC1B,SAAA;AACF,KAAA;AAED,IAAA,OAAO,qCAAqC,CAAC,KAAK,CAAC,CAAC;AACtD,CAAC,CAAC;AAEF;;;;;AAKG;AACH,MAAM,QAAQ,GAAkD,SAAS,QAAQ,GAAA;AAG/E,IAAA,OAAO,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,IAAI,CAAC,KAAK,EAAE,EAAE,CAAC,GAAG,EAAE,KAAK,KAAI;;QAE1D,IAAI,GAAG,KAAK,YAAY,EAAE;AACxB,YAAA,OAAO,SAAS,CAAC;AAClB,SAAA;AACD,QAAA,OAAO,KAAK,CAAC;AACf,KAAC,CAAC,CAAC;AACL,CAAC,CAAC;AAEF;;;AAGG;AACH,SAAS,qCAAqC,CAC5C,KAAoC,EAAA;IAEpC,OAAO;QACL,KAAK,EAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAO,KAAK,CAAE;QACnB,MAAM;QACN,QAAQ;QACR,MAAM;KACP,CAAC;AACJ;;AChPA;AACA;AAkBA;;;;;;AAMG;AACG,SAAU,aAAa,CAAC,MAAa,EAAA;AACzC,IAAA,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;AACrB,QAAA,MAAM,IAAI,UAAU,CAAC,CAAA,sCAAA,CAAwC,CAAC,CAAC;AAChE,KAAA;IACD,IAAI,MAAM,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,CAAC,EAAE;AACrC,QAAA,MAAM,IAAI,UAAU,CAClB,CAAA,iGAAA,CAAmG,CACpG,CAAC;AACH,KAAA;IACD,OAAO,MAAM,CAAC,KAAK;AACjB,UAAE,CAAA,MAAA,EAAS,MAAM,CAAC,MAAM,CAAI,CAAA,EAAA,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,GAAG,CAAC,CAAE,CAAA;AAC9D,UAAE,CAAS,MAAA,EAAA,MAAM,CAAC,MAAM,GAAG,CAAC;AAChC;;ACtCA;AAYA;;AAEG;AACH,IAAK,WAGJ,CAAA;AAHD,CAAA,UAAK,WAAW,EAAA;AACd,IAAA,WAAA,CAAA,WAAA,CAAA,MAAA,CAAA,GAAA,CAAA,CAAA,GAAA,MAAI,CAAA;AACJ,IAAA,WAAA,CAAA,WAAA,CAAA,OAAA,CAAA,GAAA,CAAA,CAAA,GAAA,OAAK,CAAA;AACP,CAAC,EAHI,WAAW,KAAX,WAAW,GAGf,EAAA,CAAA,CAAA,CAAA;AAED;;;;AAIG;MACU,KAAK,CAAA;AAqChB;;;AAGG;AACH,IAAA,WAAA,CAAmB,cAAsB,CAAC,EAAA;AAnC1C;;AAEG;QACK,IAAO,CAAA,OAAA,GAAW,CAAC,CAAC;AAE5B;;AAEG;QACK,IAAS,CAAA,SAAA,GAAW,CAAC,CAAC;AAE9B;;AAEG;QACK,IAAM,CAAA,MAAA,GAAW,CAAC,CAAC;AAE3B;;AAEG;QACK,IAAU,CAAA,UAAA,GAAgB,EAAE,CAAC;AAErC;;;AAGG;AACK,QAAA,IAAA,CAAA,KAAK,GAAgB,WAAW,CAAC,IAAI,CAAC;QAY5C,IAAI,WAAW,GAAG,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,mCAAmC,CAAC,CAAC;AAC3D,SAAA;AACD,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,OAAO,GAAG,IAAIqB,mBAAY,EAAE,CAAC;KACnC;AAED;;;;AAIG;AACI,IAAA,YAAY,CAAC,SAAoB,EAAA;AACtC,QAAA,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,YAAW;YAC9B,IAAI;gBACF,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,MAAM,SAAS,EAAE,CAAC;gBAClB,IAAI,CAAC,OAAO,EAAE,CAAC;gBACf,IAAI,CAAC,SAAS,EAAE,CAAC;gBACjB,IAAI,CAAC,eAAe,EAAE,CAAC;AACxB,aAAA;AAAC,YAAA,OAAO,KAAU,EAAE;gBACnB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAC;AACnC,aAAA;AACH,SAAC,CAAC,CAAC;KACJ;AAED;;;AAGG;AACI,IAAA,MAAM,EAAE,GAAA;AACb,QAAA,IAAI,IAAI,
CAAC,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;AAChC,YAAA,OAAO,OAAO,CAAC,OAAO,EAAE,CAAC;AAC1B,SAAA;QAED,IAAI,CAAC,eAAe,EAAE,CAAC;QAEvB,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;YAC3C,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAEnC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,KAAI;AACjC,gBAAA,IAAI,CAAC,KAAK,GAAG,WAAW,CAAC,KAAK,CAAC;gBAC/B,MAAM,CAAC,KAAK,CAAC,CAAC;AAChB,aAAC,CAAC,CAAC;AACL,SAAC,CAAC,CAAC;KACJ;AAED;;;AAGG;IACK,aAAa,GAAA;QACnB,IAAI,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;YACxC,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,CAAC;AACvC,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;;AAIG;IACK,eAAe,GAAA;AACrB,QAAA,IAAI,IAAI,CAAC,KAAK,KAAK,WAAW,CAAC,KAAK,EAAE;YACpC,OAAO;AACR,SAAA;QAED,IAAI,IAAI,CAAC,SAAS,IAAI,IAAI,CAAC,UAAU,CAAC,MAAM,EAAE;AAC5C,YAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;YAC5B,OAAO;AACR,SAAA;AAED,QAAA,OAAO,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,WAAW,EAAE;AACtC,YAAA,MAAM,SAAS,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;AACvC,YAAA,IAAI,SAAS,EAAE;AACb,gBAAA,SAAS,EAAE,CAAC;AACb,aAAA;AAAM,iBAAA;gBACL,OAAO;AACR,aAAA;AACF,SAAA;KACF;AACF;;ACrJD;AAUA;;AAEG;AACG,MAAO,aAAc,SAAQN,eAAQ,CAAA;AAgBzC;;;;;;AAMG;AACH,IAAA,WAAA,CACU,OAAiB,EACjB,UAAkB,EAC1B,OAA8B,EAAA;QAE9B,KAAK,CAAC,OAAO,CAAC,CAAC;QAJP,IAAO,CAAA,OAAA,GAAP,OAAO,CAAU;QACjB,IAAU,CAAA,UAAA,GAAV,UAAU,CAAQ;AAI1B,QAAA,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;AACnC,QAAA,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC;AACrB,QAAA,IAAI,CAAC,iBAAiB,GAAG,CAAC,CAAC;;QAG3B,IAAI,aAAa,GAAG,CAAC,CAAC;AACtB,QAAA,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,EAAE;AAC9B,YAAA,aAAa,IAAI,GAAG,CAAC,UAAU,CAAC;AACjC,SAAA;AACD,QAAA,IAAI,aAAa,GAAG,IAAI,CAAC,UAAU,EAAE;AACnC,YAAA,MAAM,IAAI,KAAK,CAAC,iEAAiE,CAAC,CAAC;AACpF,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,KAAK,CAAC,IAAa,EAAA;AACxB,QAAA,IAAI,IAAI,CAAC,iBAAiB,IAAI,IAAI,CAAC,UAAU,EAAE;AAC7C,YAAA,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACjB,SAAA;QAED,IAAI,CAAC,IAAI,EAAE;AACT,YAAA,IAAI,GAAG,IAAI,CAAC,qBAAqB,CAAC;AACnC,SAAA;QAED,MAAM,UAAU,GAAa,EAAE,CAAC;QAChC,IAAI,CAAC,GAAG,CAAC,CAAC;QACV,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,CAAC,iBAAiB,GAAG,IAAI,CAAC,UAAU,EAAE;;YAE3D,MAAM,yBAAyB,GAAG,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,iBAAiB,CAAC;AAC3E,YAAA,MAAM,6BAA6B,GACjC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,UAAU,GAAG,IAAI,CAAC,yBAAyB,CAAC;YAC7E,MAAM,SAAS,GAAG,IAAI,CAAC,GAAG,CAAC,6BAA6B,EAAE,yBAAyB,CAAC,CAAC;AACrF,YAAA,IAAI,SAAS,GAAG,IAAI,GAAG,CAAC,EAAE;;gBAExB,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,IAAI,GAAG,CAAC,CAAC;gBACtD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3F,gBAAA,IAAI,CAAC,iBAAiB,IAAI,IAAI,GAAG,CAAC,CAAC;AACnC,gBAAA,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;gBACrC,CAAC,GAAG,IAAI,CAAC;gBACT,MAAM;AACP,aAAA;AAAM,iBAAA;;AAEL,gBAAA,MAAM,GAAG,GAAG,IAAI,CAAC,yBAAyB,GAAG,SAAS,CAAC;gBACvD,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,yBAAyB,EAAE,GAAG,CAAC,CAAC,CAAC;gBAC3F,IAAI,SAAS,KAAK,6BAA6B,EAAE;;AAE/C,oBAAA,IAAI,CAAC,yBAAyB,GAAG,CAAC,CAAC;oBACnC,IAAI,CAAC,WAAW,EAAE,CAAC;AACpB,iBAAA;AAAM,qBAAA;AACL,oBAAA,IAAI,CAAC,yBAAyB,GAAG,GAAG,CAAC;AACtC,iBAAA;AACD,gBAAA,IAAI,CAAC,iBAAiB,IAAI,SAAS,CAAC;gBACpC,CAAC,IAAI,SAAS,CAAC;AAChB,aAAA;AACF,SAAA;AAED,QAAA,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,CAAC;AACtC,SAAA;AAAM,aAAA,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE;YAClC,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC;AAC1B,SAAA;KACF;AACF;;AC5GD;AAMA;;AAEG;AACH;AACA,MAAM,eAAe,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC;AAE/D;;;;;;;AAOG;MACU,YAAY,CAAA;AA4CvB,IAAA,WAAA,CAAY,QAAgB,EAAE,OAAkB,EAAE,WAAoB,EAAA;AA3C
tE;;;AAGG;QACK,IAAO,CAAA,OAAA,GAAa,EAAE,CAAC;AAwC7B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC;;QAGf,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,QAAQ,GAAG,eAAe,CAAC,CAAC;QACxD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;AAClC,YAAA,IAAI,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,GAAG,QAAQ,GAAG,eAAe,GAAG,eAAe,CAAC;YAC7E,IAAI,GAAG,KAAK,CAAC,EAAE;gBACb,GAAG,GAAG,eAAe,CAAC;AACvB,aAAA;AACD,YAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC;AAC5C,SAAA;AAED,QAAA,IAAI,OAAO,EAAE;AACX,YAAA,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,WAAY,CAAC,CAAC;AAClC,SAAA;KACF;AA5CD;;AAEG;AACH,IAAA,IAAW,IAAI,GAAA;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAyCD;;;;;;;;AAQG;IACI,IAAI,CAAC,OAAiB,EAAE,WAAmB,EAAA;AAChD,QAAA,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,WAAW,CAAC,CAAC;AAElD,QAAA,IAAI,CAAC,GAAG,CAAC,EACP,CAAC,GAAG,CAAC,EACL,YAAY,GAAG,CAAC,EAChB,YAAY,GAAG,CAAC,EAChB,cAAc,GAAG,CAAC,CAAC;AACrB,QAAA,OAAO,cAAc,GAAG,IAAI,CAAC,KAAK,EAAE;AAClC,YAAA,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;YAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC/B,YAAA,MAAM,SAAS,GAAG,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,YAAY,EAAE,YAAY,CAAC,CAAC;YAElE,cAAc,IAAI,SAAS,CAAC;YAC5B,YAAY,IAAI,SAAS,CAAC;YAC1B,YAAY,IAAI,SAAS,CAAC;AAC1B,YAAA,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;AAClC,gBAAA,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;AAClB,aAAA;AACD,YAAA,IAAI,YAAY,KAAK,MAAM,CAAC,MAAM,EAAE;AAClC,gBAAA,CAAC,EAAE,CAAC;gBACJ,YAAY,GAAG,CAAC,CAAC;AAClB,aAAA;AACF,SAAA;;AAGD,QAAA,OAAO,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;AACrB,QAAA,IAAI,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE;AACtB,YAAA,OAAO,CAAC,CAAC,CAAC,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC;AAC7C,SAAA;KACF;AAED;;;AAGG;IACI,iBAAiB,GAAA;QACtB,OAAO,IAAI,aAAa,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;KACnD;AACF;;ACpID;AAgBA;;;;;;;;;;;;;;;;;;;;;AAqBG;MACU,eAAe,CAAA;AAuF1B;;;;;;;;;;;AAWG;IACH,WACE,CAAA,QAAkB,EAClB,UAAkB,EAClB,UAAkB,EAClB,eAAgC,EAChC,WAAmB,EACnB,QAAyB,EAAA;AAlF3B;;AAEG;AACc,QAAA,IAAA,CAAA,OAAO,GAAiB,IAAIM,mBAAY,EAAE,CAAC;AAO5D;;AAEG;QACK,IAAM,CAAA,MAAA,GAAW,CAAC,CAAC;AAE3B;;AAEG;QACK,IAAW,CAAA,WAAA,GAAY,KAAK,CAAC;AAErC;;AAEG;QACK,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEjC;;AAEG;QACK,IAAyB,CAAA,yBAAA,GAAW,CAAC,CAAC;AAO9C;;AAEG;QACK,IAAU,CAAA,UAAA,GAAW,CAAC,CAAC;AAE/B;;;;;;AAMG;QACK,IAAmB,CAAA,mBAAA,GAAa,EAAE,CAAC;AAE3C;;AAEG;QACK,IAAgB,CAAA,gBAAA,GAAW,CAAC,CAAC;AAErC;;AAEG;QACK,IAAQ,CAAA,QAAA,GAAmB,EAAE,CAAC;AAEtC;;AAEG;QACK,IAAQ,CAAA,QAAA,GAAmB,EAAE,CAAC;QAsBpC,IAAI,UAAU,IAAI,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,CAAA,CAAE,CAAC,CAAC;AACpF,SAAA;QAED,IAAI,UAAU,IAAI,CAAC,EAAE;AACnB,YAAA,MAAM,IAAI,UAAU,CAAC,gDAAgD,UAAU,CAAA,CAAE,CAAC,CAAC;AACpF,SAAA;QAED,IAAI,WAAW,IAAI,CAAC,EAAE;AACpB,YAAA,MAAM,IAAI,UAAU,CAAC,iDAAiD,WAAW,CAAA,CAAE,CAAC,CAAC;AACtF,SAAA;AAED,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,UAAU,GAAG,UAAU,CAAC;AAC7B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;AACzB,QAAA,IAAI,CAAC,eAAe,GAAG,eAAe,CAAC;AACvC,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;KAC1B;AAED;;;;AAIG;AACI,IAAA,MAAM,EAAE,GAAA;QACb,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;YAC3C,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,KAAI;gBAChC,IAAI,GAAG,OAAO,IAAI,KAAK,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC;AAC1E,gBAAA,IAAI,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;AAEhC,gBAAA,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE;AACvB,oBAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;AACvB,iBAAA;AACH,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;gBAChC,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,
EAAE,GAAG,CAAC,CAAC;AAClC,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;AAC3B,gBAAA,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC;AACxB,gBAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAChC,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAG,KAAI;AAC/B,gBAAA,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;AACpB,gBAAA,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;gBACtB,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,aAAC,CAAC,CAAC;YAEH,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;AAC/B,gBAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;oBAC5B,IAAI,CAAC,uBAAuB,EAAE,CAAC;oBAC/B,OAAO;AACR,iBAAA;gBAED,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,CAAC,yBAAyB,KAAK,CAAC,EAAE;AAC5D,oBAAA,IAAI,IAAI,CAAC,gBAAgB,GAAG,CAAC,IAAI,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,UAAU,EAAE;AACxE,wBAAA,MAAM,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;AACzD,wBAAA,IAAI,CAAC,eAAe,CAAC,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAAE,MAAM,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC;6BAC7E,IAAI,CAAC,OAAO,CAAC;6BACb,KAAK,CAAC,MAAM,CAAC,CAAC;AAClB,qBAAA;AAAM,yBAAA,IAAI,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;wBACnD,OAAO;AACR,qBAAA;AAAM,yBAAA;AACL,wBAAA,OAAO,EAAE,CAAC;AACX,qBAAA;AACF,iBAAA;AACH,aAAC,CAAC,CAAC;AACL,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;AACK,IAAA,oBAAoB,CAAC,IAAY,EAAA;AACvC,QAAA,IAAI,CAAC,mBAAmB,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACpC,QAAA,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,MAAM,CAAC;KACtC;AAED;;;;AAIG;AACK,IAAA,kCAAkC,CAAC,MAAqB,EAAA;QAC9D,IAAI,CAAC,MAAM,EAAE;AACX,YAAA,MAAM,GAAG,IAAI,YAAY,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;AAC7F,SAAA;AAAM,aAAA;YACL,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE,IAAI,CAAC,gBAAgB,CAAC,CAAC;AAC9D,SAAA;AAED,QAAA,IAAI,CAAC,gBAAgB,IAAI,MAAM,CAAC,IAAI,CAAC;AACrC,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;;;;;;AAQG;IACK,WAAW,GAAA;AACjB,QAAA,OAAO,IAAI,CAAC,gBAAgB,IAAI,IAAI,CAAC,UAAU,EAAE;AAC/C,YAAA,IAAI,MAAoB,CAAC;AAEzB,YAAA,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;AAC5B,gBAAA,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAG,CAAC;AAChC,gBAAA,IAAI,CAAC,kCAAkC,CAAC,MAAM,CAAC,CAAC;AACjD,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU,EAAE;AACrC,oBAAA,MAAM,GAAG,IAAI,CAAC,kCAAkC,EAAE,CAAC;oBACnD,IAAI,CAAC,UAAU,EAAE,CAAC;AACnB,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,OAAO,KAAK,CAAC;AACd,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YAC3B,IAAI,CAAC,uBAAuB,EAAE,CAAC;AAChC,SAAA;AACD,QAAA,OAAO,IAAI,CAAC;KACb;AAED;;;AAGG;AACK,IAAA,MAAM,uBAAuB,GAAA;AACnC,QAAA,IAAI,MAAgC,CAAC;QACrC,GAAG;AACD,YAAA,IAAI,IAAI,CAAC,yBAAyB,IAAI,IAAI,CAAC,WAAW,EAAE;gBACtD,OAAO;AACR,aAAA;AAED,YAAA,MAAM,GAAG,IAAI,CAAC,QAAQ,CAAC,KAAK,EAAE,CAAC;AAC/B,YAAA,IAAI,MAAM,EAAE;AACV,gBAAA,IAAI,CAAC,sBAAsB,CAAC,MAAM,CAAC,CAAC;AACrC,aAAA;AACF,SAAA,QAAQ,MAAM,EAAE;KAClB;AAED;;;;AAIG;IACK,MAAM,sBAAsB,CAAC,MAAoB,EAAA;AACvD,QAAA,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC;QAEjC,IAAI,CAAC,yBAAyB,EAAE,CAAC;AACjC,QAAA,IAAI,CAAC,MAAM,IAAI,YAAY,CAAC;QAE5B,IAAI;YACF,MAAM,IAAI,CAAC,eAAe,CACxB,MAAM,MAAM,CAAC,iBAAiB,EAAE,EAChC,YAAY,EACZ,IAAI,CAAC,MAAM,GAAG,YAAY,CAC3B,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,GAAQ,EAAE;YACjB,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;YAChC,OAAO;AACR,SAAA;QAED,IAAI,CAAC,yBAAyB,EAAE,CAAC;AACjC,QAAA,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;AACzB,QAAA,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;KAC/B;AAED;;;;AAIG;AACK,IAAA,WAAW,CAAC,MAAoB,EAAA;AACtC,QAAA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AAC3B,QAAA,IAAI,CAAC,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,WAAW,EAAE,IAAI,CAAC,IAAI,CAAC,WAAW,EAAE;AAC5D,YAAA,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC;AACxB,SAAA;KACF;AACF;;AC5UD;AAMA;;;;;;;;AAQG;AACI,eAAe,cAAc,CAClC,MAA6B,EAC7B,MAAc,EACd,MAAc,EACd,GAAW,EACX,QAAyB,EAAA;AAEzB,IAAA,IAAI,GAAG
,GAAG,CAAC,CAAC;AACZ,IAAA,MAAM,KAAK,GAAG,GAAG,GAAG,MAAM,CAAC;IAE3B,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;AAC3C,QAAA,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;YACzB,IAAI,GAAG,IAAI,KAAK,EAAE;AAChB,gBAAA,OAAO,EAAE,CAAC;gBACV,OAAO;AACR,aAAA;AAED,YAAA,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;AACR,aAAA;AACD,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;;YAGD,MAAM,WAAW,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,KAAK,GAAG,KAAK,GAAG,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC;YAE5E,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,EAAE,MAAM,GAAG,GAAG,EAAE,MAAM,GAAG,GAAG,GAAG,WAAW,CAAC,CAAC;YACnF,GAAG,IAAI,WAAW,CAAC;AACrB,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;YACpB,IAAI,GAAG,GAAG,KAAK,EAAE;gBACf,MAAM,CACJ,IAAI,KAAK,CACP,CAAA,4DAAA,EAA+D,GAAG,CAAA,aAAA,EAAgB,KAAK,CAAA,CAAE,CAC1F,CACF,CAAC;AACH,aAAA;AACD,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AAC7B,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;;;;AAQG;AACI,eAAe,eAAe,CACnC,MAA6B,EAC7B,MAAc,EACd,QAAyB,EAAA;AAEzB,IAAA,IAAI,GAAG,GAAG,CAAC,CAAC;AACZ,IAAA,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC;IAEjC,OAAO,IAAI,OAAO,CAAS,CAAC,OAAO,EAAE,MAAM,KAAI;AAC7C,QAAA,MAAM,CAAC,EAAE,CAAC,UAAU,EAAE,MAAK;AACzB,YAAA,IAAI,KAAK,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YAC1B,IAAI,CAAC,KAAK,EAAE;gBACV,OAAO;AACR,aAAA;AACD,YAAA,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;gBAC7B,KAAK,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;AACtC,aAAA;AAED,YAAA,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,GAAG,UAAU,EAAE;gBACnC,MAAM,CAAC,IAAI,KAAK,CAAC,4CAA4C,UAAU,CAAA,CAAE,CAAC,CAAC,CAAC;gBAC5E,OAAO;AACR,aAAA;AAED,YAAA,MAAM,CAAC,IAAI,CAAC,KAAK,EAAE,GAAG,EAAE,GAAG,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC;AAC5C,YAAA,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC;AACtB,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,MAAK;YACpB,OAAO,CAAC,GAAG,CAAC,CAAC;AACf,SAAC,CAAC,CAAC;AAEH,QAAA,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;AAC7B,KAAC,CAAC,CAAC;AACL,CAAC;AAyBD;;;;;;;AAOG;AACI,eAAe,qBAAqB,CACzC,EAAyB,EACzB,IAAY,EAAA;IAEZ,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,KAAI;QAC3C,MAAM,EAAE,GAAGC,aAAE,CAAC,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAEtC,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,KAAI;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,SAAC,CAAC,CAAC;QAEH,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,GAAU,KAAI;YAC5B,MAAM,CAAC,GAAG,CAAC,CAAC;AACd,SAAC,CAAC,CAAC;AAEH,QAAA,EAAE,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;AAExB,QAAA,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;AACd,KAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;AAIG;AACI,MAAM,MAAM,GAAGC,eAAI,CAAC,SAAS,CAACD,aAAE,CAAC,IAAI,CAAC,CAAC;AAEvC,MAAM,kBAAkB,GAAGA,aAAE,CAAC,gBAAgB;;AC+qBrD;;;AAGG;AACG,MAAO,UAAW,SAAQ,aAAa,CAAA;AAqF3C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;AAEhC,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAAC7B,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,
YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AAED,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AACrB,QAAA,CAAC,EAAE,QAAQ,EAAE,IAAI,CAAC,KAAK,EAAE,aAAa,EAAE,IAAI,CAAC,cAAc,EAAE;AAC3D,YAAA,IAAI,CAAC,+BAA+B,EAAE,EAAE;QAC1C,IAAI,CAAC,WAAW,GAAG,IAAIZ,MAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AAE9D,QAAA,IAAI,CAAC,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,QAAQ,CAAW,CAAC;AACvF,QAAA,IAAI,CAAC,UAAU,GAAG,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,YAAY,CAAC,UAAU,CAAC,SAAS,CAAW,CAAC;KAC1F;AArKD;;AAEG;AACH,IAAA,IAAW,IAAI,GAAA;QACb,OAAO,IAAI,CAAC,KAAK,CAAC;KACnB;AAED;;AAEG;AACH,IAAA,IAAW,aAAa,GAAA;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;KAC5B;AA2JD;;;;;;AAMG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;AAMG;AACI,IAAA,WAAW,CAAC,SAAiB,EAAA;AAClC,QAAA,OAAO,IAAI,UAAU,CACnB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,SAAS,EACjC,SAAS,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,SAAS,CAC/C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;AAGG;IACI,mBAAmB,GAAA;QACxB,OAAO,IAAI,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACtD;AAED;;;AAGG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AAED;;;AAGG;IACI,iBAAiB,GAAA;QACtB,OAAO,IAAI,cAAc,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACpD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA0DG;IACI,MAAM,QAAQ,CACnB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAA+B,EAAE,EAAA;;QAEjC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAE5E,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,CACzC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,cAAc,EAAE;AACd,oBAAA,kBAAkB,EAAEnB,eAAM,GAAG,SAAS,GAAG,OAAO,CAAC,UAAU;iBAC5D,EACD,KAAK,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,SAAS,GAAG,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAC5E,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB,EAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACjC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;YAEH,MAAM,UAAU,GACX,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAA
G,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,EAAA,CAC5F,CAAC;;YAEF,IAAI,CAACA,eAAM,EAAE;AACX,gBAAA,OAAO,UAAU,CAAC;AACnB,aAAA;;;;;;YAOD,IAAI,OAAO,CAAC,gBAAgB,KAAK,SAAS,IAAI,OAAO,CAAC,gBAAgB,GAAG,CAAC,EAAE;;AAE1E,gBAAA,OAAO,CAAC,gBAAgB,GAAG,mCAAmC,CAAC;AAChE,aAAA;AAED,YAAA,IAAI,GAAG,CAAC,aAAa,KAAK,SAAS,EAAE;AACnC,gBAAA,MAAM,IAAI,UAAU,CAAC,CAAA,kEAAA,CAAoE,CAAC,CAAC;AAC5F,aAAA;AAED,YAAA,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;AACb,gBAAA,MAAM,IAAI,UAAU,CAAC,CAAA,wDAAA,CAA0D,CAAC,CAAC;AAClF,aAAA;YAED,OAAO,IAAI,oBAAoB,CAC7B,UAAU,EACV,OAAO,KAAa,KAAoC;;AACtD,gBAAA,MAAM,sBAAsB,GAA+B;oBACzD,qBAAqB,EAAE,OAAO,CAAC,UAAU;AACzC,oBAAA,wBAAwB,EAAE;wBACxB,OAAO,EAAE,OAAO,CAAC,UAAW,CAAC,OAAO,IAAI,GAAG,CAAC,IAAI;AAChD,wBAAA,eAAe,EAAE,OAAO,CAAC,UAAW,CAAC,eAAe;AACpD,wBAAA,WAAW,EAAE,OAAO,CAAC,UAAW,CAAC,WAAW;AAC5C,wBAAA,iBAAiB,EAAE,OAAO,CAAC,UAAW,CAAC,iBAAiB;AACxD,wBAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa;AAC1C,qBAAA;oBACD,KAAK,EAAE,aAAa,CAAC;AACnB,wBAAA,KAAK,EAAE,MAAM,GAAG,GAAG,CAAC,aAAc,GAAG,KAAK;AAC1C,wBAAA,MAAM,EAAE,KAAK;qBACd,CAAC;oBACF,kBAAkB,EAAE,OAAO,CAAC,kBAAkB;oBAC9C,oBAAoB,EAAE,OAAO,CAAC,oBAAoB;oBAClD,QAAQ,EAAE,OAAO,CAAC,QAAQ;oBAC1B,OAAO,EAAE,OAAO,CAAC,mBAAmB;iBACrC,CAAC;;;;;;;AASF,gBAAA,OAAO,CACL,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,CAC7B,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,sBAAsB,EACzB,EACF,kBAAmB,CAAC;AACxB,aAAC,EACD,MAAM,EACN,GAAG,CAAC,aAAc,EAClB;gBACE,gBAAgB,EAAE,OAAO,CAAC,gBAAgB;gBAC1C,UAAU,EAAE,OAAO,CAAC,UAAU;AAC/B,aAAA,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAA6B,EAAE,EAAA;AACjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;gBAChD,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,aAAA,CAAC,CAAC;AACH,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;AACf,YAAA,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;;AAExB,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;AAAM,iBAAA,IACL,CAAC,CAAC,UAAU,KAAK,GAAG;AACpB,iBAAC,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,sCAAsC;AAC7D,oBAAA,CAAC,CAAC,OAAO,CAAC,SAAS,KAAK,yCAAyC,CAAC,EACpE;;AAEA,gBAAA,OAAO,IAAI,CAAC;AACb,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAoC,EAAE,EAAA;;AAEtC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;YAC9C,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,aAAa,CAC9C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EAAA,EACjC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;YAEH,OACK,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,GAAG,KACN,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,oCAAoC,EAAE,GAAG,CAAC,yBAAyB,EACnE,iCAAiC,EAAE,4BAA4B,CAAC,GAAG,CAAC,sBAAsB,CAAC,EAC
3F,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAA6B,EAAE,EAAA;;AACjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;QAC1E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,EAClC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAA6B,EAAE,EAAA;;AAE/B,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,cAAc,EAAE;gBAC3C,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,wEAAwE;AAClF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,QAAQ,CAAC,OAAA,GAA+B,EAAE,EAAA;AACrD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qBAAqB,EAAE,OAAO,CAAC,CAAC;QAC5E,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,QAAQ,iBACpC,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;AAcG;AACI,IAAA,MAAM,cAAc,CACzB,eAAiC,EACjC,UAAqC,EAAE,EAAA;;AAEvC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,iBAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,eAAe,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAGxC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AAC
I,IAAA,MAAM,WAAW,CACtB,QAAmB,EACnB,UAAkC,EAAE,EAAA;;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CACvC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,OAAO,CAAC,IAAU,EAAE,UAA8B,EAAE,EAAA;;AAC/D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACnC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,KACrD,IAAI,EAAE,UAAU,CAAC,IAAI,CAAC,EAAA,CAAA,CACtB,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,OAAO,CAAC,OAAA,GAA8B,EAAE,EAAA;;AACnD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;QAC3E,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAA,MAAA,CAAA,MAAA,CAAA,EAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YACH,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,CACX,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,IAAI,EAAE,MAAM,CAAC,EAAE,UAAU,EAAE,QAAQ,CAAC,UAAU,EAAE,CAAC,IAAI,EAAE,EAAA,CACxD,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,kBAAkB,CAAC,cAAuB,EAAA;AAC/C,QAAA,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;KAClD;AAED;;;;;AAKG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAqC,EAAE,EAAA;;AAEvC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,cAAc,CAAA,MAAA,CAAA,MAAA,CAAA,EAC1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,
eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuEG;AACI,IAAA,MAAM,gBAAgB,CAC3B,UAAkB,EAClB,UAAuC,EAAE,EAAA;AAIzC,QAAA,MAAM,MAAM,GAAyB;AACnC,YAAA,gBAAgB,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;AAC7D,YAAA,aAAa,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC;AACvD,YAAA,gBAAgB,EAAE,CAAC,GAAG,IAAI,KAAK,IAAI,CAAC,gBAAgB,CAAC,GAAG,IAAI,CAAC;SAC9D,CAAC;AACF,QAAA,MAAM,MAAM,GAAG,IAAI,0BAA0B,CAAC;AAC5C,YAAA,UAAU,EAAE,MAAM;YAClB,UAAU;YACV,YAAY,EAAE,OAAO,CAAC,YAAY;YAClC,UAAU,EAAE,OAAO,CAAC,UAAU;YAC9B,UAAU,EAAE,OAAO,CAAC,UAAU;AAC9B,YAAA,uBAAuB,EAAE,OAAO;AACjC,SAAA,CAAC,CAAC;;;AAIH,QAAA,MAAM,MAAM,CAAC,IAAI,EAAE,CAAC;AAEpB,QAAA,OAAO,MAAM,CAAC;KACf;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,gBAAgB,CAC3B,MAAc,EACd,UAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,MAAM,EAAA,MAAA,CAAA,MAAA,CAAA,EACnD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACtC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,eAAe,CAC1B,UAAkB,EAClB,UAAsC,EAAE,EAAA;;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EAClD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;AACpE,iBAAA,EACD,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,cAAc,EAAE,OAAO,CAAC,cAAc,EAAA,EACnC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,aAAa,CACxB,IAAkD,EAClD,UAA8B,EAAE,EAAA;;AAEhC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,0BAA0B,EAAE,OAAO,CAAC,CAAC;QACjF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,OAAO,CAAC,YAAY,CAAC,IAAI,CAAE,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,M
AAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAE3C,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IA8CM,MAAM,gBAAgB,CAC3B,MAAwB,EACxB,MAAe,EACf,MAA6C,EAC7C,MAAA,GAAsC,EAAE,EAAA;AAExC,QAAA,IAAI,MAA0B,CAAC;QAC/B,IAAI,MAAM,GAAG,CAAC,CAAC;QACf,IAAI,KAAK,GAAG,CAAC,CAAC;QACd,IAAI,OAAO,GAAG,MAAM,CAAC;QACrB,IAAI,MAAM,YAAY,MAAM,EAAE;YAC5B,MAAM,GAAG,MAAM,CAAC;AAChB,YAAA,MAAM,GAAG,MAAM,IAAI,CAAC,CAAC;AACrB,YAAA,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AACjD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AACjD,YAAA,KAAK,GAAG,OAAO,MAAM,KAAK,QAAQ,GAAG,MAAM,GAAG,CAAC,CAAC;AAChD,YAAA,OAAO,GAAI,MAAsC,IAAI,EAAE,CAAC;AACzD,SAAA;AACD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;AACF,YAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,gBAAA,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;AACvB,aAAA;AACD,YAAA,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,EAAE;AACzB,gBAAA,MAAM,IAAI,UAAU,CAAC,+BAA+B,CAAC,CAAC;AACvD,aAAA;AACD,YAAA,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;AAC3B,gBAAA,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;AACvD,aAAA;YAED,IAAI,MAAM,GAAG,CAAC,EAAE;AACd,gBAAA,MAAM,IAAI,UAAU,CAAC,4BAA4B,CAAC,CAAC;AACpD,aAAA;AAED,YAAA,IAAI,KAAK,IAAI,KAAK,IAAI,CAAC,EAAE;AACvB,gBAAA,MAAM,IAAI,UAAU,CAAC,qCAAqC,CAAC,CAAC;AAC7D,aAAA;AAED,YAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,gBAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,aAAA;;YAGD,IAAI,CAAC,KAAK,EAAE;gBACV,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,aAAa,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACpC,OAAO,CACV,EAAA,EAAA,cAAc,kCACT,OAAO,CAAC,cAAc,CACtB,EAAA,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;AACH,gBAAA,KAAK,GAAG,QAAQ,CAAC,aAAc,GAAG,MAAM,CAAC;gBACzC,IAAI,KAAK,GAAG,CAAC,EAAE;oBACb,MAAM,IAAI,UAAU,CAClB,CAAU,OAAA,EAAA,MAAM,CAAuC,oCAAA,EAAA,QAAQ,CAAC,aAAc,CAAE,CAAA,CACjF,CAAC;AACH,iBAAA;AACF,aAAA;;YAGD,IAAI,CAAC,MAAM,EAAE;gBACX,IAAI;AACF,oBAAA,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;AAC9B,iBAAA;AAAC,gBAAA,OAAO,KAAU,EAAE;oBACnB,MAAM,IAAI,KAAK,CACb,CAA0C,uCAAA,EAAA,KAAK,CAAqJ,kJAAA,EAAA,KAAK,CAAC,OAAO,CAAE,CAAA,CACpN,CAAC;AACH,iBAAA;AACF,aAAA;AAED,YAAA,IAAI,MAAM,CAAC,MAAM,GAAG,KAAK,EAAE;AACzB,gBAAA,MAAM,IAAI,UAAU,CAClB,mFAAmF,KAAK,CAAA,CAAE,CAC3F,CAAC;AACH,aAAA;YAED,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;AAC7C,YAAA,KAAK,IAAI,GAAG,GAAG,MAAM,EAAE,GAAG,GAAG,MAAM,GAAG,KAAK,EAAE,GAAG,GAAG,GAAG,GAAG,OAAO,CAAC,SAAS,EAAE;AAC1E,gBAAA,KAAK,CAAC,YAAY,CAAC,YAAW;;AAE5B,oBAAA,IAAI,QAAQ,GAAG,MAAM,GAAG,KAAM,CAAC;AAC/B,oBAAA,IAAI,GAAG,GAAG,OAAO,CAAC,SAAU,GAAG,QAAQ,EAAE;AACvC,wBAAA,QAAQ,GAAG,GAAG,GAAG,OAAO,CAAC,SAAU,CAAC;AACrC,qBAAA;AACD,oBAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,GAAG,EAAE,QAAQ,GAAG,GAAG,EAAE;wBACxD,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,gBAAgB,EAAE,OAAO,CAAC,wBAAwB;wBAClD,mBAAmB,EAAE,OAAO,CAAC,mBAAmB;wBAChD,cAAc,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,OAAO,CAAC,cAAc,GACtB,kCAAkC,CAAC,cAAc,CAAC,CACtD;AACF,qBAAA,CAAC,CAAC;AACH,oBAAA,MAAM,MAAM,GAAG,QAAQ,CAAC,kBAAmB,CAAC;AAC5C,oBAAA,MAAM,cAAc,CAAC,MAAM,EAAE,MAAO,EAAE,GAAG,GAAG,MAAM,EAAE,QAAQ,GAAG,MAAM,CAAC,CAAC;;;;AAIvE,oBAAA,gBAAgB,IAAI,QAAQ,GAAG,GAAG,CAAC;oBACnC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;AACvD,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA
;AACD,YAAA,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;AACjB,YAAA,OAAO,MAAM,CAAC;AACf,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;IACI,MAAM,cAAc,CACzB,QAAgB,EAChB,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,OAAA,GAA+B,EAAE,EAAA;AAEjC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,KAAK,EAC7C,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,KACV,cAAc,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,OAAO,CAAC,cAAc,CAAA,EACtB,kCAAkC,CAAC,cAAc,CAAC,CAAA,EAAA,CAAA,CAEvD,CAAC;YACH,IAAI,QAAQ,CAAC,kBAAkB,EAAE;gBAC/B,MAAM,qBAAqB,CAAC,QAAQ,CAAC,kBAAkB,EAAE,QAAQ,CAAC,CAAC;AACpE,aAAA;;AAGA,YAAA,QAAgB,CAAC,kBAAkB,GAAG,SAAS,CAAC;AACjD,YAAA,OAAO,QAAQ,CAAC;AACjB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IAEO,+BAA+B,GAAA;AACrC,QAAA,IAAI,aAAa,CAAC;AAClB,QAAA,IAAI,QAAQ,CAAC;QACb,IAAI;;;;;;;;YASF,MAAM,SAAS,GAAGtB,mBAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAE7C,YAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;;gBAGjD,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;AACtE,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;gBAIvC,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;AAC9E,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;AAAM,iBAAA;;;gBAGL,MAAM,cAAc,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;AACtE,gBAAA,aAAa,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AACnC,gBAAA,QAAQ,GAAG,cAAe,CAAC,CAAC,CAAC,CAAC;AAC/B,aAAA;;AAGD,YAAA,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;AAClD,YAAA,QAAQ,GAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC;;;YAIxC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC;YAExC,IAAI,CAAC,aAAa,EAAE;AAClB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACvD,aAAA;AAED,YAAA,OAAO,EAAE,QAAQ,EAAE,aAAa,EAAE,CAAC;AACpC,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,IAAI,KAAK,CAAC,yEAAyE,CAAC,CAAC;AAC5F,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACK,IAAA,MAAM,gBAAgB,CAC5B,UAAkB,EAClB,UAAuC,EAAE,EAAA;;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAE1D,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,gBAAgB,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;AACnE,oBAAA,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,aAAa;iBACrD,EACD,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,
EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,iBAAiB,EAAE,OAAO,CAAC,iBAAiB,EAC5C,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,QAAQ,EAAE,OAAO,CAAC,QAAQ,EACvB,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEsB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,cAAc,CAAC,OAAkC,EAAA;AACtD,QAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;YAC7B,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,gBAAA,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,GAAG,GAAG,8BAA8B,CAEtC,MAAA,CAAA,MAAA,CAAA,EAAA,aAAa,EAAE,IAAI,CAAC,cAAc,EAClC,QAAQ,EAAE,IAAI,CAAC,KAAK,EACpB,YAAY,EAAE,IAAI,CAAC,SAAS,EAC5B,SAAS,EAAE,IAAI,CAAC,UAAU,EAAA,EACvB,OAAO,CAAA,EAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;IACI,MAAM,wBAAwB,CACnC,OAA6C,EAAA;AAE7C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,wBAAwB,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,KAAA,IAAA,IAAP,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,WAAW,EAAA,EAC9B,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,qBAAqB,CAChC,kBAA0C,EAC1C,OAA0C,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,qBAAqB,CAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EACjC,wBAAwB,EAAE,kBAAkB,CAAC,UAAU,EACvD,sBAAsB,EAAE,kBAAkB,CAAC,UAAU,EACrD,wBAAwB,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,uBAAuB,EACvD,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,YAAY,CACvB,gBAAyB,EACzB,OAAiC,EAAA;AAEjC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,gBAAgB,EACzD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,aAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EAAA,EAC9B,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AA4ND;;AAEG;AACG,MAAO,gBAAiB,SAAQ,UAAU,CAAA;AAsE9C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,
mCAAmC,KAAK,QAAQ,EACvD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;;YAE5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,iBAAiB,GAAG,IAAI,UAAU,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KACpE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,gBAAgB,CACzB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;AAaG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAmC,EAAE,EAAA;;AACvD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yBAAyB,EAAE,OAAO,CAAC,CAAC;QAChF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,MAAM,CAAC,CAAC,EAC1C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAAA,EAC3C,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAA,GAA8C,EAAE,EAAA;;AAEhD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,oCAAoC,EAAE,OAAO,CAAC,CAAC;AAC3F,QAAA,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;QAC5C,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACxB,cAAc,CAAA,EAAA,EACjB,UAAU,EAAA,CAAA,CACV,CAAC;YACH,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,C
AAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,4EAA4E;AACtF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;AAIG;AACI,IAAA,MAAM,IAAI,CAAC,OAAA,GAAiC,EAAE,EAAA;;AACnD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,CAAA,MAAA,CAAA,MAAA,CAAA,EACtC,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;AAuBG;IACI,MAAM,WAAW,CACtB,IAAqB,EACrB,aAAqB,EACrB,UAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAEhE,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,WAAW,CAAC,aAAa,EAAE,IAAI,kBACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;AACrC,iBAAA,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;IACI,MAAM,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,KAAa,EACb,OAAA,GAA+C,EAAE,EAAA;;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;QAC1D,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,YAAA,OAAO,MAAM,IAAI,CAAC,iBAAiB,CAAC,kBAAkB,CAAC,SAAS,EAAE,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EACjE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,WAAW,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC3D,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA
,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACrC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAmkBD;;AAEG;AACG,MAAO,eAAgB,SAAQ,UAAU,CAAA;AA8E7C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;QACjE,IAAI,CAAC,YAAY,GAAG,IAAIZ,MAAW,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAChE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6BG;AACI,IAAA,MAAM,KAAK,CAChB,KAAa,EACb,UAAiC,EAAE,EAAA;;QAEnC,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAEhE,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAE9E,IAAI;YACF,IAAI,CAACnB,eAAM,EAAE;AACX,gBAAA,MAAM,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAC;AAC3E,aAAA;YACD,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,KAAK,CAC5C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC
hC,YAAY,EAAE;AACZ,oBAAA,SAAS,EAAE,KAAK;AAChB,oBAAA,UAAU,EAAE,KAAK;AACjB,oBAAA,kBAAkB,EAAE,oBAAoB,CAAC,OAAO,CAAC,sBAAsB,CAAC;AACxE,oBAAA,mBAAmB,EAAE,oBAAoB,CAAC,OAAO,CAAC,uBAAuB,CAAC;AAC3E,iBAAA,EACD,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EAAA,EACjC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACH,YAAA,OAAO,IAAI,iBAAiB,CAAC,QAAQ,EAAE;gBACrC,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;gBAC9B,OAAO,EAAE,OAAO,CAAC,OAAO;AACzB,aAAA,CAAC,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;AA0BG;IACI,MAAM,MAAM,CACjB,IAAqB,EACrB,aAAqB,EACrB,UAAkC,EAAE,EAAA;;QAEpC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,aAAa,EAAE,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,EAC3D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;AAiBG;AAEI,IAAA,MAAM,iBAAiB,CAC5B,SAAiB,EACjB,UAA6C,EAAE,EAAA;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,cAAc,CAAC,CAAC,EAAE,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACzD,OAAO,CACV,EAAA,EAAA,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,aAAa,EAAA,CAAA,EAE1C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,OAAO;AAChD,oBAAA,qBAAqB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,eAAe;AAChE,oBAAA,iBAAiB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,WAAW;AACxD,oBAAA,uBAAuB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,iBAAiB;AACpE,oBAAA,YAAY,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,gBAAgB,0CAAE,aAAa;iBACtD,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC/E,IAAI,E
AAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC9C,cAAc,EAAE,OAAO,CAAC,cAAc,EACnC,CAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;IACI,MAAM,UAAU,CACrB,OAAe,EACf,IAAqB,EACrB,aAAqB,EACrB,OAAA,GAAsC,EAAE,EAAA;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,OAAO,EAAE,aAAa,EAAE,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,EACxE,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;AACrC,iBAAA,EACD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;;;AAoBG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAe,EACf,SAAiB,EACjB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,UAA6C,EAAE,EAAA;AAE/C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,iBAAiB,CAAC,OAAO,EAAE,CAAC,EAAE,SAAS,EACxE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,WAAW,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,KAAK,GAAG,SAAS,GAAG,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAClF,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC5E,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,eAAe,CAC1B,MAAgB,EAChB,UAA2C,EAAE,EAAA;;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,EAAE,MAAM,EAAE,MAAM,EAAE,kBAEhB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,
CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,YAAY,CACvB,QAAuB,EACvB,UAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,YAAY,CAAC,QAAQ,EAC3D,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAExC,CAAA,EAAA,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,IAAI,CAAC,GAAG,CAAC,eAAe,EAAE;AACxB,gBAAA,GAAG,CAAC,eAAe,GAAG,EAAE,CAAC;AAC1B,aAAA;AAED,YAAA,IAAI,CAAC,GAAG,CAAC,iBAAiB,EAAE;AAC1B,gBAAA,GAAG,CAAC,iBAAiB,GAAG,EAAE,CAAC;AAC5B,aAAA;AAED,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;;AAID;;;;;;;;;;;;;;AAcG;AACI,IAAA,MAAM,UAAU,CACrB,IAAmD,EACnD,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;AACF,YAAA,IAAIrB,eAAM,EAAE;AACV,gBAAA,IAAI,MAAc,CAAC;gBACnB,IAAI,IAAI,YAAY,MAAM,EAAE;oBAC1B,MAAM,GAAG,IAAI,CAAC;AACf,iBAAA;qBAAM,IAAI,IAAI,YAAY,WAAW,EAAE;AACtC,oBAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC5B,iBAAA;AAAM,qBAAA;oBACL,IAAI,GAAG,IAAuB,CAAC;AAC/B,oBAAA,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,UAAU,CAAC,CAAC;AACrE,iBAAA;AAED,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,KAAa,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAC7E,MAAM,CAAC,UAAU,EACjB,cAAc,CACf,CAAC;AACH,aAAA;AAAM,iBAAA;gBACL,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC;AACrC,gBAAA,OAAO,IAAI,CAAC,sBAAsB,CAChC,CAAC,MAAc,EAAE,IAAY,KAAW,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACH,aAAA;AACF,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEqB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;;AAkBG;AACI,IAAA,MAAM,iBAAiB,CAC5B,WAAiD,EACjD,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,WAAW,GAAG,IAAI,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC,CAAC;AAC5C,YAAA,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAc,EAAE,IAAY,KAAW,WAAW,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,EAChF,WAAW,CAAC,IAAI,EAChB,cAAc,CACf,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;AAcG;IACK,MAAM,sBAAsB,CAClC,WAA8D,EAC9D,IAAY,EACZ,UAA0C,EAAE,EAAA;AAE5C,QAAA,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE;AACtB,YAAA,OAAO,CAAC,SAAS,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,OAAO,CAAC,SAAS,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,GAAG,gCAAgC,EAAE;AACjF,YAAA,MAAM,IAAI,UAAU,CAClB,wCAAwC,gCAAgC,CAAA,CAAE,CAC3E,CAAC;AACH,SAAA;QAED,IAAI,OAAO,CAAC,iBAAiB,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE;AACjE,YAAA,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,CAAC;AAC9D,SAAA;AACD,QAAA,IACE,OAAO,CAAC,iBAAiB,GAAG,CAAC;AAC7B,YAAA,OAAO,CAAC,iBAAiB,GAAG,gCAAgC,EAC5D;AACA,YAAA,MAA
M,IAAI,UAAU,CAClB,gDAAgD,gCAAgC,CAAA,CAAE,CACnF,CAAC;AACH,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,SAAS,KAAK,CAAC,EAAE;AAC3B,YAAA,IAAI,IAAI,GAAG,gCAAgC,GAAG,qBAAqB,EAAE;AACnE,gBAAA,MAAM,IAAI,UAAU,CAAC,GAAG,IAAI,CAAA,yCAAA,CAA2C,CAAC,CAAC;AAC1E,aAAA;AACD,YAAA,IAAI,IAAI,GAAG,OAAO,CAAC,iBAAiB,EAAE;gBACpC,OAAO,CAAC,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,qBAAqB,CAAC,CAAC;AAC5D,gBAAA,IAAI,OAAO,CAAC,SAAS,GAAG,iCAAiC,EAAE;AACzD,oBAAA,OAAO,CAAC,SAAS,GAAG,iCAAiC,CAAC;AACvD,iBAAA;AACF,aAAA;AACF,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;AAC5B,YAAA,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;AAC9B,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;AACF,YAAA,IAAI,IAAI,IAAI,OAAO,CAAC,iBAAiB,EAAE;AACrC,gBAAA,OAAO,MAAM,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,cAAc,CAAC,CAAC;AACtE,aAAA;AAED,YAAA,MAAM,SAAS,GAAW,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,GAAG,CAAC,IAAI,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC;YACzE,IAAI,SAAS,GAAG,qBAAqB,EAAE;gBACrC,MAAM,IAAI,UAAU,CAClB,CAA6D,2DAAA,CAAA;oBAC3D,CAAmC,gCAAA,EAAA,qBAAqB,CAAE,CAAA,CAC7D,CAAC;AACH,aAAA;YAED,MAAM,SAAS,GAAa,EAAE,CAAC;AAC/B,YAAA,MAAM,aAAa,GAAGD,qBAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YAEjC,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,CAAC;YAC7C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,EAAE,CAAC,EAAE,EAAE;AAClC,gBAAA,KAAK,CAAC,YAAY,CAAC,YAAyB;oBAC1C,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,CAAC,CAAC,CAAC;AAClD,oBAAA,MAAM,KAAK,GAAG,OAAO,CAAC,SAAU,GAAG,CAAC,CAAC;AACrC,oBAAA,MAAM,GAAG,GAAG,CAAC,KAAK,SAAS,GAAG,CAAC,GAAG,IAAI,GAAG,KAAK,GAAG,OAAO,CAAC,SAAU,CAAC;AACpE,oBAAA,MAAM,aAAa,GAAG,GAAG,GAAG,KAAK,CAAC;AAClC,oBAAA,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACxB,oBAAA,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,WAAW,CAAC,KAAK,EAAE,aAAa,CAAC,EAAE,aAAa,EAAE;wBAC/E,WAAW,EAAE,OAAO,CAAC,WAAW;wBAChC,UAAU,EAAE,OAAO,CAAC,UAAU;wBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;wBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,qBAAA,CAAC,CAAC;;;oBAGH,gBAAgB,IAAI,aAAa,CAAC;oBAClC,IAAI,OAAO,CAAC,UAAU,EAAE;wBACtB,OAAO,CAAC,UAAW,CAAC;AAClB,4BAAA,WAAW,EAAE,gBAAgB;AAC9B,yBAAA,CAAC,CAAC;AACJ,qBAAA;AACH,iBAAC,CAAC,CAAC;AACJ,aAAA;AACD,YAAA,MAAM,KAAK,CAAC,EAAE,EAAE,CAAC;YAEjB,OAAO,IAAI,CAAC,eAAe,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;AACxD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACI,IAAA,MAAM,UAAU,CACrB,QAAgB,EAChB,UAA0C,EAAE,EAAA;AAE5C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,MAAM,IAAI,GAAG,CAAC,MAAM,MAAM,CAAC,QAAQ,CAAC,EAAE,IAAI,CAAC;YAC3C,OAAO,MAAM,IAAI,CAAC,sBAAsB,CACtC,CAAC,MAAM,EAAE,KAAK,KAAI;AAChB,gBAAA,OAAO,MACL,kBAAkB,CAAC,QAAQ,EAAE;AAC3B,oBAAA,SAAS,EAAE,IAAI;AACf,oBAAA,GAAG,EAAE,KAAK,GAAG,MAAM,GAAG,KAAK,GAAG,CAAC,GAAG,QAAQ;AAC1C,oBAAA,KAAK,EAAE,MAAM;AACd,iBAAA,CAAC,CAAC;AACP,aAAC,EACD,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAEC,OAAO,CAAA,EAAA,EACV,cAAc,EACT,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAQ,CAAC,cAAc,GACvB,kCAAkC,CAAC,cAAc,CAAC,KAG1D,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACI,IAAA,MAAM,YAAY,CACvB,MAAgB,EAChB,UAAA,GAAqB,+BAA+B,EACpD,cAAyB,GAAA,CAAC,EAC1B,OAAA,GAAwC,EA
AE,EAAA;AAE1C,QAAA,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE;AAC5B,YAAA,OAAO,CAAC,eAAe,GAAG,EAAE,CAAC;AAC9B,SAAA;AACD,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QAErF,IAAI;YACF,IAAI,QAAQ,GAAG,CAAC,CAAC;AACjB,YAAA,MAAM,aAAa,GAAGD,qBAAY,EAAE,CAAC;YACrC,IAAI,gBAAgB,GAAW,CAAC,CAAC;YACjC,MAAM,SAAS,GAAa,EAAE,CAAC;AAE/B,YAAA,MAAM,SAAS,GAAG,IAAI,eAAe,CACnC,MAAM,EACN,UAAU,EACV,cAAc,EACd,OAAO,IAAI,EAAE,MAAM,KAAI;gBACrB,MAAM,OAAO,GAAG,eAAe,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;AACzD,gBAAA,SAAS,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACxB,gBAAA,QAAQ,EAAE,CAAC;gBAEX,MAAM,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE;oBAC3C,UAAU,EAAE,OAAO,CAAC,UAAU;oBAC9B,eAAe,EAAE,OAAO,CAAC,eAAe;oBACxC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,iBAAA,CAAC,CAAC;;gBAGH,gBAAgB,IAAI,MAAM,CAAC;gBAC3B,IAAI,OAAO,CAAC,UAAU,EAAE;oBACtB,OAAO,CAAC,UAAU,CAAC,EAAE,WAAW,EAAE,gBAAgB,EAAE,CAAC,CAAC;AACvD,iBAAA;aACF;;;;;AAKD,YAAA,IAAI,CAAC,IAAI,CAAC,CAAC,cAAc,GAAG,CAAC,IAAI,CAAC,CAAC,CACpC,CAAC;AACF,YAAA,MAAM,SAAS,CAAC,EAAE,EAAE,CAAC;YAErB,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACtC,OAAO,CACV,EAAA,EAAA,cAAc,kCACT,OAAQ,CAAC,cAAc,CACvB,EAAA,kCAAkC,CAAC,cAAc,CAAC,KAEvD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEC,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAoaD;;AAEG;AACG,MAAO,cAAe,SAAQ,UAAU,CAAA;AA8D5C,IAAA,WAAA,CACE,qBAA6B,EAC7B,mCAKgB,EAChB,iBAAmD;;;IAGnD,OAAgC,EAAA;;;AAIhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,OAAO,GAAG,iBAA2C,CAAC;AACtD,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ;YACvD,iBAAiB;YACjB,OAAO,iBAAiB,KAAK,QAAQ,EACrC;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;YAC1D,MAAM,QAAQ,GAAG,iBAAiB,CAAC;AAEnC,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;oBACF,GAAG,GAAG,eAAe,CACnB,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B,CAAC;AAEF,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;AACD,oBAAA,eAAe,CACb,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EACtE,kBAAkB,CAAC,QAAQ,CAAC,CAC7B;wBACD,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,uEAAuE,CAAC,CAAC;AAC1F,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,eAAe,GAAG,IAAI,QAAQ,CAAC,IAA
I,CAAC,oBAAoB,CAAC,CAAC;KAChE;AAED;;;;;;;AAOG;AACI,IAAA,YAAY,CAAC,QAAgB,EAAA;AAClC,QAAA,OAAO,IAAI,cAAc,CACvB,eAAe,CACb,IAAI,CAAC,GAAG,EACR,YAAY,CAAC,UAAU,CAAC,QAAQ,EAChC,QAAQ,CAAC,MAAM,KAAK,CAAC,GAAG,SAAS,GAAG,QAAQ,CAC7C,EACD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE,EAAA;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;YAChE,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAC9C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,wBAAwB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,UAAU,EAChE,sBAAsB,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,kBAAkB,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,EAC9D,SAAS,EAAE,OAAO,CAAC,SAAS,EAC5B,IAAI,EAAE,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,EAChC,cAAc,EAAE,gBAAgB,CAAC,OAAO,CAAC,IAAI,CAAC,EAC3C,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,iBAAiB,CAC5B,IAAY,EACZ,UAA4C,EAAE,EAAA;;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;AACF,YAAA,MAAM,UAAU,GAAG,EAAE,WAAW,EAAE,OAAO,EAAE,CAAC;AAC5C,YAAA,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAC7B,OAAO,CACV,EAAA,EAAA,UAAU,EACV,cAAc,EAAE,cAAc,CAAC,cAAc,IAC7C,CAAC;YACH,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,4EAA4E;AACtF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;IACI,MAAM,WAAW,CACtB,IAAqB,EACrB,MAAc,EACd,KAAa,EACb,OAAA,GAAsC,EAAE,EAAA;;QAExC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,WAAW,CAAC,KAAK,EAAE,IAAI,kBACvD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,cAAc,EAAE;oBACd,gBAAgB,EAAE,OAAO,CAAC,UAAU;iBACrC,EACD,KAAK,EAAE,aAAa,CAA
C,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,uBAAuB,EAAE,OAAO,CAAC,uBAAuB,EACxD,yBAAyB,EAAE,OAAO,CAAC,yBAAyB,EAC5D,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,kBAAkB,CAC7B,SAAiB,EACjB,YAAoB,EACpB,UAAkB,EAClB,KAAa,EACb,OAAA,GAA6C,EAAE,EAAA;;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;QAC9C,OAAO,CAAC,gBAAgB,GAAG,OAAO,CAAC,gBAAgB,IAAI,EAAE,CAAC;AAC1D,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,oBAAoB,CAAC,OAAO,CAAC,mBAAmB,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;AAChE,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,kBAAkB,CAClD,SAAS,EACT,aAAa,CAAC,EAAE,MAAM,EAAE,YAAY,EAAE,KAAK,EAAE,CAAC,EAC9C,CAAC,EACD,aAAa,CAAC,EAAE,MAAM,EAAE,UAAU,EAAE,KAAK,EAAE,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EAE1C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,gBAAgB,EAAE,OAAO,CAAC,gBAAgB,EAC1C,kBAAkB,EAAE,OAAO,CAAC,kBAAkB,EAC9C,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,8BAA8B,EAAE;AAC9B,oBAAA,aAAa,EAAE,OAAO,CAAC,gBAAgB,CAAC,OAAO;AAC/C,oBAAA,qBAAqB,EAAE,OAAO,CAAC,gBAAgB,CAAC,eAAe;AAC/D,oBAAA,iBAAiB,EAAE,OAAO,CAAC,gBAAgB,CAAC,WAAW;AACvD,oBAAA,uBAAuB,EAAE,OAAO,CAAC,gBAAgB,CAAC,iBAAiB;iBACpE,EACD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,EACxC,uBAAuB,EAAE,yBAAyB,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAC5E,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,UAAU,CACrB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAAqC,EAAE,EAAA;;QAEvC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,2BAA2B,EAAE,OAAO,CAAC,CAAC;QAClF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,CAAC,EAAA,MAAA,CAAA,MAAA,CAAA,EAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,kCACnB,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE3C,CAAA,EAAA,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,8BAA8B,EAAE,OAAO,CAAC,UAAU,EAClD,OAAO,EAAE,OAAO,CAAC,mBAAmB,EACpC,eAAe,EAAE,OAAO,CAAC,eAAe,IACrC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,aAAa,CACxB,MAAA,GAAiB,CAAC,EAClB,KAAc,EACd,OAAA,GAAwC,EAAE,EAAA;;QAE1C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,8BAA8B,EAAE,OAAO,CAAC,CAAC;QACrF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;AAC9B,iBAAA,aAAa,iBACZ,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA
,CAAA,aAAa,KAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAAA,EACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;IACK,MAAM,qBAAqB,CACjC,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,MAAe,EACf,OAAA,GAAgD,EAAE,EAAA;;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,aAAa,CAC7C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EACvC,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACD;;;;;;;;;;;;;AAaG;IACY,yBAAyB,CACtC,SAAiB,CAAC,EAClB,KAAc,EACd,MAAe,EACf,OAAA,GAAgD,EAAE,EAAA;;AAElD,YAAA,IAAI,gCAAgC,CAAC;AACrC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,gCAAgC,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,qBAAqB,CACjE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;AAC5D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,gCAAgC,CAAA,CAAA,CAAC;AAC9C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;AAMG;AACY,IAAA,kBAAkB,CAC/B,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,UAAgD,EAAE,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAyC,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBALU,MAAM,oBAAoB,WAAA,CAAA;AAMnC,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;AACxD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAsEG;AACI,IAAA,cAAc,CACnB,MAAiB,GAAA,CAAC,EAClB,KAAc,EACd,UAAyC,EAAE,EAAA;QAE3C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;;AAE9C,QAAA,MAAM,IAAI,GAAG,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC;QAC7D,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;gBACtC,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,KAAK,EAAE,QAAQ,CAAC,iBAAiB,EAC7E,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,OAAO,EACV,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;AASG;IACI,MAAM,iBAAiB,CAC5B,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,OAAA,GAA4C,EAAE,EAAA;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,K
AAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAE3C,CAAA,EAAA,YAAY,EAAE,YAAY,EAC1B,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;IACK,MAAM,yBAAyB,CACrC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD,EAAA;;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAChG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,WAAW,EACjC,qBAAqB,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,UAAU,EAC1C,wBAAwB,kCACnB,OAAO,KAAA,IAAA,IAAP,OAAO,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAP,OAAO,CAAE,UAAU,CAAA,EAAA,EACtB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,KAAP,IAAA,IAAA,OAAO,KAAP,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,OAAO,CAAE,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,EAE5C,CAAA,EAAA,YAAY,EAAE,iBAAiB,EAC/B,KAAK,EAAE,aAAa,CAAC;AACnB,oBAAA,MAAM,EAAE,MAAM;AACd,oBAAA,KAAK,EAAE,KAAK;iBACb,CAAC,EACF,MAAM,EAAE,MAAM,EACd,WAAW,EAAE,OAAO,KAAP,IAAA,IAAA,OAAO,uBAAP,OAAO,CAAE,WAAW,EAC9B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACD;;;;;;;;;;;;;;;AAeG;IACY,6BAA6B,CAC1C,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,MAAe,EACf,OAAkD,EAAA;;AAElD,YAAA,IAAI,gCAAgC,CAAC;AACrC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,gCAAgC,GAAG,MAAME,aAAA,CAAA,IAAI,CAAC,yBAAyB,CACrE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,gCAAgC,CAAC,iBAAiB,CAAC;AAC5D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,gCAAgC,CAAA,CAAA,CAAC;AAC9C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;AAOG;AACY,IAAA,sBAAsB,CACnC,MAAc,EACd,KAAa,EACb,iBAAyB,EACzB,OAAkD,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAyC,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,6BAA6B,CACzE,MAAM,EACN,KAAK,EACL,iBAAiB,EACjB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBANU,MAAM,oBAAoB,WAAA,CAAA;AAOnC,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,yBAAyB,CAAC,oBAAoB,CAAC,CAAA,CAAA,CAAA,CAAC;AACxD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAuEG;IACI,kBAAkB,CACvB,MAAc,EACd,KAAa,EACb,YAAoB,EACpB,UAA6C,EAAE,EAAA;QAE/C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;;AAG9C,QAAA,MAAM,IAAI,GAAG,IAAI,CAAC,sBAAsB,CAAC,MAAM,EAAE,KAAK,EAAE,YAAY,EAC/D,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,EACV,CAAC;QACH,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;gBACtC,OAAO,IAAI,CAAC,6BAA6B,CACvC,MAAM,EACN,KAAK,EACL,YAAY,EACZ,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EAExB,WAAW,EAAE,QAAQ,CAAC,WAAW,EAAA,EAC9B,OAAO,CAAA,CAEb,CAAC;aACH;SACF,CAAC;KACH;AAED;;;;;;;;;AASG;IACI,MAAM,gCAAgC,CAC3C,MAAc,EACd,KAAa,EACb,eAAuB,EACvB,OAAA,GAA4C,EAAE,EAAA;;QAE9C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,
EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,iDAAiD,EACjD,OAAO,CACR,CAAC;QAEF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe;iBAC9B,iBAAiB,CAAA,MAAA,CAAA,MAAA,CAAA,EAChB,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAE3C,eAAe,EACf,KAAK,EAAE,aAAa,CAAC,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,IACpC,kCAAkC,CAAC,cAAc,CAAC,CACrD,CAAA;iBACD,IAAI,CAAC,sBAAsB,CAAC,CAAC;AACjC,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,MAAM,CACjB,IAAY,EACZ,UAAiC,EAAE,EAAA;;QAEnC,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,uBAAuB,EAAE,OAAO,CAAC,CAAC;QAC9E,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,MAAM,CAAC,IAAI,EAC3C,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,KACrB,MAAM,EAAE,MAAA,OAAO,CAAC,UAAU,MAAA,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAE,aAAa,EAAA,CAAA,EAE3C,eAAe,EAAE,OAAO,CAAC,eAAe,EAAA,EACrC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,oBAAoB,CAC/B,oBAA8C,EAC9C,cAAuB,EACvB,UAA+C,EAAE,EAAA;;QAEjD,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,oBAAoB,CAAC,oBAAoB,EACzE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,kBAAkB,EAAE,cAAc,EAClC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EACnB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CAAC,UAAU,CAAA,EAAA,EACrB,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,0CAAE,aAAa,EAAA,CAAA,EAAA,EAExC,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;AAYG;AACI,IAAA,MAAM,oBAAoB,CAC/B,UAAkB,EAClB,UAA+C,EAAE,EAAA;;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,eAAe,CAAC,eAAe,CAAC,UAAU,EAAA,MAAA,CAAA,MAAA,CAAA,EAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,wBAAwB,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CAAC,UAAU,CACrB,EAAA,EAAA,MAAM,EAAE,CAAA,EAAA,GAAA,OAAO,CAAC,UAAU,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,OAExC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACv2LD;AAOO,eAAe,aAAa,CACjC,aAA8C,EAAA;IAE9C,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,0BAA0B,CAAC,CAAC;IAEtD,MAAM,cAAc,GAAG,MAAM,eAAe,CAC1C,aAAa,CAAC,kBAA2C,EACzD,MAAM,CACP,CAAC;;IAGF,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,cAAc,CAAC,CAAC;AAEzC,IAAA,OAAO,MAAM,CAAC,QAAQ,EAAE,CAAC;AAC3B,CAAC;AAEK,SAAU,cAAc,CAAC,GAAW,EAAA;AACxC,
IAAA,OAAO,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AAChC;;ACzBA;AAiBA,MAAM,qBAAqB,GAAG,IAAI,CAAC;AACnC,MAAM,eAAe,GAAG,GAAG,CAAC;AAC5B,MAAM,SAAS,GAAG,CAAC,CAAC,CAAC;AAErB;;AAEG;MACU,mBAAmB,CAAA;IAO9B,WACE,CAAA,aAA8C,EAC9C,WAAyC,EAAA;AAEzC,QAAA,IAAI,CAAC,aAAa,IAAI,CAAC,aAAa,CAAC,WAAW,EAAE;;AAEhD,YAAA,MAAM,IAAI,UAAU,CAAC,mEAAmE,CAAC,CAAC;AAC3F,SAAA;QAED,IAAI,CAAC,WAAW,IAAI,WAAW,CAAC,IAAI,KAAK,CAAC,EAAE;;AAE1C,YAAA,MAAM,IAAI,UAAU,CAAC,0DAA0D,CAAC,CAAC;AAClF,SAAA;AAED,QAAA,IAAI,CAAC,aAAa,GAAG,aAAa,CAAC;AACnC,QAAA,IAAI,CAAC,WAAW,GAAG,WAAW,CAAC;AAC/B,QAAA,IAAI,CAAC,qBAAqB,GAAG,IAAI,CAAC,aAAa,CAAC,WAAY,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QAC3E,IAAI,CAAC,iBAAiB,GAAG,CAAK,EAAA,EAAA,IAAI,CAAC,qBAAqB,CAAA,EAAG,gBAAgB,CAAA,CAAE,CAAC;QAC9E,IAAI,CAAC,mBAAmB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,qBAAqB,IAAI,CAAC;KAChE;;AAGM,IAAA,MAAM,kBAAkB,GAAA;;;QAG7B,IAAI,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,KAAK,iBAAiB,CAAC,aAAa,EAAE;AAC3E,YAAA,MAAM,IAAI,KAAK,CACb,CAAA,kDAAA,EAAqD,IAAI,CAAC,aAAa,CAAC,SAAS,CAAC,MAAM,CAAA,EAAA,CAAI,CAC7F,CAAC;AACH,SAAA;QAED,MAAM,kBAAkB,GAAG,MAAM,aAAa,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC;QAEnE,MAAM,YAAY,GAAG,kBAAkB;aACpC,KAAK,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,CAAC;AAClC,aAAA,KAAK,CAAC,IAAI,CAAC,iBAAiB,CAAC;AAC7B,aAAA,KAAK,CAAC,CAAC,CAAC,CAAC;AACZ,QAAA,MAAM,gBAAgB,GAAG,YAAY,CAAC,MAAM,CAAC;;;;;QAM7C,IAAI,gBAAgB,KAAK,IAAI,CAAC,WAAW,CAAC,IAAI,IAAI,gBAAgB,KAAK,CAAC,EAAE;AACxE,YAAA,MAAM,IAAI,KAAK,CAAC,0EAA0E,CAAC,CAAC;AAC7F,SAAA;AAED,QAAA,MAAM,wBAAwB,GAA4B,IAAI,KAAK,CAAC,gBAAgB,CAAC,CAAC;QACtF,IAAI,0BAA0B,GAAW,CAAC,CAAC;QAC3C,IAAI,uBAAuB,GAAW,CAAC,CAAC;;QAGxC,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,gBAAgB,EAAE,KAAK,EAAE,EAAE;AACrD,YAAA,MAAM,WAAW,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC;YACxC,MAAM,uBAAuB,GAAG,EAAsB,CAAC;AACvD,YAAA,uBAAuB,CAAC,OAAO,GAAG,IAAIjB,oBAAW,EAAE,CAAC;YAEpD,MAAM,aAAa,GAAG,WAAW,CAAC,KAAK,CAAC,CAAG,EAAA,gBAAgB,CAAE,CAAA,CAAC,CAAC;YAC/D,IAAI,uBAAuB,GAAG,KAAK,CAAC;YACpC,IAAI,qBAAqB,GAAG,KAAK,CAAC;YAClC,IAAI,aAAa,GAAG,KAAK,CAAC;YAC1B,IAAI,SAAS,GAAG,SAAS,CAAC;AAE1B,YAAA,KAAK,MAAM,YAAY,IAAI,aAAa,EAAE;gBACxC,IAAI,CAAC,uBAAuB,EAAE;;oBAE5B,IAAI,YAAY,CAAC,UAAU,CAAC,eAAe,CAAC,UAAU,CAAC,EAAE;AACvD,wBAAA,SAAS,GAAG,QAAQ,CAAC,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;AACpE,qBAAA;;;AAID,oBAAA,IAAI,YAAY,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE;wBAC7C,uBAAuB,GAAG,IAAI,CAAC;wBAE/B,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,eAAe,CAAC,CAAC;wBACnD,uBAAuB,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;AACrD,wBAAA,uBAAuB,CAAC,aAAa,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;AAC/E,qBAAA;AAED,oBAAA,SAAS;AACV,iBAAA;AAED,gBAAA,IAAI,YAAY,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;;oBAE9B,IAAI,CAAC,qBAAqB,EAAE;wBAC1B,qBAAqB,GAAG,IAAI,CAAC;AAC9B,qBAAA;AAED,oBAAA,SAAS;AACV,iBAAA;;gBAGD,IAAI,CAAC,qBAAqB,EAAE;oBAC1B,IAAI,YAAY,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC,CAAC,EAAE;;wBAEtD,MAAM,IAAI,KAAK,CACb,CAAA,oCAAA,EAAuC,YAAY,CAAoC,iCAAA,EAAA,qBAAqB,CAAI,EAAA,CAAA,CACjH,CAAC;AACH,qBAAA;;oBAGD,MAAM,MAAM,GAAG,YAAY,CAAC,KAAK,CAAC,qBAAqB,CAAC,CAAC;AACzD,oBAAA,uBAAuB,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC;oBAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,eAAe,CAAC,eAAe,EAAE;AACjD,wBAAA,uBAAuB,CAAC,SAAS,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;wBAC9C,aAAa,GAAG,IAAI,CAAC;AACtB,qBAAA;AACF,iBAAA;AAAM,qBAAA;;AAEL,oBAAA,IAAI,CAAC,uBAAuB,CAAC,UAAU,EAAE;AACvC,wBAAA,uBAAuB,CAAC,UAAU,GAAG,EAAE,CAAC;AACzC,qBAAA;AAED,oBAAA,uBAAuB,CAAC,UAAU,IAAI,YAAY,CAAC;AACpD,iBAAA;AACF,aAAA;;;;;YAMD,IACE,SAAS,KAAK,SAAS;AACvB,gBAAA,MAAM,CAAC,SAAS,CAAC,SAAS,CAAC;AAC3B,gBAAA,SAAS,IAAI,CAAC;AACd,gBAAA,SAAS,GAAG,IAAI,CAAC,WAAW,CAAC,IAAI;A
ACjC,gBAAA,wBAAwB,CAAC,SAAS,CAAC,KAAK,SAAS,EACjD;gBACA,uBAAuB,CAAC,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAE,CAAC;AACpE,gBAAA,wBAAwB,CAAC,SAAS,CAAC,GAAG,uBAAuB,CAAC;AAC/D,aAAA;AAAM,iBAAA;gBACL,MAAM,CAAC,KAAK,CACV,CAAA,aAAA,EAAgB,KAAK,CAAuE,oEAAA,EAAA,SAAS,CAAE,CAAA,CACxG,CAAC;AACH,aAAA;AAED,YAAA,IAAI,aAAa,EAAE;AACjB,gBAAA,uBAAuB,EAAE,CAAC;AAC3B,aAAA;AAAM,iBAAA;AACL,gBAAA,0BAA0B,EAAE,CAAC;AAC9B,aAAA;AACF,SAAA;QAED,OAAO;AACL,YAAA,YAAY,EAAE,wBAAwB;AACtC,YAAA,0BAA0B,EAAE,0BAA0B;AACtD,YAAA,uBAAuB,EAAE,uBAAuB;SACjD,CAAC;KACH;AACF;;ACrLD;AACA;AAEA,IAAK,eAGJ,CAAA;AAHD,CAAA,UAAK,eAAe,EAAA;AAClB,IAAA,eAAA,CAAA,eAAA,CAAA,QAAA,CAAA,GAAA,CAAA,CAAA,GAAA,QAAM,CAAA;AACN,IAAA,eAAA,CAAA,eAAA,CAAA,UAAA,CAAA,GAAA,CAAA,CAAA,GAAA,UAAQ,CAAA;AACV,CAAC,EAHI,eAAe,KAAf,eAAe,GAGnB,EAAA,CAAA,CAAA,CAAA;AAID;;AAEG;MACU,KAAK,CAAA;AAChB;;;;;AAKG;AACI,IAAA,aAAa,IAAI,CAAC,GAAW,EAAA;AAClC,QAAA,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,KAAI;AACnC,YAAA,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,QAAQ,EAAE;gBAC/E,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;AACxC,gBAAA,OAAO,EAAE,CAAC;AACX,aAAA;AAAM,iBAAA;AACL,gBAAA,IAAI,CAAC,aAAa,CAAC,GAAG,EAAE,MAAK;oBAC3B,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,MAAM,CAAC;AACxC,oBAAA,OAAO,EAAE,CAAC;AACZ,iBAAC,CAAC,CAAC;AACJ,aAAA;AACH,SAAC,CAAC,CAAC;KACJ;AAED;;;;AAIG;AACI,IAAA,aAAa,MAAM,CAAC,GAAW,EAAA;AACpC,QAAA,OAAO,IAAI,OAAO,CAAO,CAAC,OAAO,KAAI;YACnC,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,eAAe,CAAC,MAAM,EAAE;AAC7C,gBAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,CAAC;AAC3B,aAAA;AACD,YAAA,OAAO,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACtB,YAAA,OAAO,EAAE,CAAC;AACZ,SAAC,CAAC,CAAC;KACJ;AAKO,IAAA,OAAO,aAAa,CAAC,GAAW,EAAE,OAAiB,EAAA;QACzD,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,EAAE;YACrC,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;AACjC,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;AACnC,SAAA;KACF;IAEO,OAAO,eAAe,CAAC,GAAW,EAAA;AACxC,QAAA,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,SAAS,IAAI,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,MAAM,GAAG,CAAC,EAAE;YACvE,MAAM,OAAO,GAAG,IAAI,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,CAAC;YAC5C,YAAY,CAAC,MAAK;AAChB,gBAAA,OAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACtB,aAAC,CAAC,CAAC;AACJ,SAAA;KACF;;AAlBc,KAAI,CAAA,IAAA,GAAuC,EAAE,CAAC;AAC9C,KAAS,CAAA,SAAA,GAAkC,EAAE;;AClD9D;AAoDA;;;AAGG;MACU,SAAS,CAAA;AAKpB,IAAA,WAAA,GAAA;QAHiB,IAAK,CAAA,KAAA,GAAW,OAAO,CAAC;AAIvC,QAAA,IAAI,CAAC,YAAY,GAAG,IAAI,iBAAiB,EAAE,CAAC;KAC7C;AAED;;;;AAIG;IACI,uBAAuB,GAAA;AAC5B,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,uBAAuB,EAAE,CAAC;KACpD;AAED;;AAEG;IACI,kBAAkB,GAAA;AACvB,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,kBAAkB,EAAE,CAAC;KAC/C;AAED;;AAEG;IACI,cAAc,GAAA;AACnB,QAAA,OAAO,IAAI,CAAC,YAAY,CAAC,cAAc,EAAE,CAAC;KAC3C;AAEO,IAAA,MAAM,qBAAqB,CACjC,UAA2B,EAC3B,sBAA2C,EAAA;QAE3C,MAAM,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QAE7B,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,gBAAgB,CAAC,UAAU,CAAC,CAAC;YAC/C,MAAM,sBAAsB,EAAE,CAAC;AAC/B,YAAA,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,UAAU,CAAC,CAAC;AACjD,SAAA;AAAS,gBAAA;YACR,MAAM,KAAK,CAAC,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AAChC,SAAA;KACF;AAEO,IAAA,YAAY,CAAC,SAAqC,EAAA;AACxD,QAAA,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;AACnB,YAAA,IAAI,CAAC,SAAS,GAAG,SAAS,CAAC;AAC5B,SAAA;AACD,QAAA,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,EAAE;YAChC,MAAM,IAAI,UAAU,CAClB,CAAA,sFAAA,EAAyF,IAAI,CAAC,SAAS,CAAc,YAAA,CAAA,CACtH,CAAC;AACH,SAAA;KACF;AAqCM,IAAA,MAAM,UAAU,CACrB,eAAoC,EACpC,mBAKa,EACb,OAA2B,EAAA;AAE3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,UAA8E,CAAC;QAEnF,IACE,OAAO,eAAe,KAAK,QAAQ;AACnC,aAAC,CAACJ,eAAM,IAAI,mBAAmB,YAAY,0BAA0B;AACnE,gBAAA,mBAAmB,YAAY
,mBAAmB;AAClD,gBAAAe,0BAAiB,CAAC,mBAAmB,CAAC,CAAC,EACzC;;YAEA,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,mBAAmB,CAAC;AAClC,SAAA;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;;AAEhD,YAAA,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AAC1B,YAAA,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,OAAO,GAAG,mBAAwC,CAAC;AACpD,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;AACH,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QAEzF,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;YAC5B,MAAM,IAAI,CAAC,qBAAqB,CAC9B;AACE,gBAAA,GAAG,EAAE,GAAG;AACR,gBAAA,UAAU,EAAE,UAAU;aACvB,EACD,YAAW;AACT,gBAAA,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAC5E,cAAc,CACf,CAAC;AACJ,aAAC,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEM,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;IAgDM,MAAM,iBAAiB,CAC5B,eAAoC,EACpC,gBAIc,EACd,aAA+C,EAC/C,OAA4B,EAAA;AAE5B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,IAAI,UAA8E,CAAC;AACnF,QAAA,IAAI,IAAgB,CAAC;QAErB,IACE,OAAO,eAAe,KAAK,QAAQ;AACnC,aAAC,CAACrB,eAAM,IAAI,gBAAgB,YAAY,0BAA0B;AAChE,gBAAA,gBAAgB,YAAY,mBAAmB;AAC/C,gBAAAe,0BAAiB,CAAC,gBAAgB,CAAC,CAAC,EACtC;;YAEA,GAAG,GAAG,eAAe,CAAC;YACtB,UAAU,GAAG,gBAGM,CAAC;YACpB,IAAI,GAAG,aAA2B,CAAC;AACpC,SAAA;aAAM,IAAI,eAAe,YAAY,UAAU,EAAE;;AAEhD,YAAA,GAAG,GAAG,eAAe,CAAC,GAAG,CAAC;AAC1B,YAAA,UAAU,GAAG,eAAe,CAAC,UAAU,CAAC;YACxC,IAAI,GAAG,gBAA8B,CAAC;YACtC,OAAO,GAAG,aAAmC,CAAC;AAC/C,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,UAAU,CAClB,+EAA+E,CAChF,CAAC;AACH,SAAA;QAED,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;AACd,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAE1F,IAAI;AACF,YAAA,IAAI,CAAC,YAAY,CAAC,eAAe,CAAC,CAAC;YACnC,MAAM,IAAI,CAAC,qBAAqB,CAC9B;AACE,gBAAA,GAAG,EAAE,GAAG;AACR,gBAAA,UAAU,EAAE,UAAU;aACvB,EACD,YAAW;gBACT,MAAM,IAAI,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,YAAY,CAAC,cAAc,CAAC,UAAU,CAAC,CAAC,CAAC,aAAa,CACnF,IAAI,EACJ,cAAc,CACf,CAAC;AACJ,aAAC,CACF,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEM,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF,CAAA;AAED;;;AAGG;AACH,MAAM,iBAAiB,CAAA;AASrB,IAAA,WAAA,GAAA;AACE,QAAA,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;AACxB,QAAA,IAAI,CAAC,IAAI,GAAG,EAAE,CAAC;AAEf,QAAA,MAAM,QAAQ,GAAGD,qBAAY,EAAE,CAAC;;AAGhC,QAAA,IAAI,CAAC,QAAQ,GAAG,CAAS,MAAA,EAAA,QAAQ,EAAE,CAAC;;;;QAIpC,IAAI,CAAC,gBAAgB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,QAAQ,GAAG,gBAAgB,CAAA,EAAG,eAAe,CAAC,YAAY,qBAAqB,gBAAgB,CAAA,EAAG,eAAe,CAAC,yBAAyB,UAAU,CAAC;;QAExL,IAAI,CAAC,oBAAoB,GAAG,CAAA,0BAAA,EAA6B,IAAI,CAAC,QAAQ,EAAE,CAAC;;QAEzE,IAAI,CAAC,kBAAkB,GAAG,CAAA,EAAA,EAAK,IAAI,CAAC,QAAQ,IAAI,CAAC;AAEjD,QAAA,IAAI,CAAC,WAAW,GAAG,IAAI,GAAG,EAAE,CAAC;KAC9B;AAED;;;;;;AAMG;AACI,IAAA,cAAc,CACnB,UAA8E,EAAA;AAE9E,QAAA,MAAM,gBAAgB,GAAG,UAAU,YAAY,mBAAmB,CAAC;AACnE,QAAA,MAAM,mBAAmB,GAAG,CAAC,IAAI,gBAAgB,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;AAC3D,QAAA,MAAM,SAAS,GAA2B,IAAI,KAAK,CAAC,mBAAmB,CAAC,CAAC;QAEzE,SAAS,CAAC,CAAC,CAAC,GAAGT,8BAAqB,EAAE,CAAC;QACvC,SAAS,CAAC,CAAC,CAAC,GAAG,IAAI,8BAA8B,EAAE,CAAC;QACpD,IAAI,CAAC,gBAAgB,EAAE;AACrB,YAAA,SAAS,CAAC,CAAC,CAAC,GAAGI,0BAAiB,CAAC,UAAU,CAAC;kBACxC,gBAAgB,CACdmB,wCAA+B,CAAC,UAAU,EAAE,kBAAkB,CAAC,EAC/D,UAAU,CACX;kBACD,UAAU,CAAC;AAChB,SAAA;AACD,QAAA,SAAS,CAAC,mBAAmB,GAAG,CAAC,CAAC,GAAG,IAAI,iCAAiC,CAAC,IAAI,CAAC,CAAC;AAEjF,QAAA,OAAO,IAAI,
QAAQ,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;KACpC;AAEM,IAAA,sBAAsB,CAAC,OAAoB,EAAA;;QAEhD,IAAI,CAAC,IAAI,IAAI;AACX,YAAA,IAAI,CAAC,gBAAgB;AACrB,YAAA,CAAA,EAAG,eAAe,CAAC,UAAU,KAAK,IAAI,CAAC,cAAc,CAAE,CAAA;YACvD,EAAE;AACF,YAAA,CAAA,EAAG,OAAO,CAAC,MAAM,CAAC,QAAQ,EAAE,IAAI,kBAAkB,CAChD,OAAO,CAAC,GAAG,CACZ,CAAI,CAAA,EAAA,gBAAgB,GAAG,gBAAgB,CAAA,CAAE;AAC3C,SAAA,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;QAEzB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;AACnD,YAAA,IAAI,CAAC,IAAI,IAAI,CAAA,EAAG,MAAM,CAAC,IAAI,CAAK,EAAA,EAAA,MAAM,CAAC,KAAK,CAAG,EAAA,gBAAgB,EAAE,CAAC;AACnE,SAAA;AAED,QAAA,IAAI,CAAC,IAAI,IAAI,gBAAgB,CAAC;;;KAG/B;AAEM,IAAA,gBAAgB,CAAC,UAA2B,EAAA;AACjD,QAAA,IAAI,IAAI,CAAC,cAAc,IAAI,iBAAiB,EAAE;AAC5C,YAAA,MAAM,IAAI,UAAU,CAAC,iBAAiB,iBAAiB,CAAA,+BAAA,CAAiC,CAAC,CAAC;AAC3F,SAAA;;QAGD,MAAM,IAAI,GAAG,UAAU,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;AACxC,QAAA,IAAI,CAAC,IAAI,IAAI,IAAI,KAAK,EAAE,EAAE;YACxB,MAAM,IAAI,UAAU,CAAC,CAAA,8BAAA,EAAiC,UAAU,CAAC,GAAG,CAAG,CAAA,CAAA,CAAC,CAAC;AAC1E,SAAA;KACF;AAEM,IAAA,iBAAiB,CAAC,UAA2B,EAAA;QAClD,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,IAAI,CAAC,cAAc,EAAE,UAAU,CAAC,CAAC;QACtD,IAAI,CAAC,cAAc,EAAE,CAAC;KACvB;;IAGM,kBAAkB,GAAA;QACvB,OAAO,CAAA,EAAG,IAAI,CAAC,IAAI,CAAA,EAAG,IAAI,CAAC,kBAAkB,CAAA,EAAG,gBAAgB,CAAA,CAAE,CAAC;KACpE;IAEM,uBAAuB,GAAA;QAC5B,OAAO,IAAI,CAAC,oBAAoB,CAAC;KAClC;IAEM,cAAc,GAAA;QACnB,OAAO,IAAI,CAAC,WAAW,CAAC;KACzB;AACF,CAAA;AAED,MAAM,0BAA2B,SAAQjC,0BAAiB,CAAA;AAQxD,IAAA,WAAA,CACE,YAA+B,EAC/B,UAAyB,EACzB,OAA6B,EAAA;AAE7B,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;AAXZ,QAAA,IAAA,CAAA,aAAa,GAA0B;YACtD,OAAO,EAAE,IAAIkC,oBAAW,EAAE;AAC1B,YAAA,MAAM,EAAE,GAAG;YACX,OAAO,EAAE,IAAI/B,oBAAW,EAAE;SAC3B,CAAC;AASA,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,MAAM,IAAI,CAAC,YAAY,CAAC,sBAAsB,CAAC,OAAO,CAAC,CAAC;AAExD,QAAA,OAAO,IAAI,CAAC,aAAa,CAAC;KAC3B;AACF,CAAA;AAED,MAAM,iCAAiC,CAAA;AAGrC,IAAA,WAAA,CAAY,YAA+B,EAAA;AACzC,QAAA,IAAI,CAAC,YAAY,GAAG,YAAY,CAAC;KAClC;IAEM,MAAM,CACX,UAAyB,EACzB,OAA6B,EAAA;QAE7B,OAAO,IAAI,0BAA0B,CAAC,IAAI,CAAC,YAAY,EAAE,UAAU,EAAE,OAAO,CAAC,CAAC;KAC/E;AACF,CAAA;AAED,MAAM,uBAAwB,SAAQH,0BAAiB,CAAA;;;IAGrD,WAAY,CAAA,UAAyB,EAAE,OAA6B,EAAA;AAClE,QAAA,KAAK,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KAC5B;IAEM,MAAM,WAAW,CAAC,OAAoB,EAAA;QAC3C,IAAI,aAAa,GAAG,EAAE,CAAC;QAEvB,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE;YACnD,IAAI,MAAM,CAAC,MAAM,CAAC,IAAI,EAAE,eAAe,CAAC,YAAY,CAAC,EAAE;AACrD,gBAAA,aAAa,GAAG,MAAM,CAAC,IAAI,CAAC;AAC7B,aAAA;AACF,SAAA;QAED,IAAI,aAAa,KAAK,EAAE,EAAE;YACxB,OAAO,CAAC,OAAO,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;AACvC,SAAA;QAED,OAAO,IAAI,CAAC,WAAW,CAAC,WAAW,CAAC,OAAO,CAAC,CAAC;KAC9C;AACF,CAAA;AAED,MAAM,8BAA8B,CAAA;IAC3B,MAAM,CAAC,UAAyB,EAAE,OAA6B,EAAA;AACpE,QAAA,OAAO,IAAI,uBAAuB,CAAC,UAAU,EAAE,OAAO,CAAC,CAAC;KACzD;AACF;;AC9fD;AAuDA;;;;AAIG;MACU,eAAe,CAAA;IA8B1B,WACE,CAAA,GAAW,EACX,oBAIgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;AACjC,SAAA;aAAM,IAAI,CAAC,oBAAoB,EAAE;;YAEhC,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA;AACL,YAAA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;AACvD,SAAA;AAED,QAAA,MAAM,oBAAoB,GAAG,IAAI,oBAAoB,CAAC,GAAG,EAAE,QAAQ,CAAC,sBAAsB,EAAE,CAAC,CAAC;AAE9F,QAAA,MAAM,IAAI,GAAG,UAAU,CAAC,GAAG,CAAC,CAAC;AAC7B,QAAA,IAAI,IAAI,IAAI,IAAI,KAAK,GAAG,EAAE;;YAExB,IAAI,CAAC,yBAAyB,GAAG,IAAI,SAAS,CAAC,oBAAoB,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA;YACL,IAAI,CAAC,yBAAyB,GAAG,IAAI,OAAO,CAAC,oBAAoB,CAAC,CAAC;AACpE,SAAA;KACF;AAED;;;AAGG;IACI,WAAW,GAAA;QAChB,OAAO,IAAI,SAAS,EAAE,CAAC;KACxB;AAsCM,IAAA,MAAM,WAAW,CACtB,iBAA0C,EAC1C,mBAKa;;;IAG
b,OAA2B,EAAA;AAE3B,QAAA,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;AAC9B,QAAA,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;AAC/C,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;gBACvC,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAsC,EAAE,OAAO,CAAC,CAAC;AAC1F,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,CAAC,UAAU,CAAC,eAAe,EAAE,mBAAwC,CAAC,CAAC;AACnF,aAAA;AACF,SAAA;AACD,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;KAChC;AAkDM,IAAA,MAAM,kBAAkB,CAC7B,iBAA0C,EAC1C,gBAIc,EACd,aAA+C;;;IAG/C,OAA4B,EAAA;AAE5B,QAAA,MAAM,KAAK,GAAG,IAAI,SAAS,EAAE,CAAC;AAC9B,QAAA,KAAK,MAAM,eAAe,IAAI,iBAAiB,EAAE;AAC/C,YAAA,IAAI,OAAO,eAAe,KAAK,QAAQ,EAAE;AACvC,gBAAA,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAAmC,EACnC,aAA2B,EAC3B,OAAO,CACR,CAAC;AACH,aAAA;AAAM,iBAAA;gBACL,MAAM,KAAK,CAAC,iBAAiB,CAC3B,eAAe,EACf,gBAA8B,EAC9B,aAAmC,CACpC,CAAC;AACH,aAAA;AACF,SAAA;AACD,QAAA,OAAO,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;KAChC;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAkCG;AACI,IAAA,MAAM,WAAW,CACtB,YAAuB,EACvB,UAA8C,EAAE,EAAA;QAEhD,IAAI,CAAC,YAAY,IAAI,YAAY,CAAC,cAAc,EAAE,CAAC,IAAI,KAAK,CAAC,EAAE;AAC7D,YAAA,MAAM,IAAI,UAAU,CAAC,wDAAwD,CAAC,CAAC;AAChF,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QACpF,IAAI;AACF,YAAA,MAAM,gBAAgB,GAAG,YAAY,CAAC,kBAAkB,EAAE,CAAC;;AAG3D,YAAA,MAAM,gBAAgB,GACpB,MAAM,IAAI,CAAC,yBAAyB,CAAC,WAAW,CAC9C,cAAc,CAAC,gBAAgB,CAAC,EAChC,YAAY,CAAC,uBAAuB,EAAE,EACtC,gBAAgB,EAEX,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;;AAGJ,YAAA,MAAM,mBAAmB,GAAG,IAAI,mBAAmB,CACjD,gBAAgB,EAChB,YAAY,CAAC,cAAc,EAAE,CAC9B,CAAC;AACF,YAAA,MAAM,eAAe,GAAG,MAAM,mBAAmB,CAAC,kBAAkB,EAAE,CAAC;AAEvE,YAAA,MAAM,GAAG,GAAiC;gBACxC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,WAAW,EAAE,gBAAgB,CAAC,WAAW;gBACzC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,SAAS,EAAE,gBAAgB,CAAC,SAAS;gBACrC,eAAe,EAAE,gBAAgB,CAAC,eAAe;gBACjD,OAAO,EAAE,gBAAgB,CAAC,OAAO;gBACjC,YAAY,EAAE,eAAe,CAAC,YAAY;gBAC1C,0BAA0B,EAAE,eAAe,CAAC,0BAA0B;gBACtE,uBAAuB,EAAE,eAAe,CAAC,uBAAuB;aACjE,CAAC;AAEF,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEoB,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AACF;;ACgRD;;AAEG;AACG,MAAO,eAAgB,SAAQ,aAAa,CAAA;IAgEhD,WACE,CAAA,qBAA6B,EAC7B,mCAKgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,GAAW,CAAC;AAChB,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,IAAI,cAAc,CAAC,mCAAmC,CAAC,EAAE;;YAEvD,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,mCAAmC,CAAC;AAChD,SAAA;AAAM,aAAA,IACL,CAACrB,eAAM,IAAI,mCAAmC,YAAY,0BAA0B;AACpF,YAAA,mCAAmC,YAAY,mBAAmB;YAClEe,0BAAiB,CAAC,mCAAmC,CAAC,EACtD;;YAEA,GAAG,GAAG,qBAAqB,CAAC;AAC5B,YAAA,QAAQ,GAAG,WAAW,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;AACtE,SAAA;AAAM,aAAA,IACL,CAAC,mCAAmC;YACpC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;;YAGA,GAAG,GAAG,qBAAqB,CAAC;YAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AAAM,aAAA,IACL,mCAAmC;YACnC,OAAO,mCAAmC,KAAK,QAAQ,EACvD;;YAEA,MAAM,aAAa,GAAG,mCAAmC,CAAC;AAE1D,YAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,qBAAqB,CAAC,CAAC;AAC3E,YAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,gBAAA,IAAIf,eAAM,EAAE;AACV,oBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;AACF,oBAAA,GAAG,GAAG,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,CAAC;AAE7E,oBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;wBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,qBAAA;AAED,oBAAA,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;AACtD,iBAAA;AAAM,qBAAA;AACL,oBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,iBAAA
;AACF,aAAA;AAAM,iBAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;gBAClD,GAAG;oBACD,eAAe,CAAC,cAAc,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC;wBACtE,GAAG;wBACH,cAAc,CAAC,UAAU,CAAC;gBAC5B,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,aAAA;AACF,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CAAC,yDAAyD,CAAC,CAAC;AAC5E,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AACrB,QAAA,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,uBAAuB,EAAE,CAAC;QACrD,IAAI,CAAC,gBAAgB,GAAG,IAAI,SAAS,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAClE;AApID;;AAEG;AACH,IAAA,IAAW,aAAa,GAAA;QACtB,OAAO,IAAI,CAAC,cAAc,CAAC;KAC5B;AAiID;;;;;;;;;;;;;;;AAeG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAkC,EAAE,EAAA;AACtD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;;;AAGF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACpC,OAAO,CAAA,EACP,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,iBAAiB,CAC5B,OAAA,GAAkC,EAAE,EAAA;;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,wBAAwB,EAAE;gBACrD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EACL,iFAAiF;AACpF,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YAED,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,MAAM,CAAC,OAAA,GAAkC,EAAE,EAAA;AACtD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;YACF,MAAM,IAAI,CAAC,aAAa,CAAC;gBACvB,WAAW,EAAE,OAAO,CAAC,WAAW;gBAChC,cAAc,EAAE,cAAc,CAAC,cAAc;AAC9C,aAAA,CAAC,CAAC;AACH,YAAA,OAAO,IAAI,CAAC;AACb,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;AACf,YAAA,IAAI,CAAC,CAAC,UAAU,KAAK,GAAG,EAAE;gBACxB,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,sDAAsD;AAChE,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAO,KAAK,CAAC;AACd,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,aAAa,CAAC,QAAgB,EAAA;AACnC,QAAA,OAAO,IAAI,UAAU,CAAC,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KAC/F;AAED;;;;AAIG;AACI,IAAA,mBAAmB,CAAC,QAAgB,EAAA;AACzC,QAAA,OAAO,IAAI,gBAAgB,CACzB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;;;;AAcG;AACI,IAAA,kBAAkB,CAAC,QAAgB,EAAA;AACxC,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;AAIG;AACI,IAAA,iBAAiB,C
AAC,QAAgB,EAAA;AACvC,QAAA,OAAO,IAAI,cAAc,CACvB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,QAAQ,CAAC,CAAC,EACvD,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAyC,EAAE,EAAA;AAE3C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,+BAA+B,EAAE,OAAO,CAAC,CAAC;QACtF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,aAAa,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAC9C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,OAAO,CAAC,UAAU,CAAA,EAClB,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,MAAM,CACjB,OAAA,GAAwC,EAAE,EAAA;AAE1C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wBAAwB,EAAE,OAAO,CAAC,CAAC;QAC/E,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,MAAM,CACvC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAwC,EAAE,EAAA;;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,gCAAgC,EAAE,OAAO,CAAC,CAAC;QAEvF,IAAI;YACF,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC9C,OACE,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,IAAI,EACZ,EAAA,GAAG,CACN,EAAA,EAAA,SAAS,EAAE,GAAG,CAAC,SAAS,EACxB,CAAA,CAAA;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAA,MAAA,CAAC,CAAC,OAAO,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,SAAS,MAAK,mBAAmB,EAAE;gBAChD,IAAI,CAAC,SAAS,CAAC;oBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;AAC1B,oBAAA,OAAO,EAAE,iEAAiE;AAC3E,iBAAA,CAAC,CAAC;AACH,gBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,SAAS,EAAE,KAAK,EACb,EAAA,CAAA,EAAA,GAAA,CAAC,CAAC,QAAQ,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,aAAa,KAC5B,SAAS,EAAE,CAAC,CAAC,QAAQ,EACrB,CAAA,CAAA;AACH,aAAA;YACD,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACI,IAAA,MAAM,WAAW,CACtB,QAAmB,EACnB,UAAuC,EAAE,EAAA;AAEzC,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,IAAI,OAAO,CAAC,UAAU,CAAC,iBAAiB,EAAE;AACxC,YAAA,MAAM,IAAI,UAAU,CAClB,mGAAmG,CACpG,CAAC;AACH,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,6BAA6B,EAAE,OAAO,CAAC,CAAC;QAEpF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,QAAQ,EACR,wBAAwB,EAAE,OAAO,CAAC,UAAU,EAAA,EACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,eAAe,CAC1B,OAAA,GAA2C,EAAE,EAAA;AAE7C,QAAA,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE;AACvB,YAAA,OAAO,CAAC,UAAU,GA
AG,EAAE,CAAC;AACzB,SAAA;AAED,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QAExF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAAA,MAAA,CAAA,MAAA,CAAA,EAC1D,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACtC,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AAEH,YAAA,MAAM,GAAG,GAAqC;gBAC5C,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,gBAAgB,EAAE,QAAQ,CAAC,gBAAgB;gBAC3C,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,IAAI,EAAE,QAAQ,CAAC,IAAI;gBACnB,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,YAAY,EAAE,QAAQ,CAAC,YAAY;gBACnC,SAAS,EAAE,QAAQ,CAAC,SAAS;gBAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe;AACzC,gBAAA,iBAAiB,EAAE,EAAE;gBACrB,OAAO,EAAE,QAAQ,CAAC,OAAO;aAC1B,CAAC;AAEF,YAAA,KAAK,MAAM,UAAU,IAAI,QAAQ,EAAE;gBACjC,IAAI,YAAY,GAAQ,SAAS,CAAC;gBAClC,IAAI,UAAU,CAAC,YAAY,EAAE;AAC3B,oBAAA,YAAY,GAAG;AACb,wBAAA,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;qBACjD,CAAC;AAEF,oBAAA,IAAI,UAAU,CAAC,YAAY,CAAC,SAAS,EAAE;AACrC,wBAAA,YAAY,CAAC,SAAS,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;AACtE,qBAAA;AAED,oBAAA,IAAI,UAAU,CAAC,YAAY,CAAC,QAAQ,EAAE;AACpC,wBAAA,YAAY,CAAC,QAAQ,GAAG,IAAI,IAAI,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAC;AACpE,qBAAA;AACF,iBAAA;AAED,gBAAA,GAAG,CAAC,iBAAiB,CAAC,IAAI,CAAC;oBACzB,YAAY;oBACZ,EAAE,EAAE,UAAU,CAAC,EAAE;AAClB,iBAAA,CAAC,CAAC;AACJ,aAAA;AAED,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;AAgBG;IACI,MAAM,eAAe,CAC1B,MAAyB,EACzB,YAAiC,EACjC,UAA2C,EAAE,EAAA;QAE7C,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC,UAAU,IAAI,EAAE,CAAC;AAC9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,GAAG,GAA4B,EAAE,CAAC;AACxC,YAAA,KAAK,MAAM,UAAU,IAAI,YAAY,IAAI,EAAE,EAAE;gBAC3C,GAAG,CAAC,IAAI,CAAC;AACP,oBAAA,YAAY,EAAE;AACZ,wBAAA,SAAS,EAAE,UAAU,CAAC,YAAY,CAAC,SAAS;8BACxC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,SAAS,CAAC;AACzD,8BAAE,EAAE;AACN,wBAAA,WAAW,EAAE,UAAU,CAAC,YAAY,CAAC,WAAW;AAChD,wBAAA,QAAQ,EAAE,UAAU,CAAC,YAAY,CAAC,QAAQ;8BACtC,oBAAoB,CAAC,UAAU,CAAC,YAAY,CAAC,QAAQ,CAAC;AACxD,8BAAE,EAAE;AACP,qBAAA;oBACD,EAAE,EAAE,UAAU,CAAC,EAAE;AAClB,iBAAA,CAAC,CAAC;AACJ,aAAA;AAED,YAAA,OAAO,MAAM,IAAI,CAAC,gBAAgB,CAAC,eAAe,CAChD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACN,YAAY,EAAE,GAAG,EACjB,qBAAqB,EAAE,OAAO,CAAC,UAAU,EACzC,wBAAwB,EAAE,OAAO,CAAC,UAAU,IACzC,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;AAKG;AACI,IAAA,kBAAkB,CAAC,cAAuB,EAAA;AAC/C,QAAA,OAAO,IAAI,eAAe,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;KAClD;AAED;;;;;;;;;;;;;;;;;;;;;AAqBG;IACI,MAAM,eAAe,CAC1B,QAAgB,EAChB,IAAqB,EACrB,aAAqB,EACrB,OAAA,GAAkC,EAAE,EAAA;AAEpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,QAAQ,CAAC,CAAC;AAC1D,YAAA,MAAM,QAAQ,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,IAAI,EAAE,aAAa,EAAE,cAAc,CAAC,CAAC;YACnF,OAAO;gBACL,eAAe;gBACf,QAAQ;aACT,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,MAAM,UAAU,CACrB,QAAgB,EAChB,UAAsC,EAAE,EAAA;AAExC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAA
E,GAAG,UAAU,CAAC,4BAA4B,EAAE,OAAO,CAAC,CAAC;QACnF,IAAI;YACF,IAAI,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;YAC9C,IAAI,OAAO,CAAC,SAAS,EAAE;gBACrB,UAAU,GAAG,UAAU,CAAC,WAAW,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;AACxD,aAAA;AACD,YAAA,OAAO,MAAM,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AAChD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACK,IAAA,MAAM,mBAAmB,CAC/B,MAAe,EACf,UAA4C,EAAE,EAAA;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,mBAAmB,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAC9D,MAAM,EAAA,EACH,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;AACnD,gBAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;AAClF,aAAA;AAED,YAAA,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACJ,QAAQ,CAAC,SAAS,CACrB,EAAA,EAAA,UAAU,EAAE,qCAAqC,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,EAElF,CAAA,EAAA,OAAO,kCACF,QAAQ,CAAC,OAAO,CACnB,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AAC5D,wBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;AACF,wBAAA,OAAO,QAAQ,CAAC;qBACjB,CAAC,MAEL,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;AAUG;IACK,MAAM,wBAAwB,CACpC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE,EAAA;;AAE9C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QACF,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,wBAAwB,CAAC,SAAS,gCAC7E,MAAM,EAAA,EACH,OAAO,CACP,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AAEH,YAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,EAAE,CAAC;YAChC,IAAK,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,KAAK,SAAS,EAAE;AACnD,gBAAA,QAAQ,CAAC,OAAO,CAAC,SAAS,GAAG,gBAAgB,CAAE,QAAQ,CAAC,OAAe,CAAC,MAAM,CAAC,CAAC,CAAC;AAClF,aAAA;AAED,YAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,EAAE,CAAC;YACnC,IAAK,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,KAAK,SAAS,EAAE;AACzD,gBAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,GAAG,mBAAmB,CAChD,QAAQ,CAAC,OAAe,CAAC,YAAY,CAAC,CACxC,CAAC;AACH,aAAA;AAED,YAAA,MAAM,eAAe,GAChB,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,QAAQ,KACX,SAAS,EAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACJ,QAAQ,CAAC,SAAS,CACrB,EAAA,EAAA,UAAU,EAAE,0CAA0C,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,CAAC,EAEvF,CAAA,EAAA,OAAO,kCACF,QAAQ,CAAC,OAAO,CACnB,EAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,eAAe,KAAI;AAC5D,wBAAA,MAAM,QAAQ,GAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACT,eAAe,CAAA,EAAA,EAClB,IAAI,EAAE,gBAAgB,CAAC,eAAe,CAAC,IAAI,CAAC,EAC5C,IAAI,EAAE,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,EACtC,iCAAiC,EAAE,4BAA4B,CAC7D,eAAe,CAAC,yBAAyB,CAC1C,GACF,CAAC;AACF,wBAAA,OAAO,QAAQ,CAAC;AAClB,qBAAC,CAAC,EACF,YAAY,EAAE,CAAA,EAAA,GAAA,QAAQ,CAAC,OAAO,CAAC,YAAY,0CAAE,GAAG,CAAC,CAAC,kBAAkB,KAAI;AACtE,wBAAA,MAAM,UAAU,GAAe;AAC7B,4BAAA,IAAI,EAAE,gBAAgB,CAAC,kBAAkB,CAAC,IAAI,CAAC;yBAChD,CAAC;AACF,wBAAA,OAAO,UAAU,CAA
C;qBACnB,CAAC,MAEL,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;AAWG;AACY,IAAA,YAAY,CACzB,MAAe,EACf,OAAA,GAA4C,EAAE,EAAA;;AAE9C,YAAA,IAAI,4BAA4B,CAAC;AACjC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,4BAA4B,GAAG,MAAME,aAAA,CAAA,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;AAC/E,oBAAA,MAAM,GAAG,4BAA4B,CAAC,iBAAiB,CAAC;AACxD,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,4BAA4B,CAAA,CAAA,CAAC;AAC1C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;AAIG;IACY,SAAS,CACtB,UAA4C,EAAE,EAAA;;;AAE9C,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAiD,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAAxE,MAAM,4BAA4B,WAAA,CAAA;AAC3C,oBAAA,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,4BAA4B,CAAC,OAAO,CAAC,SAAS,CAAA,CAAA,CAAA,CAAC;AACvD,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqEG;IACI,aAAa,CAClB,UAAqC,EAAE,EAAA;QAEvC,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;AAClC,YAAA,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAClC,SAAA;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;AACtC,YAAA,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;AACrC,SAAA;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;AACrC,YAAA,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACpC,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;AACD,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,GAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,EACnD,CAAC;;QAGF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC;QAC5C,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,cAAc,EACjB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;;AAYG;AACY,IAAA,qBAAqB,CAClC,SAAiB,EACjB,MAAe,EACf,UAA4C,EAAE,EAAA;;AAE9C,YAAA,IAAI,iCAAiC,CAAC;AACtC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,iCAAiC,GAAG,MAAAT,aAAA,CAAM,IAAI,CAAC,wBAAwB,CACrE,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,CAAC;AACF,oBAAA,MAAM,GAAG,iCAAiC,CAAC,iBAAiB,CAAC;AAC7D,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,iCAAiC,CAAA,CAAA,CAAC;AAC/C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;AAKG;AACY,IAAA,oBAAoB,CACjC,SAAiB,EACjB,OAAA,GAA4C,EAAE,EAAA;;;AAE9C,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAAsD,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,qBAAqB,CAC9E,SAAS,EACT,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,E
AAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,iCAAiC,WAAA,CAAA;AAKhD,oBAAA,MAAM,OAAO,GAAG,iCAAiC,CAAC,OAAO,CAAC;oBAC1D,IAAI,OAAO,CAAC,YAAY,EAAE;AACxB,wBAAA,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,YAAY,EAAE;AACzC,4BAAA,MAAA,MAAAA,aAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACE,IAAI,EAAE,QAAQ,EACX,EAAA,MAAM,EACV,CAAC;AACH,yBAAA;AACF,qBAAA;AACD,oBAAA,KAAK,MAAM,IAAI,IAAI,OAAO,CAAC,SAAS,EAAE;AACpC,wBAAA,MAAA,MAAAA,aAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAQ,IAAI,EAAE,MAAM,EAAK,EAAA,IAAI,EAAE,CAAC;AACjC,qBAAA;AACF,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA6EG;AACI,IAAA,oBAAoB,CACzB,SAAiB,EACjB,OAAA,GAAqC,EAAE,EAAA;QAKvC,IAAI,SAAS,KAAK,EAAE,EAAE;AACpB,YAAA,MAAM,IAAI,UAAU,CAAC,iDAAiD,CAAC,CAAC;AACzE,SAAA;QAED,MAAM,OAAO,GAA2B,EAAE,CAAC;QAC3C,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,sBAAsB,EAAE;AAClC,YAAA,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC;AAClC,SAAA;QACD,IAAI,OAAO,CAAC,WAAW,EAAE;AACvB,YAAA,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;AACtB,SAAA;QACD,IAAI,OAAO,CAAC,0BAA0B,EAAE;AACtC,YAAA,OAAO,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;AACrC,SAAA;QACD,IAAI,OAAO,CAAC,yBAAyB,EAAE;AACrC,YAAA,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;AACpC,SAAA;QACD,IAAI,OAAO,CAAC,gBAAgB,EAAE;AAC5B,YAAA,OAAO,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;AAC3B,SAAA;AACD,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,cAAc,GACf,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,OAAO,CACP,GAAC,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,EACnD,CAAC;;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,EAAE,cAAc,CAAC,CAAC;QAClE,OAAO;AACL;;AAEG;AACH,YAAA,MAAM,IAAI,GAAA;AACR,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,qBAAqB,CAAC,SAAS,EAAE,QAAQ,CAAC,iBAAiB,EACrE,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,cAAc,EACjB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;;;;;;AAgBG;IACK,MAAM,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE,EAAA;AAEpD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAE/F,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,gBAAgB,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EACtD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,EAAA,EAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,CAAA,EAAA,EACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,CAAA,EAAA,GAAA,IAAI,CAAC,IAAI,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C,qBAAA;AACD,oBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,IAAI,CAAA,EAAA,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAG,CAAA,CAAA;iBACvD,CAAC,GACH,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEF,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,
OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACY,IAAA,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAkD,EAAE,EAAA;;AAEpD,YAAA,IAAI,QAAQ,CAAC;AACb,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,QAAQ,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;AACtC,oBAAA,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,MAAM,MAAAA,aAAA,CAAA,QAAQ,CAAA,CAAC;AAChB,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;AAQG;AACY,IAAA,oBAAoB,CACjC,sBAA8B,EAC9B,OAAA,GAAkD,EAAE,EAAA;;;AAEpD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;AACtB,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA4EG;AACI,IAAA,eAAe,CACpB,sBAA8B,EAC9B,OAAA,GAA0C,EAAE,EAAA;;AAG5C,QAAA,MAAM,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,EACpF,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;IAEO,uBAAuB,GAAA;AAC7B,QAAA,IAAI,aAAa,CAAC;QAClB,IAAI;;;;;;YAOF,MAAM,SAAS,GAAGjC,mBAAU,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAE7C,YAAA,IAAI,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,EAAE;;;;AAIjD,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;AAAM,iBAAA,IAAI,iBAAiB,CAAC,SAAS,CAAC,EAAE;;;;AAIvC,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;AAAM,iBAAA;;;AAGL,gBAAA,aAAa,GAAG,SAAS,CAAC,OAAO,EAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpD,aAAA;;AAGD,YAAA,aAAa,GAAG,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAElD,IAAI,CAAC,aAAa,EAAE;AAClB,gBAAA,MAAM,IAAI,KAAK,CAAC,oCAAoC,CAAC,CAAC;AACvD,aAAA;AAED,YAAA,OAAO,aAAa,CAAC;AACtB,SAAA;AAAC,QAAA,OAAO,KAAU,EAAE;AACnB,YAAA,MAAM,IAAI,KAAK,CAAC,4DAA4D,CAAC,CAAC;AAC/E,SAAA;KACF;AAED;;;;;;;;;;AAUG;AACI,IAAA,cAAc,CAAC,OAAuC,EAAA;AAC3D,QAAA,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,KAAI;YAC7B,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,gBAAA,MAAM,IAAI,UAAU,CAClB,uFAAuF,CACxF,CAAC;AACH,aAAA;AAED,YAAA,MAAM,GAAG,GAAG,8BAA8B,iBAEtC,aAAa,EAAE,IAAI,CAAC,cAAc,IAC/B,OAAO,CAAA,EAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;YAEb,OAAO,CAAC,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC,CAAC;AAC3C,SAAC,CAAC,CAAC;KACJ;AAED;;;;;;AAMG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AACF;;ACtnED;AACA;AAEA;;;;;;;;AAQG;MACU,qBAAqB,CAAA;AAAlC,IAAA,WAAA,GAAA;AA4GE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAa,CAAA,aAAA,GAAY,KAAK,CAAC;AAEtC;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAM,CAAA,MAAA,GAA
Y,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAG,CAAA,GAAA,GAAY,KAAK,CAAC;AAE5B;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;AAE/B;;AAEG;QACI,IAAqB,CAAA,qBAAA,GAAY,KAAK,CAAC;AAE9C;;AAEG;QACI,IAAe,CAAA,eAAA,GAAY,KAAK,CAAC;KA0DzC;AApOC;;;;AAIG;IACI,OAAO,KAAK,CAAC,WAAmB,EAAA;AACrC,QAAA,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;AAE1D,QAAA,KAAK,MAAM,CAAC,IAAI,WAAW,EAAE;AAC3B,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;oBACnC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;oBAC3C,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAClC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACrC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;oBACjC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACpC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;oBACnD,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;oBAC7C,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,iCAAiC,CAAC,CAAA,CAAE,CAAC,CAAC;AAC9D,aAAA;AACF,SAAA;AAED,QAAA,OAAO,qBAAqB,CAAC;KAC9B;AAED;;;;;AAKG;IACI,OAAO,IAAI,CAAC,cAAyC,EAAA;AAC1D,QAAA,MAAM,qBAAqB,GAAG,IAAI,qBAAqB,EAAE,CAAC;QAC1D,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,KAAK,EAAE;AACxB,YAAA,qBAAqB,CAAC,KAAK,GAAG,IAAI,CAAC;AACpC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,aAAa,EAAE;AAChC,YAAA,qBAAqB,CAAC,aAAa,GAAG,IAAI,CAAC;AAC5C,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,IAAI,EAAE;AACvB,YAAA,qBAAqB,CAAC,IAAI,GAAG,IAAI,CAAC;AACnC,SAAA;QACD,IAAI,cAAc,CAAC,GAAG,EAAE;AACtB,YAAA,qBAAqB,CAAC,GAAG,GAAG,IAAI,CAAC;AAClC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,MAAM,EAAE;AACzB,YAAA,qBAAqB,CAAC,MAAM,GAAG,IAAI,CAAC;AACrC,SAAA;QACD,IAAI,cAAc,CAAC,OAAO,EAAE;AAC1B,YAAA,qBAAqB,CAAC,OAAO,GAAG,IAAI,CAAC;AACtC,SAAA;QACD,IAAI,cAAc,CAAC,qBAAqB,EAAE;AACxC,YAAA,qBAAqB,CAAC,qBAAqB,GAAG,IAAI,CAAC;AACpD,SAAA;QACD,IAAI,cAAc,CAAC,eAAe,EAAE;AAClC,YAAA,qBAAqB,CAAC,eAAe,GAAG,IAAI,CAAC;AAC9C,SAAA;AACD,QAAA,OAAO,qBAAqB,CAAC;KAC9B;AAmED;;;;;;;;;AASG;IACI,QAAQ,GAAA;;;;QAIb,MAAM,WAAW,GAAa,EAAE,CAAC;QACjC,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,aAAa,EAAE;AACtB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,GAAG,EAAE;AACZ,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WA
AW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,qBAAqB,EAAE;AAC9B,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;QACD,IAAI,IAAI,CAAC,eAAe,EAAE;AACxB,YAAA,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACvB,SAAA;AACD,QAAA,OAAO,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC7B;AACF;;ACjPD;AACA;AAEA;;;;;;;;AAQG;MACU,uBAAuB,CAAA;AAApC,IAAA,WAAA,GAAA;AA6BE;;AAEG;QACI,IAAO,CAAA,OAAA,GAAY,KAAK,CAAC;AAEhC;;AAEG;QACI,IAAS,CAAA,SAAA,GAAY,KAAK,CAAC;AAElC;;AAEG;QACI,IAAM,CAAA,MAAA,GAAY,KAAK,CAAC;KAqBhC;AA9DC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,aAAqB,EAAA;AACvC,QAAA,MAAM,uBAAuB,GAAG,IAAI,uBAAuB,EAAE,CAAC;AAE9D,QAAA,KAAK,MAAM,CAAC,IAAI,aAAa,EAAE;AAC7B,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,OAAO,GAAG,IAAI,CAAC;oBACvC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,SAAS,GAAG,IAAI,CAAC;oBACzC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,uBAAuB,CAAC,MAAM,GAAG,IAAI,CAAC;oBACtC,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,0BAA0B,CAAC,CAAA,CAAE,CAAC,CAAC;AACvD,aAAA;AACF,SAAA;AAED,QAAA,OAAO,uBAAuB,CAAC;KAChC;AAiBD;;;;;AAKG;IACI,QAAQ,GAAA;QACb,MAAM,aAAa,GAAa,EAAE,CAAC;QACnC,IAAI,IAAI,CAAC,OAAO,EAAE;AAChB,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,IAAI,CAAC,SAAS,EAAE;AAClB,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,IAAI,CAAC,MAAM,EAAE;AACf,YAAA,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACzB,SAAA;AACD,QAAA,OAAO,aAAa,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC/B;AACF;;AC3ED;AACA;AAEA;;;;;;;;AAQG;MACU,kBAAkB,CAAA;AAA/B,IAAA,WAAA,GAAA;AAgCE;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAI,CAAA,IAAA,GAAY,KAAK,CAAC;AAE7B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;AAE9B;;AAEG;QACI,IAAK,CAAA,KAAA,GAAY,KAAK,CAAC;KAsB/B;AAvEC;;;;;AAKG;IACI,OAAO,KAAK,CAAC,QAAgB,EAAA;AAClC,QAAA,MAAM,kBAAkB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEpD,QAAA,KAAK,MAAM,CAAC,IAAI,QAAQ,EAAE;AACxB,YAAA,QAAQ,CAAC;AACP,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,IAAI,GAAG,IAAI,CAAC;oBAC/B,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA,KAAK,GAAG;AACN,oBAAA,kBAAkB,CAAC,KAAK,GAAG,IAAI,CAAC;oBAChC,MAAM;AACR,gBAAA;AACE,oBAAA,MAAM,IAAI,UAAU,CAAC,8BAA8B,CAAC,CAAA,CAAE,CAAC,CAAC;AAC3D,aAAA;AACF,SAAA;AAED,QAAA,OAAO,kBAAkB,CAAC;KAC3B;AAsBD;;;AAGG;IACI,QAAQ,GAAA;QACb,MAAM,QAAQ,GAAa,EAAE,CAAC;QAC9B,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,KAAK,EAAE;AACd,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;QACD,IAAI,IAAI,CAAC,IAAI,EAAE;AACb,YAAA,QAAQ,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACpB,SAAA;AACD,QAAA,OAAO,QAAQ,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KAC1B;AACF;;ACpFD;AA6EA;;;;;;;;;;AAUG;AACa,SAAA,iCAAiC,CAC/C,yBAAoD,EACpD,mBAA+C,EAAA;AAE/C,IAAA,MAAM,OAAO,GAAG,yBAAyB,CAAC,OAAO;UAC7C,yBAAyB,CAAC,OAAO;UACjC,eAAe,CAAC;IAEpB,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,qBAAqB;QAC3D,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,aAAa;QACnD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,eAAe;QACrD,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,GAAG;QACzC,OAAO,GAAG,YAAY,EACtB;AACA,QA
AA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;IAED,IACE,yBAAyB,CAAC,WAAW;QACrC,yBAAyB,CAAC,WAAW,CAAC,MAAM;QAC5C,OAAO,GAAG,YAAY,EACtB;AACA,QAAA,MAAM,UAAU,CAAC,iEAAiE,CAAC,CAAC;AACrF,KAAA;AAED,IAAA,IAAI,yBAAyB,CAAC,eAAe,IAAI,OAAO,GAAG,YAAY,EAAE;AACvE,QAAA,MAAM,UAAU,CAAC,2EAA2E,CAAC,CAAC;AAC/F,KAAA;AAED,IAAA,MAAM,iBAAiB,GAAG,qBAAqB,CAAC,KAAK,CACnD,yBAAyB,CAAC,WAAW,CAAC,QAAQ,EAAE,CACjD,CAAC;AACF,IAAA,MAAM,cAAc,GAAG,kBAAkB,CAAC,KAAK,CAAC,yBAAyB,CAAC,QAAQ,CAAC,CAAC,QAAQ,EAAE,CAAC;AAC/F,IAAA,MAAM,mBAAmB,GAAG,uBAAuB,CAAC,KAAK,CACvD,yBAAyB,CAAC,aAAa,CACxC,CAAC,QAAQ,EAAE,CAAC;AAEb,IAAA,IAAI,YAAoB,CAAC;IAEzB,IAAI,OAAO,IAAI,YAAY,EAAE;AAC3B,QAAA,YAAY,GAAG;AACb,YAAA,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;AACnB,YAAA,yBAAyB,CAAC,QAAQ;kBAC9B,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;AACjE,kBAAE,EAAE;AACN,YAAA,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;AAChE,YAAA,yBAAyB,CAAC,OAAO,GAAG,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,GAAG,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,GAAG,yBAAyB,CAAC,QAAQ,GAAG,EAAE;YAC5E,OAAO;YACP,yBAAyB,CAAC,eAAe,GAAG,yBAAyB,CAAC,eAAe,GAAG,EAAE;AAC1F,YAAA,EAAE;AACH,SAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACd,KAAA;AAAM,SAAA;AACL,QAAA,YAAY,GAAG;AACb,YAAA,mBAAmB,CAAC,WAAW;YAC/B,iBAAiB;YACjB,cAAc;YACd,mBAAmB;AACnB,YAAA,yBAAyB,CAAC,QAAQ;kBAC9B,oBAAoB,CAAC,yBAAyB,CAAC,QAAQ,EAAE,KAAK,CAAC;AACjE,kBAAE,EAAE;AACN,YAAA,oBAAoB,CAAC,yBAAyB,CAAC,SAAS,EAAE,KAAK,CAAC;AAChE,YAAA,yBAAyB,CAAC,OAAO,GAAG,eAAe,CAAC,yBAAyB,CAAC,OAAO,CAAC,GAAG,EAAE;YAC3F,yBAAyB,CAAC,QAAQ,GAAG,yBAAyB,CAAC,QAAQ,GAAG,EAAE;YAC5E,OAAO;AACP,YAAA,EAAE;AACH,SAAA,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AACd,KAAA;IAED,MAAM,SAAS,GAAW,mBAAmB,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;IAE9E,OAAO,IAAI,kBAAkB,CAC3B,OAAO,EACP,SAAS,EACT,iBAAiB,CAAC,QAAQ,EAAE,EAC5B,cAAc,EACd,mBAAmB,EACnB,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,QAAQ,EAClC,yBAAyB,CAAC,SAAS,EACnC,yBAAyB,CAAC,OAAO,EACjC,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,SAAS,EACT,yBAAyB,CAAC,eAAe,CAC1C,CAAC;AACJ;;ACqJA;;;AAGG;AACG,MAAO,iBAAkB,SAAQ,aAAa,CAAA;IAuGlD,WACE,CAAA,GAAW,EACX,oBAIgB;;;IAGhB,OAAgC,EAAA;AAEhC,QAAA,IAAI,QAAsB,CAAC;AAC3B,QAAA,IAAI,cAAc,CAAC,oBAAoB,CAAC,EAAE;YACxC,QAAQ,GAAG,oBAAoB,CAAC;AACjC,SAAA;AAAM,aAAA,IACL,CAACC,eAAM,IAAI,oBAAoB,YAAY,0BAA0B;AACrE,YAAA,oBAAoB,YAAY,mBAAmB;YACnDe,0BAAiB,CAAC,oBAAoB,CAAC,EACvC;AACA,YAAA,QAAQ,GAAG,WAAW,CAAC,oBAAoB,EAAE,OAAO,CAAC,CAAC;AACvD,SAAA;AAAM,aAAA;;YAEL,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AAC5D,SAAA;AACD,QAAA,KAAK,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACrB,IAAI,CAAC,cAAc,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC;KAC9D;AA3HD;;;;;;;;;;;AAWG;IACI,OAAO,oBAAoB,CAChC,gBAAwB;;;IAGxB,OAAgC,EAAA;AAEhC,QAAA,OAAO,GAAG,OAAO,IAAI,EAAE,CAAC;AACxB,QAAA,MAAM,cAAc,GAAG,4BAA4B,CAAC,gBAAgB,CAAC,CAAC;AACtE,QAAA,IAAI,cAAc,CAAC,IAAI,KAAK,mBAAmB,EAAE;AAC/C,YAAA,IAAIf,eAAM,EAAE;AACV,gBAAA,MAAM,mBAAmB,GAAG,IAAI,0BAA0B,CACxD,cAAc,CAAC,WAAY,EAC3B,cAAc,CAAC,UAAU,CAC1B,CAAC;AAEF,gBAAA,IAAI,CAAC,OAAO,CAAC,YAAY,EAAE;oBACzB,OAAO,CAAC,YAAY,GAAG+B,gCAAuB,CAAC,cAAc,CAAC,QAAQ,CAAC,CAAC;AACzE,iBAAA;gBAED,MAAM,QAAQ,GAAG,WAAW,CAAC,mBAAmB,EAAE,OAAO,CAAC,CAAC;gBAC3D,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;AAC5D,aAAA;AAAM,iBAAA;AACL,gBAAA,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAC;AACvF,aAAA;AACF,SAAA;AAAM,aAAA,IAAI,cAAc,CAAC,IAAI,KAAK,eAAe,EAAE;YAClD,MAAM,QAAQ,GAAG,WAAW,CAAC,IAAI,mBAAmB,EAAE,EAAE,OAAO,CAAC,CAAC;AACjE,YAAA,OAAO,IAAI,iBAAiB,CAAC,cAAc,CAAC,GAAG,GAAG,GAAG,GAAG,cAAc,CAAC,UAAU,EAAE,QAAQ,CAAC,CAAC;AAC9F,SAAA;AAAM,aAAA;AACL,YAAA,MAAM,IAAI,KAAK,CACb,0FAA0F,CAC3F,CAAC;AACH,SAAA;KACF;AAiFD;;;;;;;;;;;AAWG;AACI,IAAA,kBAAkB,CA
AC,aAAqB,EAAA;AAC7C,QAAA,OAAO,IAAI,eAAe,CACxB,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,kBAAkB,CAAC,aAAa,CAAC,CAAC,EAC5D,IAAI,CAAC,QAAQ,CACd,CAAC;KACH;AAED;;;;;;AAMG;AACI,IAAA,MAAM,eAAe,CAC1B,aAAqB,EACrB,UAAkC,EAAE,EAAA;AAKpC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;YAC/D,MAAM,uBAAuB,GAAG,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;YAC7E,OAAO;gBACL,eAAe;gBACf,uBAAuB;aACxB,CAAC;AACH,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEV,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;AACI,IAAA,MAAM,eAAe,CAC1B,aAAqB,EACrB,UAAwC,EAAE,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;AAC/D,YAAA,OAAO,MAAM,eAAe,CAAC,MAAM,CAAC,cAAc,CAAC,CAAC;AACrD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;IACI,MAAM,iBAAiB,CAC5B,oBAA4B,EAC5B,uBAA+B,EAC/B,UAA2C,EAAE,EAAA;AAK7C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,qCAAqC,EAAE,OAAO,CAAC,CAAC;QAC5F,IAAI;AACF,YAAA,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAC7C,OAAO,CAAC,wBAAwB,IAAI,oBAAoB,CACzD,CAAC;;YAEF,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;AAChF,YAAA,MAAM,yBAAyB,GAAG,MAAM,gBAAgB,CAAC,OAAO,iBAC9D,oBAAoB;gBACpB,uBAAuB,EAAA,EACpB,cAAc,CAAA,CACjB,CAAC;AACH,YAAA,OAAO,EAAE,eAAe,EAAE,yBAAyB,EAAE,CAAC;AACvD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;;;IAGK,MAAM,eAAe,CAC3B,mBAA2B,EAC3B,wBAAgC,EAChC,UAAyC,EAAE,EAAA;;AAK3C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,mCAAmC,EAAE,OAAO,CAAC,CAAC;QAC1F,IAAI;YACF,MAAM,eAAe,GAAG,IAAI,CAAC,kBAAkB,CAAC,wBAAwB,CAAC,CAAC;;YAE1E,MAAM,gBAAgB,GAAG,IAAI,SAAS,CAAC,eAAe,CAAC,sBAAsB,CAAC,CAAC,CAAC;YAChF,MAAM,uBAAuB,GAAG,MAAM,gBAAgB,CAAC,MAAM,CAAC,mBAAmB,EAC5E,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAA,cAAc,KACjB,aAAa,EAAE,MAAA,OAAO,CAAC,eAAe,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,OAAO,IAC/C,CAAC;AACH,YAAA,OAAO,EAAE,eAAe,EAAE,uBAAuB,EAAE,CAAC;AACrD,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;AAOG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,aAAa,CACxB,UAAiC,EACjC,UAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,CAAC,UAAU,EACvD,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,W
AAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;AAQG;AACI,IAAA,MAAM,aAAa,CACxB,OAAA,GAAuC,EAAE,EAAA;AAEzC,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,iCAAiC,EAAE,OAAO,CAAC,CAAC;QACxF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,aAAa,iBAC5C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;AASG;AACI,IAAA,MAAM,cAAc,CACzB,OAAA,GAAwC,EAAE,EAAA;AAE1C,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,kCAAkC,EAAE,OAAO,CAAC,CAAC;QACzF,IAAI;AACF,YAAA,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,cAAc,iBAC7C,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;AAaG;AACK,IAAA,MAAM,qBAAqB,CACjC,MAAe,EACf,UAA+C,EAAE,EAAA;AAEjD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,yCAAyC,EAAE,OAAO,CAAC,CAAC;QAEhG,IAAI;YACF,OAAO,MAAM,IAAI,CAAC,cAAc,CAAC,qBAAqB,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,MAAM,EACH,EAAA,OAAO,KACV,OAAO,EAAE,OAAO,OAAO,CAAC,OAAO,KAAK,QAAQ,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,GAAG,OAAO,CAAC,OAAO,EAAA,CAAA,EAC/E,kCAAkC,CAAC,cAAc,CAAC,CAAA,CACrD,CAAC;AACJ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;;;AAiBG;IACK,MAAM,sBAAsB,CAClC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE,EAAA;AAElD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CACzC,0CAA0C,EAC1C,OAAO,CACR,CAAC;QAEF,IAAI;AACF,YAAA,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,WAAW,CAAA,MAAA,CAAA,MAAA,CAAA,EACpD,WAAW,EAAE,OAAO,CAAC,WAAW,EAChC,KAAK,EAAE,sBAAsB,EAC7B,MAAM,EACN,WAAW,EAAE,OAAO,CAAC,WAAW,EAAA,EAC7B,kCAAkC,CAAC,cAAc,CAAC,EACrD,CAAC;YAEH,MAAM,eAAe,mCAChB,QAAQ,CAAA,EAAA,EACX,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,KAAK,EAAE,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,IAAI,KAAI;;oBACjC,IAAI,QAAQ,GAAG,EAAE,CAAC;oBAClB,IAAI,CAAA,CAAA,EAAA,GAAA,IAAI,CAAC,IAAI,MAAE,IAAA,IAAA,EAAA,KAAA,KAAA,CAAA,GAAA,KAAA,CAAA,GAAA,EAAA,CAAA,UAAU,CAAC,MAAM,MAAK,CAAC,EAAE;wBACtC,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC1C,qBAAA;AACD,oBAAA,OAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EAAY,IAAI,CAAA,EAAA,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,QAAQ,EAAG,CAAA,CAAA;iBACvD,CAAC,GACH,CAAC;AACF,YAAA,OAAO,eAAe,CAAC;AACxB,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEA,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;;;;;;;;;;AAeG;AACY,IAAA,uBAAuB,CACpC,sBAA8B,EAC9B,MAAe,EACf,UAAgD,EAAE,EAAA;;AAElD,YAAA,IAAI,QAAQ,CAAC;AACb,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;AACD,oBAAA,QAAQ,GAAG,MAAAE,aAAA,CAAM,IAAI,CAAC,sBAAsB,CAAC,sBAAsB,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;oBACtF,QAA
Q,CAAC,KAAK,GAAG,QAAQ,CAAC,KAAK,IAAI,EAAE,CAAC;AACtC,oBAAA,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC;oBACpC,MAAM,MAAAA,aAAA,CAAA,QAAQ,CAAA,CAAC;AAChB,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;AAQG;AACY,IAAA,oBAAoB,CACjC,sBAA8B,EAC9B,OAAA,GAAgD,EAAE,EAAA;;;AAElD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,uBAAuB,CACtD,sBAAsB,EACtB,MAAM,EACN,OAAO,CACR,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAJU,MAAM,OAAO,WAAA,CAAA;oBAKtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,KAAK,CAAA,CAAA,CAAA,CAAC;AACtB,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA8EG;AACI,IAAA,eAAe,CACpB,sBAA8B,EAC9B,OAAA,GAAwC,EAAE,EAAA;;AAG1C,QAAA,MAAM,kBAAkB,GAAA,MAAA,CAAA,MAAA,CAAA,EAAA,EACnB,OAAO,CACX,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,oBAAoB,CAAC,sBAAsB,EAAE,kBAAkB,CAAC,CAAC;QACnF,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,uBAAuB,CAAC,sBAAsB,EAAE,QAAQ,CAAC,iBAAiB,EACpF,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;;AAWG;AACY,IAAA,YAAY,CACzB,MAAe,EACf,OAAA,GAA+C,EAAE,EAAA;;AAEjD,YAAA,IAAI,6BAA6B,CAAC;AAClC,YAAA,IAAI,CAAC,CAAC,MAAM,IAAI,MAAM,KAAK,SAAS,EAAE;gBACpC,GAAG;oBACD,6BAA6B,GAAG,MAAMT,aAAA,CAAA,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,CAAC;AAClF,oBAAA,6BAA6B,CAAC,cAAc;AAC1C,wBAAA,6BAA6B,CAAC,cAAc,IAAI,EAAE,CAAC;AACrD,oBAAA,MAAM,GAAG,6BAA6B,CAAC,iBAAiB,CAAC;AACzD,oBAAA,MAAA,MAAAA,aAAA,CAAM,MAAMA,aAAA,CAAA,6BAA6B,CAAA,CAAA,CAAC;AAC3C,iBAAA,QAAQ,MAAM,EAAE;AAClB,aAAA;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;AAIG;IACY,SAAS,CACtB,UAA+C,EAAE,EAAA;;;AAEjD,YAAA,IAAI,MAA0B,CAAC;;AAC/B,gBAAA,KAA4B,IAAA,EAAA,GAAAS,mBAAA,CAAA,IAAI,CAAC,YAAY,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA,EAAA,EAAA,EAAA,EAAA,GAAA,MAAAT,aAAA,CAAA,EAAA,CAAA,IAAA,EAAA,CAAA,EAAA,CAAA,EAAA,CAAA,IAAA,GAAA;oBAAnD,MAAM,OAAO,WAAA,CAAA;oBACtB,MAAAA,aAAA,CAAA,OAAOU,sBAAA,CAAAD,mBAAA,CAAA,OAAO,CAAC,cAAc,CAAA,CAAA,CAAA,CAAC;AAC/B,iBAAA;;;;;;;;;SACF,CAAA,CAAA;AAAA,KAAA;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAyEG;IACI,cAAc,CACnB,UAAwC,EAAE,EAAA;AAE1C,QAAA,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;AACzB,YAAA,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;AAC5B,SAAA;QAED,MAAM,OAAO,GAAgC,EAAE,CAAC;QAChD,IAAI,OAAO,CAAC,cAAc,EAAE;AAC1B,YAAA,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;AACzB,SAAA;QACD,IAAI,OAAO,CAAC,eAAe,EAAE;AAC3B,YAAA,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;AAC1B,SAAA;QACD,IAAI,OAAO,CAAC,aAAa,EAAE;AACzB,YAAA,OAAO,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;AACxB,SAAA;;QAGD,MAAM,kBAAkB,mCACnB,OAAO,CAAA,GACN,OAAO,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,OAAO,EAAE,GAAG,EAAE,EAC1C,CAAC;QAEF,MAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,kBAAkB,CAAC,CAAC;QAChD,OAAO;AACL;;AAEG;YACH,IAAI,GAAA;AACF,gBAAA,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;aACpB;AACD;;AAEG;YACH,CAAC,MAAM,CAAC,aAAa,CAAC,GAAA;AACpB,gBAAA,OAAO,IAAI,CAAC;aACb;AACD;;AAEG;AACH,YAAA,MAAM,EAAE,CAAC,QAAyB,GAAA,EAAE,KAAI;AACtC,gBAAA,OAAO,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,iBAAiB,EAAA,MAAA,CAAA,MAAA,CAAA,EACjD,WAAW,EAAE,QAAQ,CAAC,WAAW,EAC9B,EAAA,kBAAkB,EACrB,CAAC;aACJ;SACF,CAAC;KACH;AAED;;;;;;;;;;AAUG;IACI,MAAM,oBAAoB,CAC/B,QAAc,EACd,SAAe,EACf,UAA8C,EAAE,EAAA;AAEhD,QAAA,MAAM,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,UAAU,CAAC,wCAAwC,EAAE,OAAO,CAAC,CAAC;QAC/F,IAAI;YACF,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,cAAc,CAAC,oBAAoB,CAC7D;AACE,gBA
AA,QAAQ,EAAE,oBAAoB,CAAC,QAAQ,EAAE,KAAK,CAAC;AAC/C,gBAAA,SAAS,EAAE,oBAAoB,CAAC,SAAS,EAAE,KAAK,CAAC;aAClD,EAEC,MAAA,CAAA,MAAA,CAAA,EAAA,WAAW,EAAE,OAAO,CAAC,WAAW,EAC7B,EAAA,kCAAkC,CAAC,cAAc,CAAC,CAAA,CAExD,CAAC;AAEF,YAAA,MAAM,iBAAiB,GAAG;gBACxB,cAAc,EAAE,QAAQ,CAAC,cAAc;gBACvC,cAAc,EAAE,QAAQ,CAAC,cAAc;AACvC,gBAAA,cAAc,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,cAAc,CAAC;AACjD,gBAAA,eAAe,EAAE,IAAI,IAAI,CAAC,QAAQ,CAAC,eAAe,CAAC;gBACnD,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,aAAa,EAAE,QAAQ,CAAC,aAAa;gBACrC,KAAK,EAAE,QAAQ,CAAC,KAAK;aACtB,CAAC;AAEF,YAAA,MAAM,GAAG,GACP,MAAA,CAAA,MAAA,CAAA,EAAA,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC7B,eAAe,EAAE,QAAQ,CAAC,eAAe,EACzC,OAAO,EAAE,QAAQ,CAAC,OAAO,EACzB,IAAI,EAAE,QAAQ,CAAC,IAAI,EACnB,SAAS,EAAE,QAAQ,CAAC,SAAS,EAC1B,EAAA,iBAAiB,CACrB,CAAC;AAEF,YAAA,OAAO,GAAG,CAAC;AACZ,SAAA;AAAC,QAAA,OAAO,CAAM,EAAE;YACf,IAAI,CAAC,SAAS,CAAC;gBACb,IAAI,EAAEX,0BAAc,CAAC,KAAK;gBAC1B,OAAO,EAAE,CAAC,CAAC,OAAO;AACnB,aAAA,CAAC,CAAC;AACH,YAAA,MAAM,CAAC,CAAC;AACT,SAAA;AAAS,gBAAA;YACR,IAAI,CAAC,GAAG,EAAE,CAAC;AACZ,SAAA;KACF;AAED;;;;;;AAMG;IACI,kBAAkB,GAAA;QACvB,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAC;KACrD;AAED;;;;;;;;;;;;;AAaG;AACI,IAAA,qBAAqB,CAC1B,SAAgB,EAChB,WAAA,GAAqC,qBAAqB,CAAC,KAAK,CAAC,GAAG,CAAC,EACrE,aAAA,GAAwB,KAAK,EAC7B,UAA+C,EAAE,EAAA;QAEjD,IAAI,EAAE,IAAI,CAAC,UAAU,YAAY,0BAA0B,CAAC,EAAE;AAC5D,YAAA,MAAM,UAAU,CACd,+FAA+F,CAChG,CAAC;AACH,SAAA;QAED,IAAI,SAAS,KAAK,SAAS,EAAE;AAC3B,YAAA,MAAM,GAAG,GAAG,IAAI,IAAI,EAAE,CAAC;AACvB,YAAA,SAAS,GAAG,IAAI,IAAI,CAAC,GAAG,CAAC,OAAO,EAAE,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC;AACnD,SAAA;AAED,QAAA,MAAM,GAAG,GAAG,iCAAiC,CAAA,MAAA,CAAA,MAAA,CAAA,EAEzC,WAAW;YACX,SAAS;YACT,aAAa,EACb,QAAQ,EAAE,kBAAkB,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,EAC/C,EAAA,OAAO,GAEZ,IAAI,CAAC,UAAU,CAChB,CAAC,QAAQ,EAAE,CAAC;QAEb,OAAO,gBAAgB,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;KACxC;AACF;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/CopyrightNotice.txt b/node_modules/@azure/storage-blob/node_modules/tslib/CopyrightNotice.txt new file mode 100644 index 0000000000..0e42542369 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/CopyrightNotice.txt @@ -0,0 +1,15 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/LICENSE.txt b/node_modules/@azure/storage-blob/node_modules/tslib/LICENSE.txt new file mode 100644 index 0000000000..bfe6430cb0 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/LICENSE.txt @@ -0,0 +1,12 @@ +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/README.md b/node_modules/@azure/storage-blob/node_modules/tslib/README.md new file mode 100644 index 0000000000..72ff8e79af --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/README.md @@ -0,0 +1,164 @@ +# tslib + +This is a runtime library for [TypeScript](http://www.typescriptlang.org/) that contains all of the TypeScript helper functions. + +This library is primarily used by the `--importHelpers` flag in TypeScript. +When using `--importHelpers`, a module that uses helper functions like `__extends` and `__assign` in the following emitted file: + +```ts +var __assign = (this && this.__assign) || Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; +}; +exports.x = {}; +exports.y = __assign({}, exports.x); + +``` + +will instead be emitted as something like the following: + +```ts +var tslib_1 = require("tslib"); +exports.x = {}; +exports.y = tslib_1.__assign({}, exports.x); +``` + +Because this can avoid duplicate declarations of things like `__extends`, `__assign`, etc., this means delivering users smaller files on average, as well as less runtime overhead. +For optimized bundles with TypeScript, you should absolutely consider using `tslib` and `--importHelpers`. + +# Installing + +For the latest stable version, run: + +## npm + +```sh +# TypeScript 3.9.2 or later +npm install tslib + +# TypeScript 3.8.4 or earlier +npm install tslib@^1 + +# TypeScript 2.3.2 or earlier +npm install tslib@1.6.1 +``` + +## yarn + +```sh +# TypeScript 3.9.2 or later +yarn add tslib + +# TypeScript 3.8.4 or earlier +yarn add tslib@^1 + +# TypeScript 2.3.2 or earlier +yarn add tslib@1.6.1 +``` + +## bower + +```sh +# TypeScript 3.9.2 or later +bower install tslib + +# TypeScript 3.8.4 or earlier +bower install tslib@^1 + +# TypeScript 2.3.2 or earlier +bower install tslib@1.6.1 +``` + +## JSPM + +```sh +# TypeScript 3.9.2 or later +jspm install tslib + +# TypeScript 3.8.4 or earlier +jspm install tslib@^1 + +# TypeScript 2.3.2 or earlier +jspm install tslib@1.6.1 +``` + +# Usage + +Set the `importHelpers` compiler option on the command line: + +``` +tsc --importHelpers file.ts +``` + +or in your tsconfig.json: + +```json +{ + "compilerOptions": { + "importHelpers": true + } +} +``` + +#### For bower and JSPM users + +You will need to add a `paths` mapping for `tslib`, e.g. 
For Bower users: + +```json +{ + "compilerOptions": { + "module": "amd", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["bower_components/tslib/tslib.d.ts"] + } + } +} +``` + +For JSPM users: + +```json +{ + "compilerOptions": { + "module": "system", + "importHelpers": true, + "baseUrl": "./", + "paths": { + "tslib" : ["jspm_packages/npm/tslib@2.x.y/tslib.d.ts"] + } + } +} +``` + +## Deployment + +- Choose your new version number +- Set it in `package.json` and `bower.json` +- Create a tag: `git tag [version]` +- Push the tag: `git push --tags` +- Create a [release in GitHub](https://github.com/microsoft/tslib/releases) +- Run the [publish to npm](https://github.com/microsoft/tslib/actions?query=workflow%3A%22Publish+to+NPM%22) workflow + +Done. + +# Contribute + +There are many ways to [contribute](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md) to TypeScript. + +* [Submit bugs](https://github.com/Microsoft/TypeScript/issues) and help us verify fixes as they are checked in. +* Review the [source code changes](https://github.com/Microsoft/TypeScript/pulls). +* Engage with other TypeScript users and developers on [StackOverflow](http://stackoverflow.com/questions/tagged/typescript). +* Join the [#typescript](http://twitter.com/#!/search/realtime/%23typescript) discussion on Twitter. +* [Contribute bug fixes](https://github.com/Microsoft/TypeScript/blob/master/CONTRIBUTING.md). + +# Documentation + +* [Quick tutorial](http://www.typescriptlang.org/Tutorial) +* [Programming handbook](http://www.typescriptlang.org/Handbook) +* [Homepage](http://www.typescriptlang.org/) diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/modules/index.js b/node_modules/@azure/storage-blob/node_modules/tslib/modules/index.js new file mode 100644 index 0000000000..aaac8bf970 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/modules/index.js @@ -0,0 +1,55 @@ +import tslib from '../tslib.js'; +const { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +} = tslib; +export { + __extends, + __assign, + __rest, + __decorate, + __param, + __metadata, + __awaiter, + __generator, + __exportStar, + __createBinding, + __values, + __read, + __spread, + __spreadArrays, + __spreadArray, + __await, + __asyncGenerator, + __asyncDelegator, + __asyncValues, + __makeTemplateObject, + __importStar, + __importDefault, + __classPrivateFieldGet, + __classPrivateFieldSet, + __classPrivateFieldIn, +}; diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/modules/package.json b/node_modules/@azure/storage-blob/node_modules/tslib/modules/package.json new file mode 100644 index 0000000000..aead43de36 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/modules/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/package.json b/node_modules/@azure/storage-blob/node_modules/tslib/package.json new file mode 100644 index 0000000000..d8a9b11e63 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/package.json @@ -0,0 +1,38 @@ +{ + "name": "tslib", + "author": "Microsoft 
Corp.", + "homepage": "https://www.typescriptlang.org/", + "version": "2.4.0", + "license": "0BSD", + "description": "Runtime library for TypeScript helper functions", + "keywords": [ + "TypeScript", + "Microsoft", + "compiler", + "language", + "javascript", + "tslib", + "runtime" + ], + "bugs": { + "url": "https://github.com/Microsoft/TypeScript/issues" + }, + "repository": { + "type": "git", + "url": "https://github.com/Microsoft/tslib.git" + }, + "main": "tslib.js", + "module": "tslib.es6.js", + "jsnext:main": "tslib.es6.js", + "typings": "tslib.d.ts", + "sideEffects": false, + "exports": { + ".": { + "module": "./tslib.es6.js", + "import": "./modules/index.js", + "default": "./tslib.js" + }, + "./*": "./*", + "./": "./" + } +} diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.d.ts b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.d.ts new file mode 100644 index 0000000000..b8e49f088a --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.d.ts @@ -0,0 +1,398 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ + +/** + * Used to shim class extends. + * + * @param d The derived class. + * @param b The base class. + */ +export declare function __extends(d: Function, b: Function): void; + +/** + * Copy the values of all of the enumerable own properties from one or more source objects to a + * target object. Returns the target object. + * + * @param t The target object to copy to. + * @param sources One or more source objects from which to copy properties + */ +export declare function __assign(t: any, ...sources: any[]): any; + +/** + * Performs a rest spread on an object. + * + * @param t The source value. + * @param propertyNames The property names excluded from the rest spread. + */ +export declare function __rest(t: any, propertyNames: (string | symbol)[]): any; + +/** + * Applies decorators to a target object + * + * @param decorators The set of decorators to apply. + * @param target The target object. + * @param key If specified, the own property to apply the decorators to. + * @param desc The property descriptor, defaults to fetching the descriptor from the target object. + * @experimental + */ +export declare function __decorate(decorators: Function[], target: any, key?: string | symbol, desc?: any): any; + +/** + * Creates an observing function decorator from a parameter decorator. + * + * @param paramIndex The parameter index to apply the decorator to. + * @param decorator The parameter decorator to apply. Note that the return value is ignored. + * @experimental + */ +export declare function __param(paramIndex: number, decorator: Function): Function; + +/** + * Creates a decorator that sets metadata. 
+ * + * @param metadataKey The metadata key + * @param metadataValue The metadata value + * @experimental + */ +export declare function __metadata(metadataKey: any, metadataValue: any): Function; + +/** + * Converts a generator function into a pseudo-async function, by treating each `yield` as an `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param P The optional promise constructor argument, defaults to the `Promise` property of the global object. + * @param generator The generator function + */ +export declare function __awaiter(thisArg: any, _arguments: any, P: Function, generator: Function): any; + +/** + * Creates an Iterator object using the body as the implementation. + * + * @param thisArg The reference to use as the `this` value in the function + * @param body The generator state-machine based implementation. + * + * @see [./docs/generator.md] + */ +export declare function __generator(thisArg: any, body: Function): any; + +/** + * Creates bindings for all enumerable properties of `m` on `exports` + * + * @param m The source object + * @param exports The `exports` object. + */ +export declare function __exportStar(m: any, o: any): void; + +/** + * Creates a value iterator from an `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `Iterable`, nor an `ArrayLike`. + */ +export declare function __values(o: any): any; + +/** + * Reads values from an `Iterable` or `ArrayLike` object and returns the resulting array. + * + * @param o The object to read from. + * @param n The maximum number of arguments to read, defaults to `Infinity`. + */ +export declare function __read(o: any, n?: number): any[]; + +/** + * Creates an array from iterable spread. + * + * @param args The Iterable objects to spread. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spread(...args: any[][]): any[]; + +/** + * Creates an array from array spread. + * + * @param args The ArrayLikes to spread into the resulting array. + * @deprecated since TypeScript 4.2 - Use `__spreadArray` + */ +export declare function __spreadArrays(...args: any[][]): any[]; + +/** + * Spreads the `from` array into the `to` array. + * + * @param pack Replace empty elements with `undefined`. + */ +export declare function __spreadArray(to: any[], from: any[], pack?: boolean): any[]; + +/** + * Creates an object that signals to `__asyncGenerator` that it shouldn't be yielded, + * and instead should be awaited and the resulting value passed back to the generator. + * + * @param v The value to await. + */ +export declare function __await(v: any): any; + +/** + * Converts a generator function into an async generator function, by using `yield __await` + * in place of normal `await`. + * + * @param thisArg The reference to use as the `this` value in the generator function + * @param _arguments The optional arguments array + * @param generator The generator function + */ +export declare function __asyncGenerator(thisArg: any, _arguments: any, generator: Function): any; + +/** + * Used to wrap a potentially async iterator in such a way so that it wraps the result + * of calling iterator methods of `o` in `__await` instances, and then yields the awaited values. + * + * @param o The potentially async iterator. 
+ * @returns A synchronous iterator yielding `__await` instances on every odd invocation + * and returning the awaited `IteratorResult` passed to `next` every even invocation. + */ +export declare function __asyncDelegator(o: any): any; + +/** + * Creates a value async iterator from an `AsyncIterable`, `Iterable` or `ArrayLike` object. + * + * @param o The object. + * @throws {TypeError} If `o` is neither `AsyncIterable`, `Iterable`, nor an `ArrayLike`. + */ +export declare function __asyncValues(o: any): any; + +/** + * Creates a `TemplateStringsArray` frozen object from the `cooked` and `raw` arrays. + * + * @param cooked The cooked possibly-sparse array. + * @param raw The raw string content. + */ +export declare function __makeTemplateObject(cooked: string[], raw: string[]): TemplateStringsArray; + +/** + * Used to shim default and named imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default, { Named, Other } from "mod"; + * // or + * import { default as Default, Named, Other } from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importStar<T>(mod: T): T; + +/** + * Used to shim default imports in ECMAScript Modules transpiled to CommonJS. + * + * ```js + * import Default from "mod"; + * ``` + * + * @param mod The CommonJS module exports object. + */ +export declare function __importDefault<T>(mod: T): T | { default: T }; + +/** + * Emulates reading a private instance field. + * + * @param receiver The instance from which to read the private field. + * @param state A WeakMap containing the private field value for an instance. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, get(o: T): V | undefined }, + kind?: "f" +): V; + +/** + * Emulates reading a private static field. + * + * @param receiver The object from which to read the private static field. + * @param state The class constructor containing the definition of the static field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates evaluating a private instance "get" accessor. + * + * @param receiver The instance on which to evaluate the private "get" accessor. + * @param state A WeakSet used to verify an instance supports the private "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + kind: "a", + f: () => V +): V; + +/** + * Emulates evaluating a private static "get" accessor. + * + * @param receiver The object on which to evaluate the private static "get" accessor. + * @param state The class constructor containing the definition of the static "get" accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "get" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. 
+ */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + kind: "a", + f: () => V +): V; + +/** + * Emulates reading a private instance method. + * + * @param receiver The instance from which to read a private method. + * @param state A WeakSet used to verify an instance supports the private method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private instance method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldGet<T extends object, V extends (...args: any[]) => unknown>( + receiver: T, + state: { has(o: T): boolean }, + kind: "m", + f: V +): V; + +/** + * Emulates reading a private static method. + * + * @param receiver The object from which to read the private static method. + * @param state The class constructor containing the definition of the static method. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The function to return as the private static method. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldGet<T extends new (...args: any[]) => unknown, V extends (...args: any[]) => unknown>( + receiver: T, + state: T, + kind: "m", + f: V +): V; + +/** + * Emulates writing to a private instance field. + * + * @param receiver The instance on which to set a private field value. + * @param state A WeakMap used to store the private field value for an instance. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean, set(o: T, value: V): unknown }, + value: V, + kind?: "f" +): V; + +/** + * Emulates writing to a private static field. + * + * @param receiver The object on which to set the private static field. + * @param state The class constructor containing the definition of the private static field. + * @param value The value to store in the private field. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The descriptor that holds the static field value. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "f", + f: { value: V } +): V; + +/** + * Emulates writing to a private instance "set" accessor. + * + * @param receiver The instance on which to evaluate the private instance "set" accessor. + * @param state A WeakSet used to verify an instance supports the private "set" accessor. + * @param value The value to store in the private accessor. + * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `state` doesn't have an entry for `receiver`. + */ +export declare function __classPrivateFieldSet<T extends object, V>( + receiver: T, + state: { has(o: T): boolean }, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Emulates writing to a private static "set" accessor. + * + * @param receiver The object on which to evaluate the private static "set" accessor. + * @param state The class constructor containing the definition of the static "set" accessor. + * @param value The value to store in the private field. 
+ * @param kind Either `"f"` for a field, `"a"` for an accessor, or `"m"` for a method. + * @param f The "set" accessor function to evaluate. + * + * @throws {TypeError} If `receiver` is not `state`. + */ +export declare function __classPrivateFieldSet<T extends new (...args: any[]) => unknown, V>( + receiver: T, + state: T, + value: V, + kind: "a", + f: (v: V) => void +): V; + +/** + * Checks for the existence of a private field/method/accessor. + * + * @param state The class constructor containing the static member, or the WeakMap or WeakSet associated with a private instance member. + * @param receiver The object for which to test the presence of the private member. + */ +export declare function __classPrivateFieldIn( + state: (new (...args: any[]) => unknown) | { has(o: any): boolean }, + receiver: unknown, +): boolean; + +/** + * Creates a re-export binding on `object` with key `objectKey` that references `target[key]`. + * + * @param object The local `exports` object. + * @param target The object to re-export from. + * @param key The property key of `target` to re-export. + * @param objectKey The property key to re-export as. Defaults to `key`. + */ +export declare function __createBinding(object: object, target: object, key: PropertyKey, objectKey?: PropertyKey): void; diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.html b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.html new file mode 100644 index 0000000000..b122e41b06 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.js b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.js new file mode 100644 index 0000000000..e6d7777168 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.es6.js @@ -0,0 +1,248 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global Reflect, Promise */ + +var extendStatics = function(d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + return extendStatics(d, b); +}; + +export function __extends(d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); +} + +export var __assign = function() { + __assign = Object.assign || function __assign(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + } + return __assign.apply(this, arguments); +} + +export function __rest(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; +} + +export function __decorate(decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; +} + +export function __param(paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } +} + +export function __metadata(metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); +} + +export function __awaiter(thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +} + +export function __generator(thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +} + +export var __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +}); + +export function __exportStar(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); +} + +export function __values(o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); +} + +export function __read(o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; +} + +/** @deprecated */ +export function __spread() { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; +} + +/** @deprecated */ +export function __spreadArrays() { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; +} + +export function __spreadArray(to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); +} + +export function __await(v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); +} + +export function __asyncGenerator(thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +} + +export function __asyncDelegator(o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } +} + +export function __asyncValues(o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +} + +export function __makeTemplateObject(cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; +}; + +var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}; + +export function __importStar(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +} + +export function __importDefault(mod) { + return (mod && mod.__esModule) ? mod : { default: mod }; +} + +export function __classPrivateFieldGet(receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +} + +export function __classPrivateFieldSet(receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +} + +export function __classPrivateFieldIn(state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); +} diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.html b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.html new file mode 100644 index 0000000000..44c9ba51e3 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.html @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/node_modules/tslib/tslib.js b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.js new file mode 100644 index 0000000000..2b7885c1f3 --- /dev/null +++ b/node_modules/@azure/storage-blob/node_modules/tslib/tslib.js @@ -0,0 +1,317 @@ +/****************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __classPrivateFieldIn; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if (typeof module === "object" && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + __classPrivateFieldIn = function (state, receiver) { + if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); + return typeof state === "function" ? receiver === state : state.has(receiver); + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); + exporter("__classPrivateFieldIn", __classPrivateFieldIn); +}); diff --git a/node_modules/@azure/storage-blob/package.json b/node_modules/@azure/storage-blob/package.json new file mode 100644 index 0000000000..c3f11b209d --- /dev/null +++ b/node_modules/@azure/storage-blob/package.json @@ -0,0 +1,187 @@ +{ + "name": "@azure/storage-blob", + "sdk-type": "client", + "version": "12.11.0", + "description": "Microsoft Azure Storage SDK for JavaScript - Blob", + "main": "./dist/index.js", + "module": "./dist-esm/storage-blob/src/index.js", + "browser": { + "./dist-esm/storage-blob/src/index.js": "./dist-esm/storage-blob/src/index.browser.js", + "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js": "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js", + "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js": "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js", + "./dist-esm/storage-blob/src/utils/utils.node.js": "./dist-esm/storage-blob/src/utils/utils.browser.js", + "./dist-esm/storage-blob/test/utils/index.js": "./dist-esm/storage-blob/test/utils/index.browser.js", + "./dist-esm/storage-blob/src/BatchUtils.js": "./dist-esm/storage-blob/src/BatchUtils.browser.js", + "./dist-esm/storage-blob/src/BlobDownloadResponse.js": "./dist-esm/storage-blob/src/BlobDownloadResponse.browser.js", + "./dist-esm/storage-blob/src/BlobQueryResponse.js": "./dist-esm/storage-blob/src/BlobQueryResponse.browser.js", + "./dist-esm/storage-common/src/BufferScheduler.js": "./dist-esm/storage-common/src/BufferScheduler.browser.js", + "./dist-esm/storage-common/src/index.js": "./dist-esm/storage-common/src/index.browser.js", + "fs": false, + "os": false, + "process": false + }, + "react-native": { + "./dist/index.js": "./dist-esm/storage-blob/src/index.js" + }, + "types": "./types/latest/storage-blob.d.ts", + "typesVersions": { + "<3.6": { + "*": [ + "./types/3.1/storage-blob.d.ts" + ] + } + 
}, + "engines": { + "node": ">=12.0.0" + }, + "scripts": { + "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", + "build:browser": "tsc -p . && dev-tool run bundle", + "build:node": "tsc -p . && dev-tool run bundle", + "build:test": "tsc -p . && dev-tool run bundle", + "build:types": "downlevel-dts types/latest types/3.1", + "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "build:samples": "echo Obsolete;", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf dist dist-* types temp statistics.html coverage coverage-browser .nyc_output *.tgz *.log test*.xml TEST*.xml", + "clean:samples": "rimraf samples/v12/javascript/node_modules samples/v12/typescript/node_modules samples/v12/typescript/dist samples/v12/typescript/package-lock.json samples/v12/javascript/package-lock.json", + "extract-api": "tsc -p . && api-extractor run --local", + "execute:samples": "dev-tool samples run samples-dev", + "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "karma start --single-run", + "integration-test:node": "nyc mocha -r esm --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --full-trace -t 300000 \"dist-esm/storage-blob/test/*.spec.js\" \"dist-esm/storage-blob/test/node/*.spec.js\"", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "generate:client": "autorest --typescript ./swagger/README.md", + "lint:fix": "eslint package.json api-extractor.json README.md src test --ext .ts,.javascript,.js --fix", + "lint": "eslint package.json api-extractor.json README.md src test --ext .ts,.javascript,.js", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser", + "test:node": "npm run clean && npm run build:test && npm run unit-test:node", + "test": "npm run clean && npm run build:test && npm run unit-test", + "unit-test:browser": "karma start --single-run", + "unit-test:node": "mocha -r esm --require ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace \"test/{,!(browser)/**/}*.spec.ts\"", + "unit-test": "npm run unit-test:node && npm run unit-test:browser", + "emulator-tests": "cross-env STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true && npm run test:node" + }, + "files": [ + "BreakingChanges.md", + "dist/", + "dist-esm/storage-blob/src/", + "dist-esm/storage-internal-avro/src/", + "dist-esm/storage-common/src/", + "types/latest/storage-blob.d.ts", + "types/3.1/storage-blob.d.ts", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "storage", + "blob", + "cloud", + "node.js", + "typescript", + "javascript", + "browser" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/storage/storage-blob/", + "sideEffects": false, + "//metadata": { + "constantPaths": [ + { + "path": "src/generated/src/storageClientContext.ts", + "prefix": 
"packageVersion" + }, + { + "path": "src/utils/constants.ts", + "prefix": "SDK_VERSION" + }, + { + "path": "swagger/README.md", + "prefix": "package-version" + } + ] + }, + "//sampleConfiguration": { + "skip": [ + "advancedRequestOptions.js", + "anonymousAuth.js", + "azureAdAuth.js", + "customPipeline.js", + "customizedClientHeaders.js", + "listBlobsByHierarchy.js", + "listBlobs.js", + "listContainers.js", + "snapshots.js", + "sharedKeyAuth.js" + ], + "productName": "Azure Storage Blob", + "productSlugs": [ + "azure", + "azure-storage" + ], + "requiredResources": { + "Azure Storage Account": "https://docs.microsoft.com/azure/storage/common/storage-account-overview" + } + }, + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-http": "^2.0.0", + "@azure/core-lro": "^2.2.0", + "@azure/core-paging": "^1.1.1", + "@azure/core-tracing": "1.0.0-preview.13", + "@azure/logger": "^1.0.0", + "events": "^3.0.0", + "tslib": "^2.2.0" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/identity": "^2.0.1", + "@azure/test-utils": "^1.0.0", + "@azure-tools/test-recorder": "^1.0.0", + "@azure/test-utils-perf": "^1.0.0", + "@microsoft/api-extractor": "7.18.11", + "@types/chai": "^4.1.6", + "@types/mocha": "^7.0.2", + "@types/node": "^12.0.0", + "@types/node-fetch": "^2.5.0", + "chai": "^4.2.0", + "cross-env": "^7.0.2", + "dotenv": "^8.2.0", + "downlevel-dts": "^0.8.0", + "es6-promise": "^4.2.5", + "eslint": "^8.0.0", + "esm": "^3.2.18", + "inherits": "^2.0.3", + "karma": "^6.2.0", + "karma-chrome-launcher": "^3.0.0", + "karma-coverage": "^2.0.0", + "karma-edge-launcher": "^0.4.2", + "karma-env-preprocessor": "^0.1.1", + "karma-firefox-launcher": "^1.1.0", + "karma-ie-launcher": "^1.0.0", + "karma-json-preprocessor": "^0.3.3", + "karma-json-to-file-reporter": "^1.0.1", + "karma-junit-reporter": "^2.0.1", + "karma-mocha": "^2.0.1", + "karma-mocha-reporter": "^2.2.5", + "karma-sourcemap-loader": "^0.3.8", + "mocha": "^7.1.1", + "mocha-junit-reporter": "^2.0.0", + "nyc": "^15.0.0", + "prettier": "^2.5.1", + "puppeteer": "^14.0.0", + "rimraf": "^3.0.0", + "source-map-support": "^0.5.9", + "ts-node": "^10.0.0", + "typescript": "~4.2.0", + "util": "^0.12.1" + } +} diff --git a/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts b/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts new file mode 100644 index 0000000000..19b0109afa --- /dev/null +++ b/node_modules/@azure/storage-blob/types/3.1/storage-blob.d.ts @@ -0,0 +1,9345 @@ +/// +import { AbortSignalLike } from '@azure/abort-controller'; +import { AzureLogger } from '@azure/logger'; +import { BaseRequestPolicy } from '@azure/core-http'; +import * as coreHttp from '@azure/core-http'; +import { deserializationPolicy } from '@azure/core-http'; +import { HttpHeaders } from '@azure/core-http'; +import { HttpOperationResponse } from '@azure/core-http'; +import { HttpRequestBody } from '@azure/core-http'; +import { HttpResponse } from '@azure/core-http'; +import { HttpClient as IHttpClient } from '@azure/core-http'; +import { KeepAliveOptions } from '@azure/core-http'; +import { OperationTracingOptions } from '@azure/core-tracing'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { PollerLike } from '@azure/core-lro'; +import { PollOperationState } from '@azure/core-lro'; +import { ProxyOptions } from '@azure/core-http'; +import { Readable } from 'stream'; +import { RequestPolicy } from '@azure/core-http'; +import { RequestPolicyFactory } from 
'@azure/core-http'; +import { RequestPolicyOptions } from '@azure/core-http'; +import { RestError } from '@azure/core-http'; +import { ServiceClientOptions } from '@azure/core-http'; +import { TokenCredential } from '@azure/core-http'; +import { TransferProgressEvent } from '@azure/core-http'; +import { UserAgentOptions } from '@azure/core-http'; +import { WebResource } from '@azure/core-http'; +/** An Access policy */ +export declare interface AccessPolicy { + /** the date-time the policy is active */ + startsOn?: string; + /** the date-time the policy expires */ + expiresOn?: string; + /** the permissions for the acl policy */ + permissions?: string; +} +/** Defines values for AccessTier. */ +export declare type AccessTier = "P4" | "P6" | "P10" | "P15" | "P20" | "P30" | "P40" | "P50" | "P60" | "P70" | "P80" | "Hot" | "Cool" | "Archive"; +/** Defines values for AccountKind. */ +export declare type AccountKind = "Storage" | "BlobStorage" | "StorageV2" | "FileStorage" | "BlockBlobStorage"; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class AccountSASPermissions { + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions: string): AccountSASPermissions; + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions; + /** + * Permission to read resources and list queues and tables granted. + */ + read: boolean; + /** + * Permission to write resources granted. + */ + write: boolean; + /** + * Permission to create blobs and files granted. + */ + delete: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add: boolean; + /** + * Permission to create blobs and files granted. + */ + create: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update: boolean; + /** + * Permission to get and delete messages granted. + */ + process: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Permission to filter blobs. + */ + filter: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} +/** + * A type that looks like an account SAS permission. + * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface AccountSASPermissionsLike { + /** + * Permission to read resources and list queues and tables granted. + */ + read?: boolean; + /** + * Permission to write resources granted. + */ + write?: boolean; + /** + * Permission to delete blobs and files granted. + */ + delete?: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion?: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list?: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add?: boolean; + /** + * Permission to create blobs and files granted. + */ + create?: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update?: boolean; + /** + * Permission to get and delete messages granted. + */ + process?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Permission to filter blobs. + */ + filter?: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export declare class AccountSASResourceTypes { + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes: string): AccountSASResourceTypes; + /** + * Permission to access service level APIs granted. + */ + service: boolean; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container: boolean; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object: boolean; + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. 
+ */ +export declare class AccountSASServices { + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services: string): AccountSASServices; + /** + * Permission to access blob resources granted. + */ + blob: boolean; + /** + * Permission to access file resources granted. + */ + file: boolean; + /** + * Permission to access queue resources granted. + */ + queue: boolean; + /** + * Permission to access table resources granted. + */ + table: boolean; + /** + * Converts the given services to a string. + * + */ + toString(): string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once + * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation + * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters} + * exist because the former is mutable and a logical representation while the latter is immutable and used to generate + * actual REST requests. + * + * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 + * for more conceptual information on SAS + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * for descriptions of the parameters, including which are required + */ +export declare interface AccountSASSignatureValues { + /** + * If not provided, this defaults to the service version targeted by this version of the library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * The time after which the SAS will no longer work. + */ + expiresOn: Date; + /** + * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help + * constructing the permissions string. + */ + permissions: AccountSASPermissions; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to + * construct this value. + */ + services: string; + /** + * The values that indicate the resource types accessible with this SAS. Please refer + * to {@link AccountSASResourceTypes} to construct this value. + */ + resourceTypes: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export declare class AnonymousCredential extends Credential_2 { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): AnonymousCredentialPolicy; +} +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). 
+ */ +export declare class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); +} +/** Defines headers for AppendBlob_appendBlockFromUrl operation. */ +export declare interface AppendBlobAppendBlockFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation. + */ +export declare interface AppendBlobAppendBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. 
+ * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the appendBlockFromUrl operation. */ +export declare type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockFromUrlHeaders; + }; +}; +/** Defines headers for AppendBlob_appendBlock operation. */ +export declare interface AppendBlobAppendBlockHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. 
*/ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link AppendBlobClient.appendBlock} operation. + */ +export declare interface AppendBlobAppendBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Callback to receive events on the progress of append block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the appendBlock operation. */ +export declare type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockHeaders; + }; +}; +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export declare class AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. + */ + private appendBlobContext; + /** + * + * Creates an instance of AppendBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. 
+ * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): AppendBlobClient; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + create(options?: AppendBlobCreateOptions): Promise; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + createIfNotExists(options?: AppendBlobCreateIfNotExistsOptions): Promise; + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + seal(options?: AppendBlobSealOptions): Promise; + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + appendBlock(body: HttpRequestBody, contentLength: number, options?: AppendBlobAppendBlockOptions): Promise; + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + appendBlockFromURL(sourceURL: string, sourceOffset: number, count: number, options?: AppendBlobAppendBlockFromURLOptions): Promise; +} +/** Defines headers for AppendBlob_create operation. */ +export declare interface AppendBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link AppendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * HTTP headers to set when creating append blobs. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. 
+ */ + legalHold?: boolean; +} +/** + * Contains response data for the {@link appendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure {@link AppendBlobClient.create} operation. + */ +export declare interface AppendBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating append blobs. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when creating append blobs. A common header + * to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the create operation. */ +export declare type AppendBlobCreateResponse = AppendBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobCreateHeaders; + }; +}; +/** + * Conditions to add to the creation of this append blob. + */ +export declare interface AppendBlobRequestConditions extends BlobRequestConditions, AppendPositionAccessConditions { +} +/** + * Options to configure {@link AppendBlobClient.seal} operation. + */ +export declare interface AppendBlobSealOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet. + */ + conditions?: AppendBlobRequestConditions; +} +/** Parameter group */ +export declare interface AppendPositionAccessConditions { + /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). 
*/ + maxSize?: number; + /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + appendPosition?: number; +} +/** Defines values for ArchiveStatus. */ +export declare type ArchiveStatus = "rehydrate-pending-to-hot" | "rehydrate-pending-to-cool"; +export { BaseRequestPolicy }; +/** + * A request associated with a batch operation. + */ +export declare interface BatchSubRequest { + /** + * The URL of the resource to request operation. + */ + url: string; + /** + * The credential used for sub request. + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. + * You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; +} +/** + * The response data associated with a single request within a batch operation. + */ +export declare interface BatchSubResponse { + /** + * The status code of the sub operation. + */ + status: number; + /** + * The status message of the sub operation. + */ + statusMessage: string; + /** + * The error code of the sub operation, if the sub operation failed. + */ + errorCode?: string; + /** + * The HTTP response headers. + */ + headers: HttpHeaders; + /** + * The body as text. + */ + bodyAsText?: string; + /** + * The batch sub request corresponding to the sub response. + */ + _request: BatchSubRequest; +} +/** Defines headers for Blob_abortCopyFromURL operation. */ +export declare interface BlobAbortCopyFromURLHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.abortCopyFromURL} operation. + */ +export declare interface BlobAbortCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** Contains response data for the abortCopyFromURL operation. */ +export declare type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobAbortCopyFromURLHeaders; + }; +}; +/** + * Options to configure Blob - Acquire Lease operation. 
+ */ +export declare interface BlobAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export declare class BlobBatch { + private batchRequest; + private readonly batch; + private batchType; + constructor(); + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType(): string; + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody(): string; + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests(): Map; + private addSubRequestInternal; + private setBatchType; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlob(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param options - + */ + deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. 
+ * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobAccessTier(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param tier - + * @param options - + */ + setBlobAccessTier(blobClient: BlobClient, tier: AccessTier, options?: BlobSetTierOptions): Promise; +} +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export declare class BlobBatchClient { + private serviceOrContainerContext; + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. 
+ */ + createBatch(): BlobBatch; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operations will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resources to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlobs(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation(subrequest) will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs to delete. + * @param options - + */ + deleteBlobs(blobClients: BlobClient[], options?: BlobDeleteOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobsAccessTier(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. 
A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs which should have a new tier set. + * @param tier - + * @param options - + */ + setBlobsAccessTier(blobClients: BlobClient[], tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + submitBatch(batchRequest: BlobBatch, options?: BlobBatchSubmitBatchOptionalParams): Promise; +} +/** + * Contains response data for the {@link deleteBlobs} operation. + */ +export declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse; +/** + * Contains response data for the {@link setBlobsAccessTier} operation. + */ +export declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse; +/** + * Options to configure the Service - Submit Batch Optional Params. + */ +export declare interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel { +} +/** + * Contains response data for blob batch operations. + */ +export declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse & ServiceSubmitBatchHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions { + /** + * The amount of time in milliseconds the poller should wait between + * calls to the service to determine the status of the Blob copy. + * Defaults to 15 seconds. + */ + intervalInMs?: number; + /** + * Callback to receive the state of the copy progress. + */ + onProgress?: (state: BlobBeginCopyFromUrlPollState) => void; + /** + * Serialized poller state that can be used to resume polling from. + * This may be useful when starting a copy on one process or thread + * and you wish to continue polling on another process or thread. 
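Besides `submitBatch` with a hand-built `BlobBatch`, the class above declares `deleteBlobs` and `setBlobsAccessTier` convenience overloads that assemble and submit the batch in one call. A minimal sketch follows; it assumes `blobBatchClient`, `credential`, and the blob URLs already exist (as in the earlier sketch), and it assumes the batch response also exposes `subResponsesFailedCount`, which is not shown in this diff.

```js
// Placeholder URLs -- substitute blobs that actually exist.
const urls = [
  "https://myaccount.blob.core.windows.net/mycontainer/blob1",
  "https://myaccount.blob.core.windows.net/mycontainer/blob2"
];

// One batch request containing two delete sub-requests.
const deleteResponse = await blobBatchClient.deleteBlobs(urls, credential, {
  deleteSnapshots: "include"
});
// subResponsesFailedCount is assumed from the batch response shape.
console.log(deleteResponse.subResponsesSucceededCount, deleteResponse.subResponsesFailedCount);

// One batch request that moves the same blobs to the Cool tier.
const tierResponse = await blobBatchClient.setBlobsAccessTier(urls, credential, "Cool");
console.log(tierResponse.subResponsesSucceededCount);
```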
+ * + * To get serialized poller state, call `poller.toString()` on an existing + * poller. + */ + resumeFrom?: string; +} +/** + * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}. + * + * This state is passed into the user-specified `onProgress` callback + * whenever copy progress is detected. + */ +export declare interface BlobBeginCopyFromUrlPollState extends PollOperationState { + /** + * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}. + */ + readonly blobClient: CopyPollerBlobClient; + /** + * The copyId that identifies the in-progress blob copy. + */ + copyId?: string; + /** + * the progress of the blob copy as reported by the service. + */ + copyProgress?: string; + /** + * The source URL provided in {@link BlobClient.beginCopyFromURL}. + */ + copySource: string; + /** + * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call. + * This is exposed for the poller and should not be modified directly. + */ + readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions; +} +/** + * Contains response data for the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse { +} +/** + * Options to configure Blob - Break Lease operation. + */ +export declare interface BlobBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Blob - Change Lease operation. + */ +export declare interface BlobChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export declare class BlobClient extends StorageClient { + /** + * blobContext provided by protocol layer. + */ + private blobContext; + private _name; + private _containerName; + private _versionId?; + private _snapshot?; + /* + * The name of the blob. + */ + readonly name: string; + /* + * The name of the storage container the blob is associated with. + */ + readonly containerName: string; + /** + * + * Creates an instance of BlobClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. 
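The `resumeFrom` option described above accepts poller state serialized with `poller.toString()`, which is how a long copy can be handed off between processes. A sketch under those assumptions, with `blobClient` and `sourceUrl` as placeholders:

```js
// Process A: start the copy and capture the serialized poller state.
const poller = await blobClient.beginCopyFromURL(sourceUrl);
const serializedState = poller.toString();
// ...persist serializedState somewhere durable (queue, database, file)...

// Process B: rebuild the poller from the saved state and wait for completion.
const resumedPoller = await blobClient.beginCopyFromURL(sourceUrl, {
  resumeFrom: serializedState,
  intervalInMs: 5000, // poll every 5 seconds instead of the 15-second default
  onProgress(state) {
    console.log(`copyId=${state.copyId} progress=${state.copyProgress}`);
  }
});
const result = await resumedPoller.pollUntilDone();
console.log(result.copyStatus);
```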
+ * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot: string): BlobClient; + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId: string): BlobClient; + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient(): AppendBlobClient; + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient(): BlockBlobClient; + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient(): PageBlobClient; + /** + * Reads or downloads a blob from the system, including its metadata and properties. 
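The three `BlobClient` constructor overloads above map to three construction routes; the sketch below shows all of them, plus the derived clients. The connection string, account, container, and blob names are placeholders, and the connection-string route is Node.js only.

```js
const { BlobClient, AnonymousCredential, newPipeline } = require("@azure/storage-blob");

// Route 1 (Node.js only): connection string + container/blob names (placeholder env var).
const fromConnString = new BlobClient(process.env.AZURE_STORAGE_CONNECTION_STRING, "mycontainer", "my-blob.txt");

// Route 2: full blob URL (optionally carrying a SAS) plus a credential.
const fromUrl = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/my-blob.txt?sasString",
  new AnonymousCredential()
);

// Route 3: full blob URL plus an explicitly built pipeline.
const fromPipeline = new BlobClient(
  "https://myaccount.blob.core.windows.net/mycontainer/my-blob.txt",
  newPipeline(new AnonymousCredential())
);

// Derived clients reuse the same URL and pipeline.
const blockBlobClient = fromUrl.getBlockBlobClient();
const snapshotClient = fromUrl.withSnapshot("2022-01-01T00:00:00.0000000Z"); // placeholder snapshot timestamp
```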
+ * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + download(offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. + * + * @param options - options to Exists operation. + */ + exists(options?: BlobExistsOptions): Promise; + /** + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. + */ + getProperties(options?: BlobGetPropertiesOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. 
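Putting `exists` and `getProperties` together, a minimal existence-then-inspect sketch (assuming a `blobClient` placeholder), keeping in mind the lowercased-metadata caveat noted above and that another client may delete the blob between the two calls:

```js
if (await blobClient.exists()) {
  const properties = await blobClient.getProperties();
  console.log(`content-type: ${properties.contentType}`);
  console.log(`size in bytes: ${properties.contentLength}`);
  // Metadata keys come back lowercased, per the warning above.
  console.log(properties.metadata);
} else {
  console.log("Blob not found (or removed by another client).");
}
```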
+ */ + delete(options?: BlobDeleteOptions): Promise; + /** + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param options - Optional options to Blob Delete operation. + */ + deleteIfExists(options?: BlobDeleteOptions): Promise; + /** + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. + */ + undelete(options?: BlobUndeleteOptions): Promise; + /** + * Sets system properties on the blob. + * + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. + */ + setHTTPHeaders(blobHTTPHeaders?: BlobHTTPHeaders, options?: BlobSetHTTPHeadersOptions): Promise; + /** + * Sets user-defined metadata for the specified blob as one or more name-value pairs. + * + * If no option provided, or no metadata defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. + */ + setMetadata(metadata?: Metadata, options?: BlobSetMetadataOptions): Promise; + /** + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * + * @param tags - + * @param options - + */ + setTags(tags: Tags, options?: BlobSetTagsOptions): Promise; + /** + * Gets the tags associated with the underlying blob. + * + * @param options - + */ + getTags(options?: BlobGetTagsOptions): Promise; + /** + * Get a {@link BlobLeaseClient} that manages leases on the blob. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * + * @param options - Optional options to the Blob Create Snapshot operation. + */ + createSnapshot(options?: BlobCreateSnapshotOptions): Promise; + /** + * Asynchronously copies a blob to a destination within the storage account. 
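The metadata, tag, and delete methods declared above compose naturally; here is a brief round-trip sketch with placeholder key/value pairs:

```js
// Replace existing metadata (keys are stored lowercased).
await blobClient.setMetadata({ project: "alpha", reviewed: "true" });

// Tags: at most 10 per blob; keys 1-128 chars, values 0-256 chars.
await blobClient.setTags({ department: "finance", "retention-class": "short" });
const { tags } = await blobClient.getTags();
console.log(tags); // { department: "finance", "retention-class": "short" }

// deleteIfExists reports whether anything was actually removed.
const { succeeded } = await blobClient.deleteIfExists({ deleteSnapshots: "include" });
console.log(`deleted: ${succeeded}`);
```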
+ * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. + * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + beginCopyFromURL(copySource: string, options?: BlobBeginCopyFromURLOptions): Promise, BlobBeginCopyFromURLResponse>>; + /** + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob + * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. + */ + abortCopyFromURL(copyId: string, options?: BlobAbortCopyFromURLOptions): Promise; + /** + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * + * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication + * @param options - + */ + syncCopyFromURL(copySource: string, options?: BlobSyncCopyFromURLOptions): Promise; + /** + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. + */ + setAccessTier(tier: BlockBlobTier | PremiumPageBlobTier | string, options?: BlobSetTierOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob in parallel to a buffer. + * Offset and count are optional, downloads the entire blob if they are not provided. + * + * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two + * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size, + * consider {@link downloadToFile}. + * + * @param offset - From which position of the block blob to download(in bytes) + * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined + * @param options - BlobDownloadToBufferOptions + */ + downloadToBuffer(offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob in parallel to a buffer. + * Offset and count are optional, downloads the entire blob if they are not provided. + * + * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two + * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size, + * consider {@link downloadToFile}. + * + * @param buffer - Buffer to be fill, must have length larger than count + * @param offset - From which position of the block blob to download(in bytes) + * @param count - How much data(in bytes) to be downloaded. Will download to the end when passing undefined + * @param options - BlobDownloadToBufferOptions + */ + downloadToBuffer(buffer: Buffer, offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Downloads an Azure Blob to a local file. + * Fails if the the given file path already exits. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to Blob download options. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path. 
+ */ + downloadToFile(filePath: string, offset?: number, count?: number, options?: BlobDownloadOptions): Promise; + private getBlobAndContainerNamesFromUrl; + /** + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. + */ + private startCopyFromURL; + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: BlobGenerateSasUrlOptions): Promise; + /** + * Delete the immutablility policy on the blob. + * + * @param options - Optional options to delete immutability policy on the blob. + */ + deleteImmutabilityPolicy(options?: BlobDeleteImmutabilityPolicyOptions): Promise; + /** + * Set immutablility policy on the blob. + * + * @param options - Optional options to set immutability policy on the blob. + */ + setImmutabilityPolicy(immutabilityPolicy: BlobImmutabilityPolicy, options?: BlobSetImmutabilityPolicyOptions): Promise; + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + setLegalHold(legalHoldEnabled: boolean, options?: BlobSetLegalHoldOptions): Promise; +} +/** Defines headers for Blob_copyFromURL operation. */ +export declare interface BlobCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. 
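`generateSasUrl`, declared above, only works when the client was constructed with a shared key credential. A minimal read-only SAS sketch, with the one-hour expiry chosen purely for illustration:

```js
const { BlobSASPermissions } = require("@azure/storage-blob");

// Requires a BlobClient built with a StorageSharedKeyCredential.
const sasUrl = await blobClient.generateSasUrl({
  permissions: BlobSASPermissions.parse("r"),        // read-only
  expiresOn: new Date(Date.now() + 60 * 60 * 1000)   // valid for one hour (illustrative)
});
console.log(sasUrl); // blob URL with the generated SAS token appended
```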
*/ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: SyncCopyStatusType; + /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */ + contentMD5?: Uint8Array; + /** This response header is returned so that the client can check for the integrity of the copied content. */ + xMsContentCrc64?: Uint8Array; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the copyFromURL operation. */ +export declare type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCopyFromURLHeaders; + }; +}; +/** Defines values for BlobCopySourceTags. */ +export declare type BlobCopySourceTags = "REPLACE" | "COPY"; +/** Defines headers for Blob_createSnapshot operation. */ +export declare interface BlobCreateSnapshotHeaders { + /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */ + snapshot?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.createSnapshot} operation. + */ +export declare interface BlobCreateSnapshotOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet when creating blob snapshots. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the createSnapshot operation. */ +export declare type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCreateSnapshotHeaders; + }; +}; +/** Defines headers for Blob_delete operation. */ +export declare interface BlobDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link BlobClient.deleteIfExists} operation. + */ +export declare interface BlobDeleteIfExistsResponse extends BlobDeleteResponse { + /** + * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place. + */ + succeeded: boolean; +} +/** Defines headers for Blob_deleteImmutabilityPolicy operation. */ +export declare interface BlobDeleteImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} +/** + * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation. + */ +export declare interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the deleteImmutabilityPolicy operation. */ +export declare type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & { + /** The underlying HTTP response. 
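Tying `BlobCreateSnapshotOptions` back to `createSnapshot` and `withSnapshot`, a small sketch (metadata values are placeholders): the returned opaque timestamp is what addresses the snapshot afterwards.

```js
// Snapshot-specific metadata applies to the snapshot only.
const snapshotResponse = await blobClient.createSnapshot({
  metadata: { reason: "pre-deploy-backup" }
});

// Address the new snapshot via the returned timestamp.
const snapshotClient = blobClient.withSnapshot(snapshotResponse.snapshot);
const snapshotProps = await snapshotClient.getProperties();
console.log(snapshotProps.metadata); // { reason: "pre-deploy-backup" }
```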
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteImmutabilityPolicyHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.delete} operation. + */ +export declare interface BlobDeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting blobs. + */ + conditions?: BlobRequestConditions; + /** + * Specifies options to delete blobs that have associated snapshots. + * - `include`: Delete the base blob and all of its snapshots. + * - `only`: Delete only the blob's snapshots and not the blob itself. + */ + deleteSnapshots?: DeleteSnapshotsOptionType; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the delete operation. */ +export declare type BlobDeleteResponse = BlobDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteHeaders; + }; +}; +/** Defines headers for Blob_download operation. */ +export declare interface BlobDownloadHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. 
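To make the `deleteSnapshots` values in `BlobDeleteOptions` concrete, a brief sketch of the two modes, with the lease id as a placeholder:

```js
// Remove only the snapshots, keeping the base blob.
await blobClient.delete({ deleteSnapshots: "only" });

// Or remove the base blob together with any remaining snapshots,
// optionally guarded by an active lease on the blob.
await blobClient.delete({
  deleteSnapshots: "include",
  conditions: { leaseId: "<active-lease-id>" } // placeholder lease id
});
```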
For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. 
*/ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** The number of tags associated with the blob */ + tagCount?: number; + /** If this blob has been sealed */ + isSealed?: boolean; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} +/** Optional parameters. */ +export declare interface BlobDownloadOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + leaseAccessConditions?: LeaseAccessConditions; + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** Parameter group */ + cpkInfo?: CpkInfo; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. 
*/ + snapshot?: string; + /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */ + versionId?: string; + /** Return only the bytes of the blob in the specified range. */ + range?: string; + /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentMD5?: boolean; + /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentCRC64?: boolean; +} +/** + * Options to configure the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve. + */ + snapshot?: string; + /** + * When this is set to true and download range of blob, the service returns the MD5 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentMD5?: boolean; + /** + * When this is set to true and download range of blob, the service returns the CRC64 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentCrc64?: boolean; + /** + * Conditions to meet when downloading blobs. + */ + conditions?: BlobRequestConditions; + /** + * Call back to receive events on the progress of download operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original body download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional `FileClient.download()` request will be made + * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached. + * + * Default value is 5, please set a larger value when loading large files in poor network. + */ + maxRetryRequests?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the download operation. */ +export declare type BlobDownloadResponseModel = BlobDownloadHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDownloadHeaders; + }; +}; +/** + * Contains response data for the {@link BlobClient.download} operation. 
+ */ +export declare interface BlobDownloadResponseParsed extends BlobDownloadResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} +/** + * Option interface for the {@link BlobClient.downloadToBuffer} operation. + */ +export declare interface BlobDownloadToBufferOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * blockSize is the data every request trying to download. + * Must be greater than or equal to 0. + * If set to 0 or undefined, blockSize will automatically calculated according to the blob size. + */ + blockSize?: number; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original block download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional FileClient.download() request will be made + * from the broken point, until the requested block has been successfully downloaded or + * maxRetryRequestsPerBlock is reached. + * + * Default value is 5, please set a larger value when in poor network. + */ + maxRetryRequestsPerBlock?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel download. + */ + concurrency?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** + * Options to configure the {@link BlobClient.exists} operation. + */ +export declare interface BlobExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Conditions to meet. + */ + conditions?: BlobRequestConditions; +} +/** + * An interface representing BlobFlatListSegment. + */ +export declare interface BlobFlatListSegment { + blobItems: BlobItem[]; +} +export declare interface BlobFlatListSegmentModel { + blobItems: BlobItemInternal[]; +} +/** + * Options to configure {@link BlobClient.generateSasUrl} operation. + */ +export declare interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: BlobSASPermissions; +} +/** Defines headers for Blob_getProperties operation. */ +export declare interface BlobGetPropertiesHeaders { + /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. 
Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Included if the blob is incremental copy blob. */ + isIncrementalCopy?: boolean; + /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */ + destinationSnapshot?: string; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */ + contentLength?: number; + /** The content type specified for the blob. 
The default content type is 'application/octet-stream' */ + contentType?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */ + accessTier?: string; + /** For page blobs on a premium storage account only. 
If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */ + accessTierInferred?: boolean; + /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */ + archiveStatus?: string; + /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */ + accessTierChangedOn?: Date; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** The number of tags associated with the blob */ + tagCount?: number; + /** The time this blob will expire. */ + expiresOn?: Date; + /** If this blob has been sealed */ + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */ + rehydratePriority?: RehydratePriority; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting blob properties. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** + * Contains response data for the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} +/** Contains response data for the getProperties operation. */ +export declare type BlobGetPropertiesResponseModel = BlobGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobGetPropertiesHeaders; + }; +}; +/** Defines headers for Blob_getTags operation. */ +export declare interface BlobGetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.getTags} operation. + */ +export declare interface BlobGetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} +/** + * Contains response data for the {@link BlobClient.getTags} operation. + */ +export declare type BlobGetTagsResponse = { + tags: Tags; +} & BlobGetTagsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: BlobGetTagsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: BlobTags; + }; +}; +/** + * An interface representing BlobHierarchyListSegment. + */ +export declare interface BlobHierarchyListSegment { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItem[]; +} +export declare interface BlobHierarchyListSegmentModel { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItemInternal[]; +} +/** Parameter group */ +export declare interface BlobHTTPHeaders { + /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */ + blobCacheControl?: string; + /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */ + blobContentType?: string; + /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */ + blobContentMD5?: Uint8Array; + /** Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. */ + blobContentEncoding?: string; + /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */ + blobContentLanguage?: string; + /** Optional. Sets the blob's Content-Disposition header. */ + blobContentDisposition?: string; +} +/** + * Describe immutable policy for blob. + */ +export declare interface BlobImmutabilityPolicy { + /** + * Specifies the date time when the blobs immutability policy is set to expire. + */ + expiriesOn?: Date; + /** + * Specifies the immutability policy mode to set on the blob. + */ + policyMode?: BlobImmutabilityPolicyMode; +} +/** Defines values for BlobImmutabilityPolicyMode. 
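The `BlobGetPropertiesResponse` and `BlobGetTagsResponse` shapes declared above are what `BlobClient.getProperties` and `BlobClient.getTags` resolve to. A minimal usage sketch follows; the environment variable, container name, and blob name are placeholders, not values from this diff.

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function inspectBlob(): Promise<void> {
  // Assumed connection string env var and resource names; replace with real values.
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  );
  const blobClient = serviceClient
    .getContainerClient("my-container")
    .getBlobClient("my-blob.txt");

  // Resolves to BlobGetPropertiesResponse; the fields below are headers declared above.
  const props = await blobClient.getProperties();
  console.log(props.contentType, props.contentLength, props.accessTier, props.isServerEncrypted);

  // Resolves to BlobGetTagsResponse; the parsed tag dictionary is exposed as `tags`.
  const { tags } = await blobClient.getTags();
  console.log(tags);
}

inspectBlob().catch(console.error);
```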
*/ +export declare type BlobImmutabilityPolicyMode = "Mutable" | "Unlocked" | "Locked"; +/** + * An Azure Storage blob + */ +export declare interface BlobItem { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + properties: BlobProperties; + metadata?: { + [propertyName: string]: string; + }; + tags?: Tags; + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + hasVersionsOnly?: boolean; +} +/** An Azure Storage blob */ +export declare interface BlobItemInternal { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + /** Properties of a blob */ + properties: BlobProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; + /** Blob tags */ + blobTags?: BlobTags; + /** Dictionary of */ + objectReplicationMetadata?: { + [propertyName: string]: string; + }; + /** Inactive root blobs which have any versions would have such tag with value true. */ + hasVersionsOnly?: boolean; +} +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export declare class BlobLeaseClient { + private _leaseId; + private _url; + private _containerOrBlobOperation; + private _isContainer; + /* + * Gets the lease Id. + * + * @readonly + */ + readonly leaseId: string; + /* + * Gets the url. + * + * @readonly + */ + readonly url: string; + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client: ContainerClient | BlobClient, leaseId?: string); + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + acquireLease(duration: number, options?: LeaseOperationOptions): Promise; + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + changeLease(proposedLeaseId: string, options?: LeaseOperationOptions): Promise; + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + releaseLease(options?: LeaseOperationOptions): Promise; + /** + * To renew the lease. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + renewLease(options?: LeaseOperationOptions): Promise; + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + breakLease(breakPeriod: number, options?: LeaseOperationOptions): Promise; +} +export declare interface BlobPrefix { + name: string; +} +/** Properties of a blob */ +export declare interface BlobProperties { + createdOn?: Date; + lastModified: Date; + etag: string; + /** Size in bytes */ + contentLength?: number; + contentType?: string; + contentEncoding?: string; + contentLanguage?: string; + contentMD5?: Uint8Array; + contentDisposition?: string; + cacheControl?: string; + blobSequenceNumber?: number; + blobType?: BlobType; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + copyId?: string; + copyStatus?: CopyStatusType; + copySource?: string; + copyProgress?: string; + copyCompletedOn?: Date; + copyStatusDescription?: string; + serverEncrypted?: boolean; + incrementalCopy?: boolean; + destinationSnapshot?: string; + deletedOn?: Date; + remainingRetentionDays?: number; + accessTier?: AccessTier; + accessTierInferred?: boolean; + archiveStatus?: ArchiveStatus; + customerProvidedKeySha256?: string; + /** The name of the encryption scope under which the blob is encrypted. */ + encryptionScope?: string; + accessTierChangedOn?: Date; + tagCount?: number; + expiresOn?: Date; + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */ + rehydratePriority?: RehydratePriority; + lastAccessedOn?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; +} +/** + * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}. + */ +export declare interface BlobQueryArrowConfiguration { + /** + * Kind. + */ + kind: "arrow"; + /** + * List of {@link BlobQueryArrowField} describing the schema of the data. + */ + schema: BlobQueryArrowField[]; +} +/** + * Describe a field in {@link BlobQueryArrowConfiguration}. + */ +export declare interface BlobQueryArrowField { + /** + * The type of the field. + */ + type: BlobQueryArrowFieldType; + /** + * The name of the field. + */ + name?: string; + /** + * The precision of the field. Required if type is "decimal". + */ + precision?: number; + /** + * The scale of the field. Required if type is is "decimal". + */ + scale?: number; +} +/** + * The type of a {@link BlobQueryArrowField}. 
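`BlobLeaseClient` above is constructed directly from a `ContainerClient` or `BlobClient`. A hedged sketch of taking and releasing a fixed-duration blob lease; the connection string and names are placeholders.

```ts
import { BlobLeaseClient, BlobServiceClient } from "@azure/storage-blob";

async function leaseBlob(): Promise<void> {
  const blobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // assumed env var
  )
    .getContainerClient("my-container")
    .getBlobClient("my-blob.txt");

  // Constructor declared above: (client: ContainerClient | BlobClient, leaseId?: string).
  const leaseClient = new BlobLeaseClient(blobClient);

  // Fixed-duration leases must be 15-60 seconds; -1 requests an infinite lease.
  await leaseClient.acquireLease(30);
  console.log(`Acquired lease ${leaseClient.leaseId}`);

  try {
    // Writes against the blob now need the lease id in their access conditions.
    await blobClient.setMetadata(
      { locked: "true" },
      { conditions: { leaseId: leaseClient.leaseId } }
    );
  } finally {
    // Free the lease so another client can acquire one immediately.
    await leaseClient.releaseLease();
  }
}
```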
+ */ +export declare type BlobQueryArrowFieldType = "int64" | "bool" | "timestamp[ms]" | "string" | "double" | "decimal"; +/** + * Options to query blob with CSV format. + */ +export declare interface BlobQueryCsvTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a CSV format blob. + */ + kind: "csv"; + /** + * Column separator. Default is ",". + */ + columnSeparator?: string; + /** + * Field quote. + */ + fieldQuote?: string; + /** + * Escape character. + */ + escapeCharacter?: string; + /** + * Has headers. Default is false. + */ + hasHeaders?: boolean; +} +/** + * Blob query error type. + */ +export declare interface BlobQueryError { + /** + * Whether error is fatal. Fatal error will stop query. + */ + isFatal: boolean; + /** + * Error name. + */ + name: string; + /** + * Position in bytes of the query. + */ + position: number; + /** + * Error description. + */ + description: string; +} +/** Defines headers for Blob_query operation. */ +export declare interface BlobQueryHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletionTime?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. 
*/ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} +/** + * Options to query blob with JSON format. + */ +export declare interface BlobQueryJsonTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a JSON format blob. + */ + kind: "json"; +} +/** + * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}. + */ +export declare interface BlobQueryParquetConfiguration { + /** + * Kind. + */ + kind: "parquet"; +} +/** Contains response data for the query operation. */ +export declare type BlobQueryResponseModel = BlobQueryHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobQueryHeaders; + }; +}; +/** + * Options to configure Blob - Release Lease operation. + */ +export declare interface BlobReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Blob - Renew Lease operation. + */ +export declare interface BlobRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} +/** + * standard HTTP conditional headers, tags condition and lease condition + */ +export declare interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions { +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
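The `kind`-tagged configurations above (csv, json, parquet, arrow) are the input/output shapes that the excerpt's `{@link BlockBlobQueryOptions}` references point to, i.e. the options for `BlockBlobClient.query` (Node.js only). A rough sketch under those assumptions; the connection string, names, and query text are placeholders.

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function queryCsvBlob(): Promise<void> {
  const blockBlobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // assumed env var
  )
    .getContainerClient("my-container")
    .getBlockBlobClient("data.csv");

  // Input is parsed as headed CSV; output is re-serialized as JSON records.
  const response = await blockBlobClient.query("SELECT * from BlobStorage WHERE price > 100", {
    inputTextConfiguration: { kind: "csv", recordSeparator: "\n", hasHeaders: true },
    outputTextConfiguration: { kind: "json", recordSeparator: "\n" },
    onError: (err) => console.error(`row at byte ${err.position}: ${err.description}`),
  });

  // In Node.js the filtered rows arrive as a readable stream (see BlobQueryResponseModel above).
  response.readableStreamBody?.pipe(process.stdout);
}
```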
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): BlobSASPermissions; + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString(): string; +} +/** + * A type that looks like a Blob SAS permission. + * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface BlobSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobSASSignatureValues is used to help generating Blob service SAS tokens for containers or blobs. + */ +export declare interface BlobSASSignatureValues { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. 
+ */ + expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource + * being accessed for help constructing the permissions string. + */ + permissions?: BlobSASPermissions | ContainerSASPermissions; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * The name of the container the SAS user may access. + */ + containerName: string; + /** + * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided. + */ + blobName?: string; + /** + * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09. + */ + snapshotTime?: string; + /** + * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10. + */ + versionId?: string; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; + /** + * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user + * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will + * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission + * check for the user specified in this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to + * correlate SAS generation with storage resource access. This is only used for User Delegation SAS. + */ + correlationId?: string; +} +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export declare class BlobServiceClient extends StorageClient { + /** + * serviceContext provided by protocol layer. + */ + private serviceContext; + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
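`BlobSASSignatureValues` above is the input for signing a service SAS. A minimal Node.js-only sketch; the account name, key, and resource names are placeholders, and it assumes `generateBlobSASQueryParameters` and `StorageSharedKeyCredential`, which this package exports outside the excerpt shown here.

```ts
import {
  BlobSASPermissions,
  generateBlobSASQueryParameters,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

// Assumed shared key credential; replace with real account values (Node.js only).
const credential = new StorageSharedKeyCredential("myaccount", process.env.ACCOUNT_KEY!);

// Read-only access to a single blob for the next hour.
const sas = generateBlobSASQueryParameters(
  {
    containerName: "my-container",
    blobName: "my-blob.txt",
    permissions: BlobSASPermissions.parse("r"),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),
  },
  credential
).toString();

const blobSasUrl = `https://myaccount.blob.core.windows.net/my-container/my-blob.txt?${sas}`;
console.log(blobSasUrl);
```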
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): BlobServiceClient; + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + * + * Example using DefaultAzureCredential from `@azure/identity`: + * + * ```js + * const account = ""; + * + * const defaultAzureCredential = new DefaultAzureCredential(); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * defaultAzureCredential + * ); + * ``` + * + * Example using an account name/key: + * + * ```js + * const account = "" + * const sharedKeyCredential = new StorageSharedKeyCredential(account, ""); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * sharedKeyCredential + * ); + * ``` + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName: string): ContainerClient; + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + createContainer(containerName: string, options?: ContainerCreateOptions): Promise<{ + containerClient: ContainerClient; + containerCreateResponse: ContainerCreateResponse; + }>; + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. 
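`createContainer` above returns both the creation response and a ready-to-use `ContainerClient`; `deleteContainer` removes it again. A short sketch with placeholder names and an assumed connection string env var.

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function containerLifecycle(): Promise<void> {
  const serviceClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // assumed env var
  );

  // createContainer hands back { containerClient, containerCreateResponse }.
  const { containerClient, containerCreateResponse } = await serviceClient.createContainer(
    "scratch-container"
  );
  console.log(`Created ${containerClient.containerName} (request ${containerCreateResponse.requestId})`);

  // Put a small block blob into the new container.
  await containerClient.getBlockBlobClient("hello.txt").upload("hello", 5);

  // Clean up: deletes the container and everything in it.
  await serviceClient.deleteContainer("scratch-container");
}
```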
+ */ + deleteContainer(containerName: string, options?: ContainerDeleteMethodOptions): Promise; + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + undeleteContainer(deletedContainerName: string, deletedContainerVersion: string, options?: ServiceUndeleteContainerOptions): Promise<{ + containerClient: ContainerClient; + containerUndeleteResponse: ContainerUndeleteResponse; + }>; + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + private renameContainer; + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + getProperties(options?: ServiceGetPropertiesOptions): Promise; + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + setProperties(properties: BlobServiceProperties, options?: ServiceSetPropertiesOptions): Promise; + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + getStatistics(options?: ServiceGetStatisticsOptions): Promise; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + getAccountInfo(options?: ServiceGetAccountInfoOptions): Promise; + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + private listContainersSegment; + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ServiceFindBlobByTagsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation. 
+ */ + private listItems; + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options?: ServiceListContainersOptions): PagedAsyncIterableIterator; + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient(): BlobBatchClient; + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. 
The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string; +} +/** Storage Service Properties. */ +export declare interface BlobServiceProperties { + /** Azure Analytics Logging settings. */ + blobAnalyticsLogging?: Logging; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + hourMetrics?: Metrics; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + minuteMetrics?: Metrics; + /** The set of CORS rules. */ + cors?: CorsRule[]; + /** The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions */ + defaultServiceVersion?: string; + /** the retention policy which determines how long the associated data should persist */ + deleteRetentionPolicy?: RetentionPolicy; + /** The properties that enable an account to host a static website */ + staticWebsite?: StaticWebsite; +} +/** Stats for the storage service. */ +export declare interface BlobServiceStatistics { + /** Geo-Replication information for the Secondary Storage Service */ + geoReplication?: GeoReplication; +} +/** Defines headers for Blob_setHttpHeaders operation. */ +export declare interface BlobSetHTTPHeadersHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setHTTPHeaders} operation. + */ +export declare interface BlobSetHTTPHeadersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob HTTP headers. 
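`generateAccountSasUrl` above only works when the `BlobServiceClient` was constructed with a shared key credential. A sketch under that assumption; the account name/key are placeholders, and `AccountSASPermissions` is another export of this package not shown in the excerpt.

```ts
import {
  AccountSASPermissions,
  BlobServiceClient,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

// Assumed account name and key; a shared key credential is required for SAS generation.
const account = "myaccount";
const serviceClient = new BlobServiceClient(
  `https://${account}.blob.core.windows.net`,
  new StorageSharedKeyCredential(account, process.env.ACCOUNT_KEY!)
);

// Read-only account SAS over service, container, and object resources, valid for one hour.
const accountSasUrl = serviceClient.generateAccountSasUrl(
  new Date(Date.now() + 60 * 60 * 1000),
  AccountSASPermissions.parse("r"),
  "sco"
);
console.log(accountSasUrl);
```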
+ */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the setHttpHeaders operation. */ +export declare type BlobSetHTTPHeadersResponse = BlobSetHTTPHeadersHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetHTTPHeadersHeaders; + }; +}; +/** Defines headers for Blob_setImmutabilityPolicy operation. */ +export declare interface BlobSetImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates the time the immutability policy will expire. */ + immutabilityPolicyExpiry?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; +} +/** + * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation. + */ +export declare interface BlobSetImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + modifiedAccessCondition?: ModificationConditions; +} +/** Contains response data for the setImmutabilityPolicy operation. */ +export declare type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetImmutabilityPolicyHeaders; + }; +}; +/** Defines headers for Blob_setLegalHold operation. */ +export declare interface BlobSetLegalHoldHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates if the blob has a legal hold. */ + legalHold?: boolean; +} +/** + * Options for setting legal hold {@link BlobClient.setLegalHold} operation. + */ +export declare interface BlobSetLegalHoldOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the setLegalHold operation. */ +export declare type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetLegalHoldHeaders; + }; +}; +/** Defines headers for Blob_setMetadata operation. */ +export declare interface BlobSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setMetadata} operation. + */ +export declare interface BlobSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob metadata. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the setMetadata operation. */ +export declare type BlobSetMetadataResponse = BlobSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetMetadataHeaders; + }; +}; +/** Defines headers for Blob_setTags operation. 
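The `BlobSetHTTPHeadersOptions` and `BlobSetMetadataOptions` shapes above feed `BlobClient.setHTTPHeaders` and `BlobClient.setMetadata`. A minimal sketch with placeholder names; the ETag condition simply illustrates the `conditions` field declared above.

```ts
import { BlobServiceClient } from "@azure/storage-blob";

async function updateBlobProperties(): Promise<void> {
  const blobClient = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING! // assumed env var
  )
    .getContainerClient("my-container")
    .getBlobClient("report.csv");

  // Replace the stored HTTP headers (see BlobHTTPHeaders above); omitted fields are cleared.
  await blobClient.setHTTPHeaders({
    blobContentType: "text/csv",
    blobCacheControl: "max-age=3600",
    blobContentDisposition: 'attachment; filename="report.csv"',
  });

  // Replace user-defined metadata, guarded by an ETag taken from a prior getProperties call.
  const { etag } = await blobClient.getProperties();
  await blobClient.setMetadata({ department: "finance" }, { conditions: { ifMatch: etag } });
}
```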
*/ +export declare interface BlobSetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setTags} operation. + */ +export declare interface BlobSetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} +/** Contains response data for the setTags operation. */ +export declare type BlobSetTagsResponse = BlobSetTagsHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTagsHeaders; + }; +}; +/** Defines headers for Blob_setTier operation. */ +export declare interface BlobSetTierHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.setAccessTier} operation. + */ +export declare interface BlobSetTierOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; +} +/** Contains response data for the setTier operation. */ +export declare type BlobSetTierResponse = BlobSetTierHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTierHeaders; + }; +}; +/** Defines headers for Blob_startCopyFromURL operation. */ +export declare interface BlobStartCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobStartCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the blob that are being copied. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Overrides the sealed state of the destination blob. Default true. + */ + sealBlob?: boolean; +} +/** Contains response data for the startCopyFromURL operation. */ +export declare type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. 
*/ + parsedHeaders: BlobStartCopyFromURLHeaders; + }; +}; +/** + * Options to configure the {@link BlobClient.syncCopyFromURL} operation. + */ +export declare interface BlobSyncCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} +export declare interface BlobTag { + key: string; + value: string; +} +/** Blob tags */ +export declare interface BlobTags { + blobTagSet: BlobTag[]; +} +/** Defines values for BlobType. */ +export declare type BlobType = "BlockBlob" | "PageBlob" | "AppendBlob"; +/** Defines headers for Blob_undelete operation. */ +export declare interface BlobUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobClient.undelete} operation. + */ +export declare interface BlobUndeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
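+ *
+ * An illustrative sketch only (not part of the upstream JSDoc), assuming the
+ * `@azure/abort-controller` package and an existing `blobClient`:
+ *
+ * ```js
+ * const { AbortController } = require("@azure/abort-controller");
+ * const controller = new AbortController();
+ * // Abort the pending request if it has not completed within 30 seconds.
+ * setTimeout(() => controller.abort(), 30 * 1000);
+ * await blobClient.undelete({ abortSignal: controller.signal });
+ * ```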
+ */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Contains response data for the undelete operation. */ +export declare type BlobUndeleteResponse = BlobUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobUndeleteHeaders; + }; +}; +/** + * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and + * {@link BlockBlobClient.uploadBrowserData}. + */ +export declare type BlobUploadCommonResponse = BlockBlobUploadHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse; +}; +/** Represents a single block in a block blob. It describes the block's ID and size. */ +export declare interface Block { + /** The base64 encoded block ID. */ + name: string; + /** The block size in bytes. */ + size: number; +} +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export declare class BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, the API + * extractor has an issue blocking that. Here we redeclare _blobContext in BlockBlobClient. + */ + private _blobContext; + /** + * blockBlobContext provided by protocol layer. + */ + private blockBlobContext; + /** + * + * Creates an instance of BlockBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25".
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): BlockBlobClient; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + query(query: string, options?: BlockBlobQueryOptions): Promise; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's, + * use {@link stageBlock} and {@link commitBlockList}. 
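+ *
+ * As an illustrative sketch (not part of the upstream JSDoc; Node.js shown), a partial-update
+ * flow with {@link stageBlock} and {@link commitBlockList}; the block IDs below are hypothetical
+ * and must be base64-encoded strings of equal length:
+ *
+ * ```js
+ * const blockId1 = Buffer.from("block-000001").toString("base64");
+ * const blockId2 = Buffer.from("block-000002").toString("base64");
+ * await blockBlobClient.stageBlock(blockId1, "Hello ", 6);
+ * await blockBlobClient.stageBlock(blockId2, "world!", 6);
+ * // Committing the block list defines the blob's content from the staged blocks.
+ * await blockBlobClient.commitBlockList([blockId1, blockId2]);
+ * ```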
+ * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + upload(body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise; + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + syncUploadFromURL(sourceURL: string, options?: BlockBlobSyncUploadFromURLOptions): Promise; + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + stageBlock(blockId: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobStageBlockOptions): Promise; + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. 
Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. + */ + stageBlockFromURL(blockId: string, sourceURL: string, offset?: number, count?: number, options?: BlockBlobStageBlockFromURLOptions): Promise; + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list are permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte values that are base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + commitBlockList(blocks: string[], options?: BlockBlobCommitBlockListOptions): Promise; + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. + */ + getBlockList(listType: BlockListType, options?: BlockBlobGetBlockListOptions): Promise; + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + uploadData(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise; + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to a block blob. + * + * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list.
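+ *
+ * Browser-only illustrative sketch (not part of the upstream JSDoc); `fileInput` is a
+ * hypothetical `<input type="file">` element:
+ *
+ * ```js
+ * const file = fileInput.files[0];
+ * // Upload the selected file and record its MIME type on the blob.
+ * await blockBlobClient.uploadBrowserData(file, {
+ *   blobHTTPHeaders: { blobContentType: file.type }
+ * });
+ * ```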
+ * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. + */ + uploadBrowserData(browserData: Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise; + /** + * + * Uploads data to a block blob. Requires a bodyFactory as the data source, + * which needs to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + private uploadSeekableInternal; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadFile(filePath: string, options?: BlockBlobParallelUploadOptions): Promise; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into a block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Set the input stream's highWaterMark to the same value as the bufferSize + * parameter, which will avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * positive correlation with max uploading concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadStream(stream: Readable, bufferSize?: number, maxConcurrency?: number, options?: BlockBlobUploadStreamOptions): Promise; +} +/** Defines headers for BlockBlob_commitBlockList operation. */ +export declare interface BlockBlobCommitBlockListHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** This header is returned so that the client can check for message content integrity.
This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.commitBlockList} operation. + */ +export declare interface BlockBlobCommitBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when committing the block list. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when committing block list. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when committing block list. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. 
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the commitBlockList operation. */ +export declare type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobCommitBlockListHeaders; + }; +}; +/** Defines headers for BlockBlob_getBlockList operation. */ +export declare interface BlockBlobGetBlockListHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The media type of the body of the response. For Get Block List this is 'application/xml' */ + contentType?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.getBlockList} operation. + */ +export declare interface BlockBlobGetBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; +} +/** Contains response data for the getBlockList operation. */ +export declare type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders & BlockList & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlockList; + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobGetBlockListHeaders; + }; +}; +/** + * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}. + */ +export declare interface BlockBlobParallelUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Destination block blob size in bytes. + */ + blockSize?: number; + /** + * Blob size threshold in bytes to start concurrency uploading. 
+ * Default value is 256MB, blob size less than this option will + * be uploaded via one I/O operation without concurrency. + * You can customize a value less equal than the default value. + */ + maxSingleShotSize?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Blob HTTP Headers. A common header to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel uploading. Must be greater than or equal to 0. + */ + concurrency?: number; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} +/** Defines headers for BlockBlob_putBlobFromUrl operation. */ +export declare interface BlockBlobPutBlobFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the putBlobFromUrl operation. */ +export declare type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobPutBlobFromUrlHeaders; + }; +}; +/** + * Options to configure {@link BlockBlobClient.query} operation. + */ +export declare interface BlockBlobQueryOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Configurations for the query input. + */ + inputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryParquetConfiguration; + /** + * Configurations for the query output. + */ + outputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryArrowConfiguration; + /** + * Callback to receive events on the progress of query operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback to receive error events during the query operaiton. + */ + onError?: (error: BlobQueryError) => void; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} +/** Defines headers for BlockBlob_stageBlockFromURL operation. */ +export declare interface BlockBlobStageBlockFromURLHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation. 
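+ *
+ * An illustrative sketch (not part of the upstream JSDoc; Node.js shown), assuming
+ * `sourceBlobURL` points to a publicly readable blob or carries a SAS, and `blockBlobClient`
+ * is the destination; the block ID is hypothetical:
+ *
+ * ```js
+ * const blockId = Buffer.from("block-000001").toString("base64");
+ * // Stage the first 1 MiB of the source blob as a block, then commit it.
+ * await blockBlobClient.stageBlockFromURL(blockId, sourceBlobURL, 0, 1024 * 1024);
+ * await blockBlobClient.commitBlockList([blockId]);
+ * ```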
+ */ +export declare interface BlockBlobStageBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Specifies the bytes of the source Blob/File to upload. + * If not specified, the entire content is uploaded as a single block. + */ + range?: Range_2; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the stageBlockFromURL operation. */ +export declare type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockFromURLHeaders; + }; +}; +/** Defines headers for BlockBlob_stageBlock operation. */ +export declare interface BlockBlobStageBlockHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This header is returned so that the client can check for message content integrity. 
The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.stageBlock} operation. + */ +export declare interface BlockBlobStageBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * Callback to receive events on the progress of stage block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the stageBlock operation. */ +export declare type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockHeaders; + }; +}; +/** + * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation. + */ +export declare interface BlockBlobSyncUploadFromURLOptions extends CommonOptions { + /** + * Server timeout in seconds. + * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations + */ + timeoutInSeconds?: number; + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value + * pairs are specified, the operation will copy the metadata from the source blob or file to the + * destination blob. If one or more name-value pairs are specified, the destination blob is + * created with the specified metadata, and metadata is not copied from the source blob or file. + * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules + * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + * information. + */ + metadata?: Metadata; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Optional, default is true. Indicates if properties from the source blob should be copied. + */ + copySourceBlobProperties?: boolean; + /** + * HTTP headers to set when uploading to a block blob. + * + * A common header to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Conditions to meet for the destination Azure Blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Conditions to meet for the source Azure Blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export declare enum BlockBlobTier { + /** + * Optimized for storing data that is accessed frequently. + */ + Hot = "Hot", + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + Cool = "Cool", + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + Archive = "Archive" +} +/** Defines headers for BlockBlob_upload operation. */ +export declare interface BlockBlobUploadHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link BlockBlobClient.upload} operation. + */ +export declare interface BlockBlobUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when uploading to a block blob. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when uploading to a block blob. + */ + metadata?: Metadata; + /** + * Callback to receive events on the progress of upload operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the upload operation. */ +export declare type BlockBlobUploadResponse = BlockBlobUploadHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobUploadHeaders; + }; +}; +/** + * Option interface for the {@link BlockBlobClient.uploadStream} operation. + */ +export declare interface BlockBlobUploadStreamOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Blob HTTP Headers. + * + * A common header to set is `blobContentType`, enabling the + * browser to provide functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} +export declare interface BlockList { + committedBlocks?: Block[]; + uncommittedBlocks?: Block[]; +} +/** Defines values for BlockListType. */ +export declare type BlockListType = "committed" | "uncommitted" | "all"; +declare interface ClearRange { + start: number; + end: number; +} +/** + * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}. + */ +export declare interface CommonGenerateSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. 
+ */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; +} +/** + * An interface for options common to every remote operation. + */ +export declare interface CommonOptions { + /** + * Options to configure spans created when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} +/** + * Options to configure Container - Acquire Lease operation. + */ +export declare interface ContainerAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** Optional parameters. */ +export declare interface ContainerBreakLeaseOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */ + breakPeriod?: number; +} +/** + * Options to configure Container - Break Lease operation. + */ +export declare interface ContainerBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Options to configure Container - Change Lease operation. + */ +export declare interface ContainerChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export declare class ContainerClient extends StorageClient { + /** + * containerContext provided by protocol layer. + */ + private containerContext; + private _containerName; + /* + * The name of the container. 
+ */ + readonly containerName: string; + /** + * + * Creates an instance of ContainerClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. + * + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); + * ``` + */ + create(options?: ContainerCreateOptions): Promise; + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. 
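+ *
+ * Example usage (a minimal sketch; the container name is illustrative and `blobServiceClient`
+ * is assumed to exist):
+ *
+ * ```js
+ * const containerClient = blobServiceClient.getContainerClient("my-container");
+ * const { succeeded } = await containerClient.createIfNotExists();
+ * console.log(succeeded ? "Container was created" : "Container already existed");
+ * ```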
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - + */ + createIfNotExists(options?: ContainerCreateOptions): Promise; + /** + * Returns true if the Azure container resource represented by this client exists; false otherwise. + * + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa new containers with the same name might be added by other clients or + * applications after this function completes. + * + * @param options - + */ + exists(options?: ContainerExistsOptions): Promise; + /** + * Creates a {@link BlobClient} + * + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. + */ + getBlobClient(blobName: string): BlobClient; + /** + * Creates an {@link AppendBlobClient} + * + * @param blobName - An append blob name + */ + getAppendBlobClient(blobName: string): AppendBlobClient; + /** + * Creates a {@link BlockBlobClient} + * + * @param blobName - A block blob name + * + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + getBlockBlobClient(blobName: string): BlockBlobClient; + /** + * Creates a {@link PageBlobClient} + * + * @param blobName - A page blob name + */ + getPageBlobClient(blobName: string): PageBlobClient; + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties + * + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Options to Container Get Properties operation. + */ + getProperties(options?: ContainerGetPropertiesOptions): Promise; + /** + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + delete(options?: ContainerDeleteMethodOptions): Promise; + /** + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * + * @param options - Options to Container Delete operation. + */ + deleteIfExists(options?: ContainerDeleteMethodOptions): Promise; + /** + * Sets one or more user-defined name-value pairs for the specified container. + * + * If no option provided, or no metadata defined in the parameter, the container + * metadata will be removed. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value provided the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. 
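+ *
+ * Example usage (a sketch; the metadata keys and values are illustrative):
+ *
+ * ```js
+ * // Replaces any existing container metadata with these key-value pairs.
+ * await containerClient.setMetadata({ project: "demo", owner: "docs" });
+ *
+ * // Calling setMetadata with no arguments removes all container metadata.
+ * await containerClient.setMetadata();
+ * ```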
+ */ + setMetadata(metadata?: Metadata, options?: ContainerSetMetadataOptions): Promise<ContainerSetMetadataResponse>; + /** + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. + * + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. + */ + getAccessPolicy(options?: ContainerGetAccessPolicyOptions): Promise<ContainerGetAccessPolicyResponse>; + /** + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. + * + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl is provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. + */ + setAccessPolicy(access?: PublicAccessType, containerAcl?: SignedIdentifier[], options?: ContainerSetAccessPolicyOptions): Promise<ContainerSetAccessPolicyResponse>; + /** + * Get a {@link BlobLeaseClient} that manages leases on the container. + * + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. + */ + getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's content, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
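+ *
+ * Example usage (a sketch; the blob name and content are illustrative, and `containerClient`
+ * is assumed to exist):
+ *
+ * ```js
+ * const content = "Hello world!";
+ * const { blockBlobClient, response } = await containerClient.uploadBlockBlob(
+ *   "hello.txt",
+ *   content,
+ *   Buffer.byteLength(content),
+ *   { blobHTTPHeaders: { blobContentType: "text/plain" } }
+ * );
+ * console.log(`Uploaded block blob, requestId: ${response.requestId}`);
+ * ```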
+ */ + uploadBlockBlob(blobName: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<{ + blockBlobClient: BlockBlobClient; + response: BlockBlobUploadResponse; + }>; + /** + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. + */ + deleteBlob(blobName: string, options?: ContainerDeleteBlobOptions): Promise<BlobDeleteResponse>; + /** + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. + */ + private listBlobFlatSegment; + /** + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. + */ + private listBlobHierarchySegment; + /** + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator of {@link BlobItem} objects + * + * @param options - Options to list blobs operation. + */ + private listItems; + /** + * Returns an async iterable iterator to list all the blobs + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages.
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options?: ContainerListBlobsOptions): PagedAsyncIterableIterator<BlobItem, ContainerListBlobFlatSegmentResponse>; + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listHierarchySegments; + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + private listItemsByHierarchy; + /** + * Returns an async iterable iterator to list all the blobs by hierarchy + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages.
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter: string, options?: ContainerListBlobsOptions): PagedAsyncIterableIterator<({ + kind: "prefix"; + } & BlobPrefix) | ({ + kind: "blob"; + } & BlobItem), ContainerListBlobHierarchySegmentResponse>; + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. 
+ * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ContainerFindBlobByTagsOptions): PagedAsyncIterableIterator; + private getContainerNameFromUrl; + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient(): BlobBatchClient; +} +/** Defines headers for Container_create operation. */ +export declare interface ContainerCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link ContainerClient.createIfNotExists} operation. + */ +export declare interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse { + /** + * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure {@link ContainerClient.create} operation. + */ +export declare interface ContainerCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the container. + */ + metadata?: Metadata; + /** + * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include: + * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account. + * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. + */ + access?: PublicAccessType; + /** + * Container encryption scope info. + */ + containerEncryptionScope?: ContainerEncryptionScope; +} +/** Contains response data for the create operation. */ +export declare type ContainerCreateResponse = ContainerCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerCreateHeaders; + }; +}; +/** + * Options to configure the {@link ContainerClient.deleteBlob} operation. + */ +export declare interface ContainerDeleteBlobOptions extends BlobDeleteOptions { + /** + * An opaque DateTime value that, when present, specifies the version + * of the blob to delete. It's for service version 2019-10-10 and newer. + */ + versionId?: string; +} +/** Defines headers for Container_delete operation. */ +export declare interface ContainerDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the {@link ContainerClient.deleteIfExists} operation. + */ +export declare interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse { + /** + * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place. + */ + succeeded: boolean; +} +/** + * Options to configure {@link ContainerClient.delete} operation. + */ +export declare interface ContainerDeleteMethodOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting the container. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the delete operation. */ +export declare type ContainerDeleteResponse = ContainerDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerDeleteHeaders; + }; +}; +/** Parameter group */ +export declare interface ContainerEncryptionScope { + /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */ + defaultEncryptionScope?: string; + /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */ + preventEncryptionScopeOverride?: boolean; +} +/** + * Options to configure {@link ContainerClient.exists} operation. + */ +export declare interface ContainerExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Defines headers for Container_filterBlobs operation. */ +export declare interface ContainerFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ContainerFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; +} +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment & ContainerFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; +/** + * Options to configure {@link ContainerClient.generateSasUrl} operation. + */ +export declare interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: ContainerSASPermissions; +} +/** Defines headers for Container_getAccessPolicy operation. */ +export declare interface ContainerGetAccessPolicyHeaders { + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.getAccessPolicy} operation. + */ +export declare interface ContainerGetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** + * Contains response data for the {@link ContainerClient.getAccessPolicy} operation. + */ +export declare type ContainerGetAccessPolicyResponse = { + signedIdentifiers: SignedIdentifier[]; +} & ContainerGetAccessPolicyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerGetAccessPolicyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: SignedIdentifierModel[]; + }; +}; +/** Defines headers for Container_getProperties operation. 
*/ +export declare interface ContainerGetPropertiesHeaders { + metadata?: { + [propertyName: string]: string; + }; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** Indicates whether the container has an immutability policy set on it. */ + hasImmutabilityPolicy?: boolean; + /** Indicates whether the container has a legal hold. */ + hasLegalHold?: boolean; + /** The default encryption scope for the container. */ + defaultEncryptionScope?: string; + /** Indicates whether the container's default encryption scope can be overridden. */ + denyEncryptionScopeOverride?: boolean; + /** Indicates whether version level worm is enabled on a container. */ + isImmutableStorageWithVersioningEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.getProperties} operation. + */ +export declare interface ContainerGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} +/** Contains response data for the getProperties operation. */ +export declare type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerGetPropertiesHeaders; + }; +}; +/** An Azure Storage container */ +export declare interface ContainerItem { + name: string; + deleted?: boolean; + version?: string; + /** Properties of a container */ + properties: ContainerProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; +} +/** Defines headers for Container_listBlobFlatSegment operation. */ +export declare interface ContainerListBlobFlatSegmentHeaders { + /** The media type of the body of the response.
For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the listBlobFlatSegment operation. + */ +export declare type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse & ContainerListBlobFlatSegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobFlatSegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsFlatSegmentResponseModel; + }; +}; +/** Defines headers for Container_listBlobHierarchySegment operation. */ +export declare interface ContainerListBlobHierarchySegmentHeaders { + /** The media type of the body of the response. For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Contains response data for the listBlobHierarchySegment operation. + */ +export declare type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse & ContainerListBlobHierarchySegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobHierarchySegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsHierarchySegmentResponseModel; + }; +}; +/** + * Options to configure Container - List Blobs operations. + * + * See: + * - {@link ContainerClient.listBlobsFlat} + * - {@link ContainerClient.listBlobsByHierarchy} + */ +export declare interface ContainerListBlobsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response. 
+ */ + includeCopy?: boolean; + /** + * Specifies whether soft deleted blobs should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether blob metadata be returned in the response. + */ + includeMetadata?: boolean; + /** + * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response. + */ + includeSnapshots?: boolean; + /** + * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response. + */ + includeVersions?: boolean; + /** + * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response. + */ + includeUncommitedBlobs?: boolean; + /** + * Specifies whether blob tags be returned in the response. + */ + includeTags?: boolean; + /** + * Specifies whether deleted blob with versions be returned in the response. + */ + includeDeletedWithVersions?: boolean; + /** + * Specifies whether blob immutability policy be returned in the response. + */ + includeImmutabilityPolicy?: boolean; + /** + * Specifies whether blob legal hold be returned in the response. + */ + includeLegalHold?: boolean; +} +/** Properties of a container */ +export declare interface ContainerProperties { + lastModified: Date; + etag: string; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + publicAccess?: PublicAccessType; + hasImmutabilityPolicy?: boolean; + hasLegalHold?: boolean; + defaultEncryptionScope?: string; + preventEncryptionScopeOverride?: boolean; + deletedOn?: Date; + remainingRetentionDays?: number; + /** Indicates if version level worm is enabled on this container. */ + isImmutableStorageWithVersioningEnabled?: boolean; +} +/** + * Options to configure Container - Release Lease operation. + */ +export declare interface ContainerReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** Defines headers for Container_rename operation. */ +export declare interface ContainerRenameHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the rename operation. */ +export declare type ContainerRenameResponse = ContainerRenameHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerRenameHeaders; + }; +}; +/** + * Options to configure Container - Renew Lease operation. 
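+ *
+ * A typical lease round trip looks like this (a sketch; the 30 second duration is
+ * illustrative, and `containerClient` is assumed to exist):
+ *
+ * ```js
+ * const leaseClient = containerClient.getBlobLeaseClient();
+ * await leaseClient.acquireLease(30);
+ * // ... work on the container while holding the lease, renewing it before it expires ...
+ * await leaseClient.renewLease();
+ * await leaseClient.releaseLease();
+ * ```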
+ */ +export declare interface ContainerRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Conditions to meet for the container. + */ +export declare interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions { +} +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class ContainerSASPermissions { + /** + * Creates a {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): ContainerSASPermissions; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains the same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies List access granted. + */ + list: boolean; + /** + * Specifies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString(): string; +} +/** + * A type that looks like a Container SAS permission. + * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface ContainerSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted.
+ */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies List access granted. + */ + list?: boolean; + /** + * Specifies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags?: boolean; +} +/** Defines headers for Container_setAccessPolicy operation. */ +export declare interface ContainerSetAccessPolicyHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.setAccessPolicy} operation. + */ +export declare interface ContainerSetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting the access policy. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the setAccessPolicy operation. */ +export declare type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetAccessPolicyHeaders; + }; +}; +/** Defines headers for Container_setMetadata operation. */ +export declare interface ContainerSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request.
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link ContainerClient.setMetadata} operation. + */ +export declare interface ContainerSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: ContainerRequestConditions; +} +/** Contains response data for the setMetadata operation. */ +export declare type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetMetadataHeaders; + }; +}; +/** Defines headers for Container_restore operation. */ +export declare interface ContainerUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the restore operation. */ +export declare type ContainerUndeleteResponse = ContainerUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerUndeleteHeaders; + }; +}; +/** + * Defines the operations from a {@link BlobClient} that are needed for the poller + * returned by {@link BlobClient.beginCopyFromURL} to work. + */ +export declare type CopyPollerBlobClient = Pick<BlobClient, "abortCopyFromURL" | "getProperties"> & { + startCopyFromURL(copySource: string, options?: BlobStartCopyFromURLOptions): Promise<BlobBeginCopyFromURLResponse>; +}; +/** Defines values for CopyStatusType. */ +export declare type CopyStatusType = "pending" | "success" | "aborted" | "failed"; +/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */ +export declare interface CorsRule { + /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. Note that the origin must be an exact case-sensitive match with the origin that the user agent sends to the service.
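The `ContainerSetMetadataOptions.conditions` member above can carry a lease id; a minimal sketch (connection string, container name, and metadata values are placeholders) of setting metadata only while that lease is held:

```js
// Minimal sketch: set container metadata only if the container is leased with the given id.
const { BlobServiceClient } = require("@azure/storage-blob");

async function setMetadataUnderLease(connectionString, leaseId) {
  const containerClient = BlobServiceClient
    .fromConnectionString(connectionString)
    .getContainerClient("<container-name>");

  // The service rejects this with a 412-style error if the lease id does not match.
  await containerClient.setMetadata(
    { project: "docs", owner: "storage-team" },
    { conditions: { leaseId } }
  );
}
```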
You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */ + allowedOrigins: string; + /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */ + allowedMethods: string; + /** the request headers that the origin domain may specify on the CORS request. */ + allowedHeaders: string; + /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */ + exposedHeaders: string; + /** The maximum amount time that a browser should cache the preflight OPTIONS request. */ + maxAgeInSeconds: number; +} +/** Parameter group */ +export declare interface CpkInfo { + /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */ + encryptionKey?: string; + /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */ + encryptionKeySha256?: string; + /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is provided. */ + encryptionAlgorithm?: EncryptionAlgorithmType; +} +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +declare abstract class Credential_2 implements RequestPolicyFactory { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy; +} +export { Credential_2 as Credential }; +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +export declare abstract class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; +} +/** + * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy. + */ +export declare type CredentialPolicyCreator = (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => CredentialPolicy; +/** Defines values for DeleteSnapshotsOptionType. */ +export declare type DeleteSnapshotsOptionType = "include" | "only"; +export { deserializationPolicy }; +/** Defines values for EncryptionAlgorithmType. */ +export declare type EncryptionAlgorithmType = "AES256"; +/** + * Blob info from a {@link BlobServiceClient.findBlobsByTags} + */ +export declare interface FilterBlobItem { + /** + * Blob Name. + */ + name: string; + /** + * Container Name. + */ + containerName: string; + /** + * Blob Tags. + */ + tags?: Tags; + /** + * Tag value. + * + * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags. + */ + tagValue: string; +} +/** Blob info from a Filter Blobs API call */ +export declare interface FilterBlobItemModel { + name: string; + containerName: string; + /** Blob tags */ + tags?: BlobTags; +} +/** + * Segment response of {@link BlobServiceClient.findBlobsByTags} operation. 
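`CorsRule` above is only the wire shape; a minimal sketch of applying one rule through `BlobServiceClient.setProperties` (origin and lifetime are placeholder values; only the CORS group is sent here, and property groups omitted from the request keep their existing values):

```js
// Minimal sketch: allow browser GETs from one origin by updating the service-level CORS rules.
const { BlobServiceClient } = require("@azure/storage-blob");

async function allowBrowserReads(connectionString) {
  const serviceClient = BlobServiceClient.fromConnectionString(connectionString);

  await serviceClient.setProperties({
    cors: [
      {
        allowedOrigins: "https://contoso.example", // exact, case-sensitive match, or "*"
        allowedMethods: "GET,OPTIONS",             // comma-separated verbs
        allowedHeaders: "*",
        exposedHeaders: "*",
        maxAgeInSeconds: 3600                      // preflight cache lifetime in seconds
      }
    ]
  });
}
```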
+ */ +export declare interface FilterBlobSegment { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItem[]; + continuationToken?: string; +} +/** The result of a Filter Blobs API call */ +export declare interface FilterBlobSegmentModel { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItemModel[]; + continuationToken?: string; +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateAccountSASQueryParameters(accountSASSignatureValues: AccountSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * Fill in the required details before running the following snippets. + * + * Example usage: + * + * ```js + * // Generate service level SAS for a container + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using an identifier: + * + * ```js + * // Generate service level SAS for a container with identifier + * // startsOn & permissions are optional when identifier is provided + * const identifier = "unique-id"; + * await containerClient.setAccessPolicy(undefined, [ + * { + * accessPolicy: { + * expiresOn: new Date(new Date().valueOf() + 86400), // Date type + * permissions: ContainerSASPermissions.parse("racwdl").toString(), + * startsOn: new Date() // Date type + * }, + * id: identifier + * } + * ]); + * + * const containerSAS = generateBlobSASQueryParameters( + * { + * containerName, // Required + * identifier // Required + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using a blob name: + * + * ```js + * // Generate service level SAS for a blob + * const blobSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * blobName, // Required + * permissions: BlobSASPermissions.parse("racwd"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type + * cacheControl: "cache-control-override", // Optional + * contentDisposition: "content-disposition-override", // Optional + * contentEncoding: "content-encoding-override", // Optional + * contentLanguage: "content-language-override", // Optional + * contentType: "content-type-override", // Optional + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required. + * + * Example usage: + * + * ```js + * // Generate user delegation SAS for a container + * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn); + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn, // Optional. Date type + * expiresOn, // Required. Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2018-11-09" // Must greater than or equal to 2018-11-09 to generate user delegation SAS + * }, + * userDelegationKey, // UserDelegationKey + * accountName + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @param accountName - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, userDelegationKey: UserDelegationKey, accountName: string): SASQueryParameters; +/** Geo-Replication information for the Secondary Storage Service */ +export declare interface GeoReplication { + /** The status of the secondary location */ + status: GeoReplicationStatusType; + /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */ + lastSyncOn: Date; +} +/** Defines values for GeoReplicationStatusType. */ +export declare type GeoReplicationStatusType = "live" | "bootstrap" | "unavailable"; +/** + * Represents authentication information in Authorization, ProxyAuthorization, + * WWW-Authenticate, and Proxy-Authenticate header values. + */ +export declare interface HttpAuthorization { + /** + * The scheme to use for authorization. + */ + scheme: string; + /** + * the credentials containing the authentication information of the user agent for the resource being requested. 
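`generateAccountSASQueryParameters`, declared just before the blob/container overloads above, takes an `AccountSASSignatureValues` instead. A minimal account-level sketch with placeholder credentials:

```js
// Minimal sketch: account SAS that can read and list blob resources for one hour.
const {
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
  generateAccountSASQueryParameters,
  StorageSharedKeyCredential
} = require("@azure/storage-blob");

const credential = new StorageSharedKeyCredential("<account-name>", "<account-key>");

const accountSAS = generateAccountSASQueryParameters(
  {
    permissions: AccountSASPermissions.parse("rl"),                 // read + list
    services: AccountSASServices.parse("b").toString(),             // blob service only
    resourceTypes: AccountSASResourceTypes.parse("sco").toString(), // service, container, object
    expiresOn: new Date(Date.now() + 60 * 60 * 1000)
  },
  credential
).toString();

// Append to any resource URL, e.g. `${serviceClient.url}?${accountSAS}`.
```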
+ */ + value: string; +} +export { HttpHeaders }; +export { HttpOperationResponse }; +export { HttpRequestBody }; +export { IHttpClient }; +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export declare function isPipelineLike(pipeline: unknown): pipeline is PipelineLike; +/** + * The details for a specific lease. + */ +export declare interface Lease { + /** + * The ETag contains a value that you can use to + * perform operations conditionally. If the request version is 2011-08-18 or + * newer, the ETag value will be in quotes. + */ + etag?: string; + /** + * Returns the date and time the container was + * last modified. Any operation that modifies the blob, including an update + * of the blob's metadata or properties, changes the last-modified time of + * the blob. + */ + lastModified?: Date; + /** + * Uniquely identifies a container's lease + */ + leaseId?: string; + /** + * Approximate time remaining in the lease + * period, in seconds. + */ + leaseTime?: number; + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + */ + requestId?: string; + /** + * Indicates the version of the Blob service used + * to execute the request. This header is returned for requests made against + * version 2009-09-19 and above. + */ + version?: string; + /** + * UTC date/time value generated by the service that + * indicates the time at which the response was initiated + */ + date?: Date; + /** + * Error code if any associated with the response that returned + * the Lease information. + */ + errorCode?: string; +} +/** Parameter group */ +export declare interface LeaseAccessConditions { + /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */ + leaseId?: string; +} +/** Defines values for LeaseDurationType. */ +export declare type LeaseDurationType = "infinite" | "fixed"; +/** + * Configures lease operations. + */ +export declare interface LeaseOperationOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} +/** + * Contains the response data for operations that create, modify, or delete a lease. + * + * See {@link BlobLeaseClient}. + */ +export declare type LeaseOperationResponse = Lease & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: Lease; + }; +}; +/** Defines values for LeaseStateType. */ +export declare type LeaseStateType = "available" | "leased" | "expired" | "breaking" | "broken"; +/** Defines values for LeaseStatusType. 
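The `Lease`/`LeaseOperationResponse` shapes above are what `BlobLeaseClient` methods return. A minimal sketch (connection string and container name are placeholders) of taking and releasing a 30-second container lease:

```js
// Minimal sketch: acquire, use, and release a 30-second container lease.
const { BlobServiceClient } = require("@azure/storage-blob");

async function withContainerLease(connectionString) {
  const containerClient = BlobServiceClient
    .fromConnectionString(connectionString)
    .getContainerClient("<container-name>");

  const leaseClient = containerClient.getBlobLeaseClient(); // a lease id is generated for us
  const lease = await leaseClient.acquireLease(30);         // 15-60 seconds, or -1 for infinite
  try {
    console.log(`acquired lease ${lease.leaseId}`);
    // ...operations that pass { conditions: { leaseId: lease.leaseId } }...
  } finally {
    await leaseClient.releaseLease();
  }
}
```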
*/ +export declare type LeaseStatusType = "locked" | "unlocked"; +/** + * An enumeration of blobs + */ +export declare interface ListBlobsFlatSegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegment; + continuationToken?: string; +} +/** An enumeration of blobs */ +export declare interface ListBlobsFlatSegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegmentModel; + continuationToken?: string; +} +/** + * An enumeration of blobs + */ +export declare interface ListBlobsHierarchySegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegment; + continuationToken?: string; +} +/** An enumeration of blobs */ +export declare interface ListBlobsHierarchySegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegmentModel; + continuationToken?: string; +} +/** An enumeration of containers */ +export declare interface ListContainersSegmentResponse { + serviceEndpoint: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + containerItems: ContainerItem[]; + continuationToken?: string; +} +/** + * The `@azure/logger` configuration for this package. + */ +export declare const logger: AzureLogger; +/** Azure Analytics Logging settings. */ +export declare interface Logging { + /** The version of Storage Analytics to configure. */ + version: string; + /** Indicates whether all delete requests should be logged. */ + deleteProperty: boolean; + /** Indicates whether all read requests should be logged. */ + read: boolean; + /** Indicates whether all write requests should be logged. */ + write: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy: RetentionPolicy; +} +/** + * Specifies HTTP options for conditional requests based on ETag matching. + */ +export declare interface MatchConditions { + /** + * Specify an ETag value to operate only on blobs with a matching value. + */ + ifMatch?: string; + /** + * Specify an ETag value to operate only on blobs without a matching value. + */ + ifNoneMatch?: string; +} +/** + * A map of name-value pairs to associate with the resource. + */ +export declare interface Metadata { + /** + * A name-value pair. + */ + [propertyName: string]: string; +} +/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ +export declare interface Metrics { + /** The version of Storage Analytics to configure. */ + version?: string; + /** Indicates whether metrics are enabled for the Blob service. */ + enabled: boolean; + /** Indicates whether metrics should generate summary statistics for called API operations. */ + includeAPIs?: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy?: RetentionPolicy; +} +/** + * Specifies HTTP options for conditional requests based on modification time. + */ +export declare interface ModificationConditions { + /** + * Specify this header value to operate only on a blob if it has been modified since the + * specified date/time. 
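The flat and hierarchy segment shapes above are what the listing iterators page through. A minimal sketch (placeholder names) that walks one level of a container with the hierarchy listing:

```js
// Minimal sketch: list one level of virtual directories and blobs using the "/" delimiter.
const { BlobServiceClient } = require("@azure/storage-blob");

async function listTopLevel(connectionString) {
  const containerClient = BlobServiceClient
    .fromConnectionString(connectionString)
    .getContainerClient("<container-name>");

  for await (const item of containerClient.listBlobsByHierarchy("/")) {
    if (item.kind === "prefix") {
      console.log(`virtual directory: ${item.name}`);
    } else {
      console.log(`blob: ${item.name} (${item.properties.contentLength} bytes)`);
    }
  }
}
```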
+ */ + ifModifiedSince?: Date; + /** + * Specify this header value to operate only on a blob if it has not been modified since the + * specified date/time. + */ + ifUnmodifiedSince?: Date; +} +/** + * standard HTTP conditional headers and tags condition. + */ +export declare interface ModifiedAccessConditions extends MatchConditions, ModificationConditions, TagConditions { +} +/** Parameter group */ +export declare interface ModifiedAccessConditionsModel { + /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */ + ifModifiedSince?: Date; + /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */ + ifUnmodifiedSince?: Date; + /** Specify an ETag value to operate only on blobs with a matching value. */ + ifMatch?: string; + /** Specify an ETag value to operate only on blobs without a matching value. */ + ifNoneMatch?: string; + /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */ + ifTags?: string; +} +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export declare function newPipeline(credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, pipelineOptions?: StoragePipelineOptions): Pipeline; +/** + * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}. + * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the + * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses + * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}. + */ +export declare interface ObjectReplicationPolicy { + /** + * The Object Replication Policy ID. + */ + policyId: string; + /** + * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID. + */ + rules: ObjectReplicationRule[]; +} +/** + * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob. + * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}. + */ +export declare interface ObjectReplicationRule { + /** + * The Object Replication Rule ID. + */ + ruleId: string; + /** + * The Replication Status + */ + replicationStatus: ObjectReplicationStatus; +} +/** + * Specifies the Replication Status of a blob. This is used when a storage account has + * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}. + */ +export declare type ObjectReplicationStatus = "complete" | "failed"; +/** Defines headers for PageBlob_clearPages operation. */ +export declare interface PageBlobClearPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
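`newPipeline` above is the hook for customizing the HTTP stack shared by clients. A minimal sketch, assuming the `retryOptions`, `userAgentOptions`, and `keepAliveOptions` fields of `StoragePipelineOptions` and the `(url, pipeline)` constructor overload; the URL and SAS token are placeholders:

```js
// Minimal sketch: build one customized Pipeline and reuse it when constructing a client.
const { AnonymousCredential, BlobServiceClient, newPipeline } = require("@azure/storage-blob");

const pipeline = newPipeline(new AnonymousCredential(), {
  retryOptions: { maxTries: 4 },                       // assumed retry knob
  userAgentOptions: { userAgentPrefix: "my-app/1.0" }, // assumed telemetry prefix option
  keepAliveOptions: { enable: true }
});

// A SAS token in the URL pairs with AnonymousCredential; both values are placeholders.
const serviceClient = new BlobServiceClient(
  "https://<account-name>.blob.core.windows.net?<sas-token>",
  pipeline
);
```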
*/ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.clearPages} operation. + */ +export declare interface PageBlobClearPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when clearing pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the clearPages operation. */ +export declare type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobClearPagesHeaders; + }; +}; +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export declare class PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. + */ + private pageBlobContext; + /** + * + * Creates an instance of PageBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. 
+ */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * + * @param url - A URL string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * For example, for a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new PageBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Providing "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): PageBlobClient; + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - Options to the Page Blob Create operation. + * @returns Response data for the Page Blob Create operation. + */ + create(size: number, options?: PageBlobCreateOptions): Promise<PageBlobCreateResponse>; + /** + * Creates a page blob of the specified length. Call uploadPages to upload data + * to a page blob. If the blob with the same name already exists, the content + * of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param size - size of the page blob. + * @param options - + */ + createIfNotExists(size: number, options?: PageBlobCreateIfNotExistsOptions): Promise<PageBlobCreateIfNotExistsResponse>;
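Tying the `create` and `uploadPages` members above together, a minimal Node.js sketch (connection string, container, and blob names are placeholders) that creates a 1 KiB page blob and writes its first 512-byte page:

```js
// Minimal sketch: create a page blob and upload one 512-byte page (offsets must be 512-aligned).
const { BlobServiceClient } = require("@azure/storage-blob");

async function writeFirstPage(connectionString) {
  const pageBlobClient = BlobServiceClient
    .fromConnectionString(connectionString)
    .getContainerClient("<container-name>")
    .getPageBlobClient("<blob-name>");

  await pageBlobClient.create(1024);   // total size, must be a multiple of 512

  const page = Buffer.alloc(512, "a"); // exactly one page of data
  await pageBlobClient.uploadPages(page, 0, page.length);
}
```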
+ /** + * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param body - Data to upload + * @param offset - Offset of destination page blob + * @param count - Content length of the body, also number of bytes to be uploaded + * @param options - Options to the Page Blob Upload Pages operation. + * @returns Response data for the Page Blob Upload Pages operation. + */ + uploadPages(body: HttpRequestBody, offset: number, count: number, options?: PageBlobUploadPagesOptions): Promise<PageBlobUploadPagesResponse>; + /** + * The Upload Pages operation writes a range of pages to a page blob where the + * contents are read from a URL. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * + * @param sourceURL - Specify a URL to the copy source, Shared Access Signature (SAS) may be needed for authentication + * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - + */ + uploadPagesFromURL(sourceURL: string, sourceOffset: number, destOffset: number, count: number, options?: PageBlobUploadPagesFromURLOptions): Promise<PageBlobUploadPagesFromURLResponse>; + /** + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. + */ + clearPages(offset?: number, count?: number, options?: PageBlobClearPagesOptions): Promise<PageBlobClearPagesResponse>; + /** + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. + */ + getPageRanges(offset?: number, count?: number, options?: PageBlobGetPageRangesOptions): Promise<PageBlobGetPageRangesResponse>; + /** + * getPageRangesSegment returns a single segment of page ranges starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to PageBlob Get Page Ranges Segment operation. + */ + private listPageRangesSegment;
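And for the `clearPages`/`getPageRanges` pair above, a minimal sketch that frees the first page and prints what remains, assuming a `pageBlobClient` obtained as in the previous snippet:

```js
// Minimal sketch: clear the first 512-byte page, then inspect valid and cleared ranges.
async function clearAndInspect(pageBlobClient) {
  await pageBlobClient.clearPages(0, 512);

  const ranges = await pageBlobClient.getPageRanges(0, 1024);
  console.log("valid page ranges:", ranges.pageRange);
  console.log("cleared ranges:", ranges.clearRange);
}
```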
+ /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. + */ + private listPageRangeItems; + /** + * Returns an async iterable iterator to list of page ranges for a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. + * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRanges(offset?: number, count?: number, options?: PageBlobListPageRangesOptions): PagedAsyncIterableIterator<PageRangeInfo, PageBlobGetPageRangesResponseModel>; + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + getPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobGetPageRangesDiffOptions): Promise; + /** + * getPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call getPageRangesDiffSegment again + * (passing the the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangesDiffSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItems; + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("<your-container-name>").getPageBlobClient("<your-blob-name>");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRangesDiff(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobListPageRangesDiffOptions): PagedAsyncIterableIterator<PageRangeInfo, PageBlobGetPageRangesDiffResponseModel>; + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + getPageRangesDiffForManagedDisks(offset: number, count: number, prevSnapshotUrl: string, options?: PageBlobGetPageRangesDiffOptions): Promise<PageBlobGetPageRangesDiffResponse>; + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. + */ + resize(size: number, options?: PageBlobResizeOptions): Promise<PageBlobResizeResponse>; + /** + * Sets a page blob's sequence number.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. + */ + updateSequenceNumber(sequenceNumberAction: SequenceNumberActionType, sequenceNumber?: number, options?: PageBlobUpdateSequenceNumberOptions): Promise; + /** + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. + */ + startCopyIncremental(copySource: string, options?: PageBlobStartCopyIncrementalOptions): Promise; +} +/** Defines headers for PageBlob_copyIncremental operation. */ +export declare interface PageBlobCopyIncrementalHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the copyIncremental operation. */ +export declare type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCopyIncrementalHeaders; + }; +}; +/** Defines headers for PageBlob_create operation. 
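A minimal sketch combining the `resize` and `updateSequenceNumber` members described above, assuming a `pageBlobClient` as in the earlier snippets:

```js
// Minimal sketch: grow a page blob, then adjust the sequence number used by if-sequence-number conditions.
async function growAndStamp(pageBlobClient) {
  await pageBlobClient.resize(2048); // new size, must be a multiple of 512

  // "update" sets an explicit value; "increment" adds 1; "max" keeps the larger of current/provided.
  await pageBlobClient.updateSequenceNumber("update", 7);
  await pageBlobClient.updateSequenceNumber("increment");

  const props = await pageBlobClient.getProperties();
  console.log(`sequence number is now ${props.blobSequenceNumber}`);
}
```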
*/ +export declare interface PageBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; +} +/** + * Contains response data for the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} +/** + * Options to configure the {@link PageBlobClient.create} operation. + */ +export declare interface PageBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating a page blob. + */ + conditions?: BlobRequestConditions; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: PremiumPageBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} +/** Contains response data for the create operation. */ +export declare type PageBlobCreateResponse = PageBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobCreateHeaders; + }; +}; +/** Defines headers for PageBlob_getPageRangesDiff operation. */ +export declare interface PageBlobGetPageRangesDiffHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
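To connect the create options above to a call, a minimal sketch of idempotent provisioning with `createIfNotExists`; the metadata values and the `P10` tier are placeholder choices, and premium tiers only apply on premium storage accounts:

```js
// Minimal sketch: create the page blob only if it is missing, with metadata and an access tier.
async function ensureScratchDisk(pageBlobClient) {
  const result = await pageBlobClient.createIfNotExists(8 * 1024 * 1024 * 1024, { // 8 GiB
    metadata: { purpose: "scratch-disk" },
    blobSequenceNumber: 0,
    tier: "P10" // premium page blob tier (placeholder choice)
  });

  console.log(result.succeeded
    ? "created new page blob"
    : "page blob already existed; left unchanged");
}
```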
*/ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.getRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; + /** + * (unused) + */ + range?: string; +} +/** + * Contains response data for the {@link BlobClient.getPageRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffResponse extends PageList, PageBlobGetPageRangesDiffHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} +/** Contains response data for the getPageRangesDiff operation. */ +export declare type PageBlobGetPageRangesDiffResponseModel = PageBlobGetPageRangesDiffHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + }; +}; +/** Defines headers for PageBlob_getPageRanges operation. */ +export declare interface PageBlobGetPageRangesHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} +/** + * Contains response data for the {@link BlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} +/** Contains response data for the getPageRanges operation. */ +export declare type PageBlobGetPageRangesResponseModel = PageBlobGetPageRangesHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesHeaders; + }; +}; +/** + * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation. + */ +export declare interface PageBlobListPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; +} +/** + * Options to configure the {@link PageBlobClient.listPageRanges} operation. + */ +export declare interface PageBlobListPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} +/** + * Conditions to add to the creation of this page blob. + */ +export declare interface PageBlobRequestConditions extends BlobRequestConditions, SequenceNumberAccessConditions { +} +/** Defines headers for PageBlob_resize operation. */ +export declare interface PageBlobResizeHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. 
This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.resize} operation. + */ +export declare interface PageBlobResizeOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when resizing a page blob. + */ + conditions?: BlobRequestConditions; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the resize operation. */ +export declare type PageBlobResizeResponse = PageBlobResizeHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobResizeHeaders; + }; +}; +/** + * Options to configure {@link PageBlobClient.startCopyIncremental} operation. + */ +export declare interface PageBlobStartCopyIncrementalOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when starting a copy incremental operation. + */ + conditions?: ModifiedAccessConditions; +} +/** Defines headers for PageBlob_updateSequenceNumber operation. */ +export declare interface PageBlobUpdateSequenceNumberHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
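// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Reads valid page ranges and resizes a page blob using the option bags declared above.
// `pageBlob` is assumed to be a PageBlobClient for an existing blob.
import { PageBlobClient } from "@azure/storage-blob";

async function inspectAndResize(pageBlob: PageBlobClient): Promise<void> {
  // getPageRanges(offset, count) resolves to PageBlobGetPageRangesResponse, whose
  // pageRange / clearRange arrays follow the PageList shape from this file.
  const ranges = await pageBlob.getPageRanges(0, 4 * 1024 * 1024);
  for (const range of ranges.pageRange ?? []) {
    console.log(`valid pages at offset ${range.offset}, length ${range.count ?? 0}`);
  }

  // resize() takes the new total size plus PageBlobResizeOptions (conditions, encryptionScope).
  await pageBlob.resize(8 * 1024 * 1024);
}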
*/ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.updateSequenceNumber} operation. + */ +export declare interface PageBlobUpdateSequenceNumberOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: BlobRequestConditions; +} +/** Contains response data for the updateSequenceNumber operation. */ +export declare type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUpdateSequenceNumberHeaders; + }; +}; +/** Defines headers for PageBlob_uploadPagesFromURL operation. */ +export declare interface PageBlobUploadPagesFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation. + */ +export declare interface PageBlobUploadPagesFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. 
+ * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: PageBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} +/** Contains response data for the uploadPagesFromURL operation. */ +export declare type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesFromURLHeaders; + }; +}; +/** Defines headers for PageBlob_uploadPages operation. */ +export declare interface PageBlobUploadPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
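// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Server-side copy of a page range from a source URL using PageBlobUploadPagesFromURLOptions.
// The source URL is a placeholder and would normally be publicly readable or carry a SAS.
import { PageBlobClient } from "@azure/storage-blob";

async function copyPagesFromUrl(destination: PageBlobClient, sourceUrl: string): Promise<void> {
  // uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options?)
  const result = await destination.uploadPagesFromURL(sourceUrl, 0, 0, 512, {
    // PageBlobRequestConditions also carries SequenceNumberAccessConditions, so the write can
    // be guarded by the destination blob's current sequence number.
    conditions: { ifSequenceNumberLessThanOrEqualTo: 100 },
    // sourceContentMD5 and sourceContentCrc64 are mutually exclusive and omitted here.
  });
  console.log(`destination sequence number: ${result.blobSequenceNumber}`);
}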
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link PageBlobClient.uploadPages} operation. + */ +export declare interface PageBlobUploadPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Callback to receive events on the progress of upload pages operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} +/** Contains response data for the uploadPages operation. */ +export declare type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesHeaders; + }; +}; +/** + * List of page ranges for a blob. + */ +export declare interface PageList { + /** + * Valid non-overlapping page ranges. + */ + pageRange?: Range_2[]; + /** + * Present if the prevSnapshot parameter was specified and there were cleared + * pages between the previous snapshot and the target snapshot. 
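// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Uploads a single 512-byte page with PageBlobUploadPagesOptions. A transactional MD5 is
// attached for transport-integrity checking; transactionalContentMD5 and
// transactionalContentCrc64 cannot both be set.
import { createHash } from "crypto";
import { PageBlobClient } from "@azure/storage-blob";

async function writeFirstPage(pageBlob: PageBlobClient): Promise<void> {
  const page = Buffer.alloc(512, 0xab);
  const md5 = createHash("md5").update(page).digest();

  await pageBlob.uploadPages(page, 0, page.length, {
    transactionalContentMD5: md5,
    onProgress: (ev) => console.log(`sent ${ev.loadedBytes} bytes`),
  });
}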
+ */ + clearRange?: Range_2[]; +} +/** the list of pages */ +declare interface PageList_2 { + pageRange?: PageRange[]; + clearRange?: ClearRange[]; + continuationToken?: string; +} +declare interface PageRange { + start: number; + end: number; +} +export declare interface PageRangeInfo { + start: number; + end: number; + isClear: boolean; +} +/** + * The multipart/mixed response which contains the response for each subrequest. + */ +export declare interface ParsedBatchResponse { + /** + * The parsed sub responses. + */ + subResponses: BatchSubResponse[]; + /** + * The succeeded executed sub responses' count; + */ + subResponsesSucceededCount: number; + /** + * The failed executed sub responses' count; + */ + subResponsesFailedCount: number; +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare class Pipeline implements PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories: RequestPolicyFactory[], options?: PipelineOptions); + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} +/** + * An interface for the {@link Pipeline} class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare interface PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} +/** + * Option interface for Pipeline constructor. + */ +export declare interface PipelineOptions { + /** + * Optional. Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; +} +export { PollerLike }; +export { PollOperationState }; +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export declare enum PremiumPageBlobTier { + /** + * P4 Tier. + */ + P4 = "P4", + /** + * P6 Tier. + */ + P6 = "P6", + /** + * P10 Tier. + */ + P10 = "P10", + /** + * P15 Tier. + */ + P15 = "P15", + /** + * P20 Tier. + */ + P20 = "P20", + /** + * P30 Tier. 
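// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Builds a Pipeline with newPipeline() and hands it to a client, as the Pipeline / PipelineLike
// comments above suggest. The account URL and SAS token are placeholders; AnonymousCredential
// only suits public or SAS-authenticated resources.
import { AnonymousCredential, BlobServiceClient, newPipeline } from "@azure/storage-blob";

const pipeline = newPipeline(new AnonymousCredential(), {
  retryOptions: { maxTries: 3 },                       // StorageRetryOptions (declared later in this file)
  userAgentOptions: { userAgentPrefix: "my-app/1.0" }, // prepended to the default user agent
});

const serviceClient = new BlobServiceClient(
  "https://<account>.blob.core.windows.net?<sas-token>", // placeholder URL + SAS
  pipeline
);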
+ */ + P30 = "P30", + /** + * P40 Tier. + */ + P40 = "P40", + /** + * P50 Tier. + */ + P50 = "P50", + /** + * P60 Tier. + */ + P60 = "P60", + /** + * P70 Tier. + */ + P70 = "P70", + /** + * P80 Tier. + */ + P80 = "P80" +} +/** Defines values for PublicAccessType. */ +export declare type PublicAccessType = "container" | "blob"; +/** + * Range for Blob Service Operations. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations + */ +declare interface Range_2 { + /** + * StartByte, larger than or equal 0. + */ + offset: number; + /** + * Optional. Count of bytes, larger than 0. + * If not provided, will return bytes from offset to the end. + */ + count?: number; +} +export { Range_2 as Range }; +/** Defines values for RehydratePriority. */ +export declare type RehydratePriority = "High" | "Standard"; +export { RequestPolicy }; +export { RequestPolicyFactory }; +export { RequestPolicyOptions }; +export { RestError }; +/** the retention policy which determines how long the associated data should persist */ +export declare interface RetentionPolicy { + /** Indicates whether a retention policy is enabled for the storage service */ + enabled: boolean; + /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */ + days?: number; +} +/** + * Allowed IP range for a SAS. + */ +export declare interface SasIPRange { + /** + * Starting IP address in the IP range. + * If end IP doesn't provide, start IP will the only IP allowed. + */ + start: string; + /** + * Optional. IP address that ends the IP range. + * If not provided, start IP will the only IP allowed. + */ + end?: string; +} +/** + * Protocols for generated SAS. + */ +export declare enum SASProtocol { + /** + * Protocol that allows HTTPS only + */ + Https = "https", + /** + * Protocol that allows both HTTPS and HTTP + */ + HttpsAndHttp = "https,http" +} +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. + */ +export declare class SASQueryParameters { + /** + * The storage API version. + */ + readonly version: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + readonly protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + readonly startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + readonly expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + readonly permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + readonly services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. 
+ */ + readonly resourceTypes?: string; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + readonly identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + readonly encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + readonly resource?: string; + /** + * The signature for the SAS token. + */ + readonly signature: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + readonly cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + readonly contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + readonly contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + readonly contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + readonly contentType?: string; + /** + * Inner value of getter ipRange. + */ + private readonly ipRangeInner?; + /** + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. + */ + private readonly signedOid?; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + private readonly signedTenantId?; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + private readonly signedStartsOn?; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + private readonly signedExpiresOn?; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + private readonly signedService?; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + private readonly signedVersion?; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + readonly preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + readonly correlationId?: string; + /* + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + readonly ipRange: SasIPRange | undefined; + /** + * Creates an instance of SASQueryParameters. 
+ * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param permissions - Representing the storage permissions + * @param services - Representing the storage services being accessed (only for Account SAS) + * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS) + * @param protocol - Representing the allowed HTTP protocol(s) + * @param startsOn - Representing the start time for this SAS token + * @param expiresOn - Representing the expiry time for this SAS token + * @param ipRange - Representing the range of valid IP addresses for this SAS token + * @param identifier - Representing the signed identifier (only for Service SAS) + * @param resource - Representing the storage container or blob (only for Service SAS) + * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS) + * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS) + * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS) + * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS) + * @param contentType - Representing the content-type header (only for Blob/File Service SAS) + * @param userDelegationKey - Representing the user delegation key properties + * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS) + * @param correlationId - Representing the correlation ID (only for User Delegation SAS) + * @param encryptionScope - + */ + constructor(version: string, signature: string, permissions?: string, services?: string, resourceTypes?: string, protocol?: SASProtocol, startsOn?: Date, expiresOn?: Date, ipRange?: SasIPRange, identifier?: string, resource?: string, cacheControl?: string, contentDisposition?: string, contentEncoding?: string, contentLanguage?: string, contentType?: string, userDelegationKey?: UserDelegationKey, preauthorizedAgentObjectId?: string, correlationId?: string, encryptionScope?: string); + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param options - Optional. Options to construct the SASQueryParameters. + */ + constructor(version: string, signature: string, options?: SASQueryParametersOptions); + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString(): string; + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + private tryAppendQueryParameter; +} +/** + * Options to construct {@link SASQueryParameters}. + */ +export declare interface SASQueryParametersOptions { + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + resourceTypes?: string; + /** + * Optional. 
The allowed HTTP protocol(s). + */ + protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + resource?: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + contentType?: string; + /** + * User delegation key properties. + */ + userDelegationKey?: UserDelegationKey; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}. + * This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId?: string; +} +/** Parameter group */ +export declare interface SequenceNumberAccessConditions { + /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */ + ifSequenceNumberLessThanOrEqualTo?: number; + /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */ + ifSequenceNumberLessThan?: number; + /** Specify this header value to operate only on a blob if it has the specified sequence number. */ + ifSequenceNumberEqualTo?: number; +} +/** Defines values for SequenceNumberActionType. */ +export declare type SequenceNumberActionType = "max" | "update" | "increment"; +/** Defines headers for Service_filterBlobs operation. */ +export declare interface ServiceFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
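// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Produces a SASQueryParameters instance with generateBlobSASQueryParameters() and appends it
// to a blob URL. Account name, key, container and blob names are placeholders.
import {
  BlobSASPermissions,
  generateBlobSASQueryParameters,
  SASProtocol,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("<account>", "<account-key>");

const sas = generateBlobSASQueryParameters(
  {
    containerName: "my-container",
    blobName: "report.pdf",
    permissions: BlobSASPermissions.parse("r"),        // read-only
    protocol: SASProtocol.Https,
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),  // valid for one hour
  },
  credential
);

// SASQueryParameters.toString() encodes the query string described by the class above.
const blobUrlWithSas = `https://<account>.blob.core.windows.net/my-container/report.pdf?${sas.toString()}`;
console.log(blobUrlWithSas);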
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ServiceFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment & ServiceFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; +/** + * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation. + */ +export declare interface ServiceGenerateAccountSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} +/** Defines headers for Service_getAccountInfo operation. */ +export declare interface ServiceGetAccountInfoHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Identifies the sku name of the account */ + skuName?: SkuName; + /** Identifies the account kind */ + accountKind?: AccountKind; + /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */ + isHierarchicalNamespaceEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getAccountInfo} operation. + */ +export declare interface ServiceGetAccountInfoOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getAccountInfo operation. */ +export declare type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & { + /** The underlying HTTP response. 
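// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Exercises the two service-level helpers whose option bags are declared above. The tag filter
// expression and permission string are assumptions; generateAccountSasUrl() requires a client
// constructed with a StorageSharedKeyCredential.
import { AccountSASPermissions, BlobServiceClient, SASProtocol } from "@azure/storage-blob";

async function serviceHelpers(service: BlobServiceClient): Promise<void> {
  // ServiceFindBlobByTagsOptions is the optional second argument of findBlobsByTags().
  for await (const blob of service.findBlobsByTags(`project='atlas'`)) {
    console.log(`${blob.containerName}/${blob.name}`);
  }

  // ServiceGenerateAccountSasUrlOptions drives the optional fourth argument.
  const sasUrl = service.generateAccountSasUrl(
    new Date(Date.now() + 15 * 60 * 1000),  // expires in 15 minutes
    AccountSASPermissions.parse("rl"),      // read + list
    "sco",                                  // service, container and object resource types
    { protocol: SASProtocol.Https }
  );
  console.log(sasUrl);
}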
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetAccountInfoHeaders; + }; +}; +/** Defines headers for Service_getProperties operation. */ +export declare interface ServiceGetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getProperties} operation. + */ +export declare interface ServiceGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getProperties operation. */ +export declare type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders & BlobServiceProperties & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceProperties; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetPropertiesHeaders; + }; +}; +/** Defines headers for Service_getStatistics operation. */ +export declare interface ServiceGetStatisticsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.getStatistics} operation. + */ +export declare interface ServiceGetStatisticsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the getStatistics operation. */ +export declare type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders & BlobServiceStatistics & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceStatistics; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetStatisticsHeaders; + }; +}; +/** Defines headers for Service_getUserDelegationKey operation. 
*/ +export declare interface ServiceGetUserDelegationKeyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the Service - Get User Delegation Key. + */ +export declare interface ServiceGetUserDelegationKeyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** + * Contains response data for the {@link getUserDelegationKey} operation. + */ +export declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey & ServiceGetUserDelegationKeyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceGetUserDelegationKeyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: UserDelegationKeyModel; + }; +}; +/** + * Options to configure the {@link BlobServiceClient.listContainers} operation. + */ +export declare interface ServiceListContainersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether the container's metadata + * should be returned as part of the response body. + */ + includeMetadata?: boolean; + /** + * Specifies whether soft deleted containers should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether system containers should be included in the response. + */ + includeSystem?: boolean; +} +/** Defines headers for Service_listContainersSegment operation. */ +export declare interface ServiceListContainersSegmentHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** Contains response data for the listContainersSegment operation. */ +export declare type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders & ListContainersSegmentResponse & { + /** The underlying HTTP response. 
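// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Fetches a user delegation key (the client must be authenticated with a TokenCredential) and
// lists containers with the ServiceListContainersOptions flags declared above. Construction of
// the client is assumed to happen elsewhere.
import { BlobServiceClient } from "@azure/storage-blob";

async function delegationKeyAndContainers(service: BlobServiceClient): Promise<void> {
  // ServiceGetUserDelegationKeyResponse merges UserDelegationKey with the response headers.
  const startsOn = new Date();
  const expiresOn = new Date(Date.now() + 2 * 60 * 60 * 1000);
  const userDelegationKey = await service.getUserDelegationKey(startsOn, expiresOn);
  console.log(`key valid until ${userDelegationKey.signedExpiresOn.toISOString()}`);

  // includeMetadata / includeDeleted map directly onto ServiceListContainersOptions.
  for await (const container of service.listContainers({ includeMetadata: true })) {
    console.log(container.name);
  }
}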
*/ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: ListContainersSegmentResponse; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceListContainersSegmentHeaders; + }; +}; +/** + * Options to configure {@link BlobServiceClient.renameContainer} operation. + */ +export declare interface ServiceRenameContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Condition to meet for the source container. + */ + sourceCondition?: LeaseAccessConditions; +} +/** Defines headers for Service_setProperties operation. */ +export declare interface ServiceSetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} +/** + * Options to configure the {@link BlobServiceClient.setProperties} operation. + */ +export declare interface ServiceSetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} +/** Contains response data for the setProperties operation. */ +export declare type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSetPropertiesHeaders; + }; +}; +/** Defines headers for Service_submitBatch operation. */ +export declare interface ServiceSubmitBatchHeaders { + /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */ + contentType?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** Error Code */ + errorCode?: string; +} +/** Optional parameters. */ +export declare interface ServiceSubmitBatchOptionalParamsModel extends coreHttp.OperationOptions { + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; +} +/** Contains response data for the submitBatch operation. 
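// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// The submitBatch operation whose headers are declared above is normally driven through
// BlobBatchClient rather than called directly. The blob URLs are placeholders, and the
// credential is assumed to be a StorageSharedKeyCredential created elsewhere.
import { BlobServiceClient, StorageSharedKeyCredential } from "@azure/storage-blob";

async function batchDelete(
  service: BlobServiceClient,
  credential: StorageSharedKeyCredential
): Promise<void> {
  const batchClient = service.getBlobBatchClient();

  // Each sub-request's outcome surfaces through the sub-response counters on the result,
  // mirroring the ParsedBatchResponse shape declared earlier in this file.
  const result = await batchClient.deleteBlobs(
    [
      "https://<account>.blob.core.windows.net/logs/2022-06-01.log", // placeholder URLs
      "https://<account>.blob.core.windows.net/logs/2022-06-02.log",
    ],
    credential
  );
  console.log(`succeeded: ${result.subResponsesSucceededCount}, failed: ${result.subResponsesFailedCount}`);
}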
*/ +export declare type ServiceSubmitBatchResponseModel = ServiceSubmitBatchHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; +/** + * Options to configure {@link BlobServiceClient.undeleteContainer} operation. + */ +export declare interface ServiceUndeleteContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies the new name of the restored container. + * Will use its original name if this is not specified. + * @deprecated Restore container to a different name is not supported by service anymore. + */ + destinationContainerName?: string; +} +/** + * Signed identifier. + */ +export declare interface SignedIdentifier { + /** + * a unique id + */ + id: string; + /** + * Access Policy + */ + accessPolicy: { + /** + * Optional. The date-time the policy is active + */ + startsOn?: Date; + /** + * Optional. The date-time the policy expires + */ + expiresOn?: Date; + /** + * The permissions for the acl policy + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + */ + permissions?: string; + }; +} +/** signed identifier */ +export declare interface SignedIdentifierModel { + /** a unique id */ + id: string; + /** An Access policy */ + accessPolicy: AccessPolicy; +} +/** Defines values for SkuName. */ +export declare type SkuName = "Standard_LRS" | "Standard_GRS" | "Standard_RAGRS" | "Standard_ZRS" | "Premium_LRS"; +/** The properties that enable an account to host a static website */ +export declare interface StaticWebsite { + /** Indicates whether this account is hosting a static website */ + enabled: boolean; + /** The default name of the index page under each directory */ + indexDocument?: string; + /** The absolute path of the custom 404 page */ + errorDocument404Path?: string; + /** Absolute path of the default index page */ + defaultIndexDocumentPath?: string; +} +/** + * Defines the known cloud audiences for Storage. + */ +export declare enum StorageBlobAudience { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageOAuthScopes = "https://storage.azure.com/.default", + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + DiskComputeOAuthScopes = "https://disk.compute.azure.com/.default" +} +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export declare class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. 
+ * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; +} +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export declare class StorageBrowserPolicyFactory implements RequestPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy; +} +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +declare abstract class StorageClient { + /** + * Encoded URL string value. + */ + readonly url: string; + readonly accountName: string; + /* Excluded from this release type: pipeline */ + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. + */ + protected readonly storageClientContext: StorageClientContext; + /** + */ + protected readonly isHttps: boolean; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + protected constructor(url: string, pipeline: PipelineLike); +} +declare class StorageClientContext extends coreHttp.ServiceClient { + url: string; + version: string; + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url: string, options?: StorageClientOptionalParams); +} +/** Optional parameters. */ +declare interface StorageClientOptionalParams extends coreHttp.ServiceClientOptions { + /** Specifies the version of the operation to use for this request. */ + version?: string; + /** Overrides client endpoint. */ + endpoint?: string; +} +/** + * The OAuth scope to use with Azure Storage. + */ +export declare const StorageOAuthScopes: string | string[]; +/** + * Options interface for the {@link newPipeline} function. + */ +export declare interface StoragePipelineOptions { + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; + /** + * Configures the built-in retry policy behavior. + */ + retryOptions?: StorageRetryOptions; + /** + * Keep alive configurations. Default keep-alive is enabled. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; + /** + * The audience used to retrieve an AAD token. + */ + audience?: string | string[]; +} +/** + * Storage Blob retry options interface. + */ +export declare interface StorageRetryOptions { + /** + * Optional. StorageRetryPolicyType, default is exponential retry policy. 
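// --- Illustrative usage sketch (editor's addition, not part of the vendored diff) ---
// Wires StorageRetryOptions into a client through the StoragePipelineOptions bag declared
// above. The account URL and key are placeholders; the numeric values are representative,
// not recommendations.
import {
  BlobServiceClient,
  StorageRetryPolicyType,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

const retryingClient = new BlobServiceClient(
  "https://<account>.blob.core.windows.net",
  new StorageSharedKeyCredential("<account>", "<account-key>"),
  {
    retryOptions: {
      retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,
      maxTries: 6,                    // 1 initial attempt + 5 retries
      tryTimeoutInMs: 60 * 1000,      // per-attempt cap
      retryDelayInMs: 4 * 1000,
      maxRetryDelayInMs: 120 * 1000,
      // secondaryHost: "https://<account>-secondary.blob.core.windows.net", // RA-GRS reads only
    },
  }
);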
+ */ + readonly retryPolicyType?: StorageRetryPolicyType; + /** + * Optional. Max try number of attempts, default is 4. + * A value of 1 means 1 try and no retries. + * A value smaller than 1 means default retry number of attempts. + */ + readonly maxTries?: number; + /** + * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request. + * A value of zero or undefined means no default timeout on SDK client, Azure + * Storage server's default timeout policy will be used. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations + */ + readonly tryTimeoutInMs?: number; + /** + * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms). + * The delay increases (exponentially or linearly) with each retry up to a maximum specified by + * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs. + */ + readonly retryDelayInMs?: number; + /** + * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms). + * If you specify 0, then you must also specify 0 for retryDelayInMs. + */ + readonly maxRetryDelayInMs?: number; + /** + * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined + * (the default) then operations are not retried against another host. + * + * NOTE: Before setting this field, make sure you understand the issues around + * reading stale and potentially-inconsistent data at + * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs} + */ + readonly secondaryHost?: string; +} +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export declare class StorageRetryPolicy extends BaseRequestPolicy { + /** + * RetryOptions. + */ + private readonly retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryOptions?: StorageRetryOptions); + /** + * Sends request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + protected attemptSendRequest(request: WebResource, secondaryHas404: boolean, attempt: number): Promise; + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + protected shouldRetry(isPrimaryRetry: boolean, attempt: number, response?: HttpOperationResponse, err?: RestError): boolean; + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + private delay; +} +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. 
+ */ +export declare class StorageRetryPolicyFactory implements RequestPolicyFactory { + private retryOptions?; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions?: StorageRetryOptions); + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy; +} +/** + * RetryPolicy types. + */ +export declare enum StorageRetryPolicyType { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + EXPONENTIAL = 0, + /** + * Linear retry. Retry time delay grows linearly. + */ + FIXED = 1 +} +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export declare class StorageSharedKeyCredential extends Credential_2 { + /** + * Azure Storage account name; readonly. + */ + readonly accountName: string; + /** + * Azure Storage account key; readonly. + */ + private readonly accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName: string, accountKey: string); + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageSharedKeyCredentialPolicy; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign: string): string; +} +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export declare class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + private readonly factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, factory: StorageSharedKeyCredential); + /** + * Signs request. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + private getHeaderValueToSign; + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + private getCanonicalizedHeadersString; + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + private getCanonicalizedResourceString; +} +/** Defines values for SyncCopyStatusType. 
*/ +export declare type SyncCopyStatusType = "success"; +/** + * Specifies HTTP options for conditional requests based on blob tags. + */ +export declare interface TagConditions { + /** + * Optional SQL statement to apply to the tags of the blob. + */ + tagConditions?: string; +} +/** + * Blob tags. + */ +export declare type Tags = Record; +/** + * A user delegation key. + */ +export declare interface UserDelegationKey { + /** + * The Azure Active Directory object ID in GUID format. + */ + signedObjectId: string; + /** + * The Azure Active Directory tenant ID in GUID format. + */ + signedTenantId: string; + /** + * The date-time the key is active. + */ + signedStartsOn: Date; + /** + * The date-time the key expires. + */ + signedExpiresOn: Date; + /** + * Abbreviation of the Azure Storage service that accepts the key. + */ + signedService: string; + /** + * The service version that created the key. + */ + signedVersion: string; + /** + * The key as a base64 string. + */ + value: string; +} +/** A user delegation key */ +export declare interface UserDelegationKeyModel { + /** The Azure Active Directory object ID in GUID format. */ + signedObjectId: string; + /** The Azure Active Directory tenant ID in GUID format */ + signedTenantId: string; + /** The date-time the key is active */ + signedStartsOn: string; + /** The date-time the key expires */ + signedExpiresOn: string; + /** Abbreviation of the Azure Storage service that accepts the key */ + signedService: string; + /** The service version that created the key */ + signedVersion: string; + /** The key as a base64 string */ + value: string; +} +export { WebResource }; +export {}; diff --git a/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts b/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts new file mode 100644 index 0000000000..74f594b06f --- /dev/null +++ b/node_modules/@azure/storage-blob/types/latest/storage-blob.d.ts @@ -0,0 +1,9716 @@ +/// + +import { AbortSignalLike } from '@azure/abort-controller'; +import { AzureLogger } from '@azure/logger'; +import { BaseRequestPolicy } from '@azure/core-http'; +import * as coreHttp from '@azure/core-http'; +import { deserializationPolicy } from '@azure/core-http'; +import { HttpHeaders } from '@azure/core-http'; +import { HttpOperationResponse } from '@azure/core-http'; +import { HttpRequestBody } from '@azure/core-http'; +import { HttpResponse } from '@azure/core-http'; +import { HttpClient as IHttpClient } from '@azure/core-http'; +import { KeepAliveOptions } from '@azure/core-http'; +import { OperationTracingOptions } from '@azure/core-tracing'; +import { PagedAsyncIterableIterator } from '@azure/core-paging'; +import { PollerLike } from '@azure/core-lro'; +import { PollOperationState } from '@azure/core-lro'; +import { ProxyOptions } from '@azure/core-http'; +import { Readable } from 'stream'; +import { RequestPolicy } from '@azure/core-http'; +import { RequestPolicyFactory } from '@azure/core-http'; +import { RequestPolicyOptions } from '@azure/core-http'; +import { RestError } from '@azure/core-http'; +import { ServiceClientOptions } from '@azure/core-http'; +import { TokenCredential } from '@azure/core-http'; +import { TransferProgressEvent } from '@azure/core-http'; +import { UserAgentOptions } from '@azure/core-http'; +import { WebResource } from '@azure/core-http'; + +/** An Access policy */ +export declare interface AccessPolicy { + /** the date-time the policy is active */ + startsOn?: string; + /** the date-time the policy expires */ + expiresOn?: 
string; + /** the permissions for the acl policy */ + permissions?: string; +} + +/** Defines values for AccessTier. */ +export declare type AccessTier = "P4" | "P6" | "P10" | "P15" | "P20" | "P30" | "P40" | "P50" | "P60" | "P70" | "P80" | "Hot" | "Cool" | "Archive"; + +/** Defines values for AccountKind. */ +export declare type AccountKind = "Storage" | "BlobStorage" | "StorageV2" | "FileStorage" | "BlockBlobStorage"; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the + * values are set, this should be serialized with toString and set as the permissions field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class AccountSASPermissions { + /** + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - + */ + static parse(permissions: string): AccountSASPermissions; + /** + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: AccountSASPermissionsLike): AccountSASPermissions; + /** + * Permission to read resources and list queues and tables granted. + */ + read: boolean; + /** + * Permission to write resources granted. + */ + write: boolean; + /** + * Permission to create blobs and files granted. + */ + delete: boolean; + /** + * Permission to delete versions granted. + */ + deleteVersion: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add: boolean; + /** + * Permission to create blobs and files granted. + */ + create: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update: boolean; + /** + * Permission to get and delete messages granted. + */ + process: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Permission to filter blobs. + */ + filter: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} + +/** + * A type that looks like an account SAS permission. + * Used in {@link AccountSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface AccountSASPermissionsLike { + /** + * Permission to read resources and list queues and tables granted. + */ + read?: boolean; + /** + * Permission to write resources granted. + */ + write?: boolean; + /** + * Permission to delete blobs and files granted. + */ + delete?: boolean; + /** + * Permission to delete versions granted. 
+ */ + deleteVersion?: boolean; + /** + * Permission to list blob containers, blobs, shares, directories, and files granted. + */ + list?: boolean; + /** + * Permission to add messages, table entities, and append to blobs granted. + */ + add?: boolean; + /** + * Permission to create blobs and files granted. + */ + create?: boolean; + /** + * Permissions to update messages and table entities granted. + */ + update?: boolean; + /** + * Permission to get and delete messages granted. + */ + process?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Permission to filter blobs. + */ + filter?: boolean; + /** + * Permission to set immutability policy. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the + * values are set, this should be serialized with toString and set as the resources field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but + * the order of the resources is particular and this class guarantees correctness. + */ +export declare class AccountSASResourceTypes { + /** + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - + */ + static parse(resourceTypes: string): AccountSASResourceTypes; + /** + * Permission to access service level APIs granted. + */ + service: boolean; + /** + * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. + */ + container: boolean; + /** + * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. + */ + object: boolean; + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString(): string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value + * to true means that any SAS which uses these permissions will grant access to that service. Once all the + * values are set, this should be serialized with toString and set as the services field on an + * {@link AccountSASSignatureValues} object. It is possible to construct the services string without this class, but + * the order of the services is particular and this class guarantees correctness. + */ +export declare class AccountSASServices { + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services: string): AccountSASServices; + /** + * Permission to access blob resources granted. + */ + blob: boolean; + /** + * Permission to access file resources granted. + */ + file: boolean; + /** + * Permission to access queue resources granted. + */ + queue: boolean; + /** + * Permission to access table resources granted. 
+ */ + table: boolean; + /** + * Converts the given services to a string. + * + */ + toString(): string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * AccountSASSignatureValues is used to generate a Shared Access Signature (SAS) for an Azure Storage account. Once + * all the values here are set appropriately, call {@link generateAccountSASQueryParameters} to obtain a representation + * of the SAS which can actually be applied to blob urls. Note: that both this class and {@link SASQueryParameters} + * exist because the former is mutable and a logical representation while the latter is immutable and used to generate + * actual REST requests. + * + * @see https://docs.microsoft.com/en-us/azure/storage/common/storage-dotnet-shared-access-signature-part-1 + * for more conceptual information on SAS + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * for descriptions of the parameters, including which are required + */ +export declare interface AccountSASSignatureValues { + /** + * If not provided, this defaults to the service version targeted by this version of the library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * The time after which the SAS will no longer work. + */ + expiresOn: Date; + /** + * Specifies which operations the SAS user may perform. Please refer to {@link AccountSASPermissions} for help + * constructing the permissions string. + */ + permissions: AccountSASPermissions; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * The values that indicate the services accessible with this SAS. Please refer to {@link AccountSASServices} to + * construct this value. + */ + services: string; + /** + * The values that indicate the resource types accessible with this SAS. Please refer + * to {@link AccountSASResourceTypes} to construct this value. + */ + resourceTypes: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} + +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +export declare class AnonymousCredential extends Credential_2 { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): AnonymousCredentialPolicy; +} + +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +export declare class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); +} + +/** Defines headers for AppendBlob_appendBlockFromUrl operation. */ +export declare interface AppendBlobAppendBlockFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. 
Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link AppendBlobClient.appendBlockFromURL} operation. + */ +export declare interface AppendBlobAppendBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content from the URI. + * This hash is used to verify the integrity of the append block during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. 
+ */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the appendBlockFromUrl operation. */ +export declare type AppendBlobAppendBlockFromUrlResponse = AppendBlobAppendBlockFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockFromUrlHeaders; + }; +}; + +/** Defines headers for AppendBlob_appendBlock operation. */ +export declare interface AppendBlobAppendBlockHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This response header is returned only for append operations. It returns the offset at which the block was committed, in bytes. */ + blobAppendOffset?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link AppendBlobClient.appendBlock} operation. + */ +export declare interface AppendBlobAppendBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when appending append blob blocks. + */ + conditions?: AppendBlobRequestConditions; + /** + * Callback to receive events on the progress of append block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the append block content. This hash is used to verify the integrity of the append block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the appendBlock operation. */ +export declare type AppendBlobAppendBlockResponse = AppendBlobAppendBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobAppendBlockHeaders; + }; +}; + +/** + * AppendBlobClient defines a set of operations applicable to append blobs. + */ +export declare class AppendBlobClient extends BlobClient { + /** + * appendBlobsContext provided by protocol layer. + */ + private appendBlobContext; + /** + * + * Creates an instance of AppendBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. 
+ * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of AppendBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to an append blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage append blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/appendblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): AppendBlobClient; + /** + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * + * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` + */ + create(options?: AppendBlobCreateOptions): Promise; + /** + * Creates a 0-length append blob. 
Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - + */ + createIfNotExists(options?: AppendBlobCreateIfNotExistsOptions): Promise; + /** + * Seals the append blob, making it read only. + * + * @param options - + */ + seal(options?: AppendBlobSealOptions): Promise; + /** + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block + * + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient(""); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob. + * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` + */ + appendBlock(body: HttpRequestBody, contentLength: number, options?: AppendBlobAppendBlockOptions): Promise; + /** + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url + * + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation. + * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - + */ + appendBlockFromURL(sourceURL: string, sourceOffset: number, count: number, options?: AppendBlobAppendBlockFromURLOptions): Promise; +} + +/** Defines headers for AppendBlob_create operation. */ +export declare interface AppendBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link AppendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * HTTP headers to set when creating append blobs. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; +} + +/** + * Contains response data for the {@link appendBlobClient.createIfNotExists} operation. + */ +export declare interface AppendBlobCreateIfNotExistsResponse extends AppendBlobCreateResponse { + /** + * Indicate whether the blob is successfully created. Is false when the blob is not changed as it already exists. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link AppendBlobClient.create} operation. + */ +export declare interface AppendBlobCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when creating append blobs. 
+ */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when creating append blobs. A common header + * to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the create operation. */ +export declare type AppendBlobCreateResponse = AppendBlobCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: AppendBlobCreateHeaders; + }; +}; + +/** + * Conditions to add to the creation of this append blob. + */ +export declare interface AppendBlobRequestConditions extends BlobRequestConditions, AppendPositionAccessConditions { +} + +/** + * Options to configure {@link AppendBlobClient.seal} operation. + */ +export declare interface AppendBlobSealOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet. + */ + conditions?: AppendBlobRequestConditions; +} + +/** Parameter group */ +export declare interface AppendPositionAccessConditions { + /** Optional conditional header. The max length in bytes permitted for the append blob. If the Append Block operation would cause the blob to exceed that limit or if the blob size is already greater than the value specified in this header, the request will fail with MaxBlobSizeConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + maxSize?: number; + /** Optional conditional header, used only for the Append Block operation. A number indicating the byte offset to compare. Append Block will succeed only if the append position is equal to this number. If it is not, the request will fail with the AppendPositionConditionNotMet error (HTTP status code 412 - Precondition Failed). */ + appendPosition?: number; +} + +/** Defines values for ArchiveStatus. */ +export declare type ArchiveStatus = "rehydrate-pending-to-hot" | "rehydrate-pending-to-cool"; + +export { BaseRequestPolicy } + +/** + * A request associated with a batch operation. + */ +export declare interface BatchSubRequest { + /** + * The URL of the resource to request operation. + */ + url: string; + /** + * The credential used for sub request. 
+ * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. + * You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; +} + +/** + * The response data associated with a single request within a batch operation. + */ +export declare interface BatchSubResponse { + /** + * The status code of the sub operation. + */ + status: number; + /** + * The status message of the sub operation. + */ + statusMessage: string; + /** + * The error code of the sub operation, if the sub operation failed. + */ + errorCode?: string; + /** + * The HTTP response headers. + */ + headers: HttpHeaders; + /** + * The body as text. + */ + bodyAsText?: string; + /** + * The batch sub request corresponding to the sub response. + */ + _request: BatchSubRequest; +} + +/** Defines headers for Blob_abortCopyFromURL operation. */ +export declare interface BlobAbortCopyFromURLHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.abortCopyFromURL} operation. + */ +export declare interface BlobAbortCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** Contains response data for the abortCopyFromURL operation. */ +export declare type BlobAbortCopyFromURLResponse = BlobAbortCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobAbortCopyFromURLHeaders; + }; +}; + +/** + * Options to configure Blob - Acquire Lease operation. + */ +export declare interface BlobAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * A BlobBatch represents an aggregated set of operations on blobs. + * Currently, only `delete` and `setAccessTier` are supported. + */ +export declare class BlobBatch { + private batchRequest; + private readonly batch; + private batchType; + constructor(); + /** + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. 
+ * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + */ + getMultiPartContentType(): string; + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody(): string; + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests(): Map; + private addSubRequestInternal; + private setBatchType; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlob(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * The deleteBlob operation marks the specified blob or snapshot for deletion. + * The blob is later deleted during garbage collection. + * Only one kind of operation is allowed per batch request. + * + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param options - + */ + deleteBlob(blobClient: BlobClient, options?: BlobDeleteOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param url - The url of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
+ * @param tier - + * @param options - + */ + setBlobAccessTier(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * The setBlobAccessTier operation sets the tier on a blob. + * The operation is allowed on block blobs in a blob storage or general purpose v2 account. + * Only one kind of operation is allowed per batch request. + * + * A block blob's tier determines Hot/Cool/Archive storage type. + * This operation does not update the blob's ETag. + * For detailed information about block blob level tiering + * see [hot, cool, and archive access tiers](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers). + * The operation will be authenticated and authorized + * with specified credential. See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClient - The BlobClient. + * @param tier - + * @param options - + */ + setBlobAccessTier(blobClient: BlobClient, tier: AccessTier, options?: BlobSetTierOptions): Promise; +} + +/** + * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + */ +export declare class BlobBatchClient { + private serviceOrContainerContext; + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobBatchClient. + * + * @param url - A url pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. + */ + createBatch(): BlobBatch; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operations will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resources to delete. 
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - + */ + deleteBlobs(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: BlobDeleteOptions): Promise; + /** + * Create multiple delete operations to mark the specified blobs or snapshots for deletion. + * Note that in order to delete a blob, you must delete all of its snapshots. + * You can delete both at the same time. See [delete operation details](https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob). + * The operation(subrequest) will be authenticated and authorized with specified credential. + * See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs to delete. + * @param options - + */ + deleteBlobs(blobClients: BlobClient[], options?: BlobDeleteOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param urls - The urls of the blob resource to delete. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param tier - + * @param options - + */ + setBlobsAccessTier(urls: string[], credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Create multiple set tier operations to set the tier on a blob. + * The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * See [set blob tier details](https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier). + * The operation(subrequest) will be authenticated and authorized + * with specified credential.See [blob batch authorization details](https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#authorization). + * + * @param blobClients - The BlobClients for the blobs which should have a new tier set. 
+ * @param tier - + * @param options - + */ + setBlobsAccessTier(blobClients: BlobClient[], tier: AccessTier, options?: BlobSetTierOptions): Promise; + /** + * Submit batch request which consists of multiple subrequests. + * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` + * + * Example usage: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * Example using a lease: + * + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. + * @param options - + */ + submitBatch(batchRequest: BlobBatch, options?: BlobBatchSubmitBatchOptionalParams): Promise; +} + +/** + * Contains response data for the {@link deleteBlobs} operation. + */ +export declare type BlobBatchDeleteBlobsResponse = BlobBatchSubmitBatchResponse; + +/** + * Contains response data for the {@link setBlobsAccessTier} operation. + */ +export declare type BlobBatchSetBlobsAccessTierResponse = BlobBatchSubmitBatchResponse; + +/** + * Options to configure the Service - Submit Batch Optional Params. + */ +export declare interface BlobBatchSubmitBatchOptionalParams extends ServiceSubmitBatchOptionalParamsModel { +} + +/** + * Contains response data for blob batch operations. + */ +export declare type BlobBatchSubmitBatchResponse = ParsedBatchResponse & ServiceSubmitBatchHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLOptions extends BlobStartCopyFromURLOptions { + /** + * The amount of time in milliseconds the poller should wait between + * calls to the service to determine the status of the Blob copy. + * Defaults to 15 seconds. + */ + intervalInMs?: number; + /** + * Callback to receive the state of the copy progress. + */ + onProgress?: (state: BlobBeginCopyFromUrlPollState) => void; + /** + * Serialized poller state that can be used to resume polling from. + * This may be useful when starting a copy on one process or thread + * and you wish to continue polling on another process or thread. + * + * To get serialized poller state, call `poller.toString()` on an existing + * poller. + */ + resumeFrom?: string; +} + +/** + * The state used by the poller returned from {@link BlobClient.beginCopyFromURL}. + * + * This state is passed into the user-specified `onProgress` callback + * whenever copy progress is detected. + */ +export declare interface BlobBeginCopyFromUrlPollState extends PollOperationState { + /** + * The instance of {@link BlobClient} that was used when calling {@link BlobClient.beginCopyFromURL}. 
+ */ + readonly blobClient: CopyPollerBlobClient; + /** + * The copyId that identifies the in-progress blob copy. + */ + copyId?: string; + /** + * the progress of the blob copy as reported by the service. + */ + copyProgress?: string; + /** + * The source URL provided in {@link BlobClient.beginCopyFromURL}. + */ + copySource: string; + /** + * The options that were passed to the initial {@link BlobClient.beginCopyFromURL} call. + * This is exposed for the poller and should not be modified directly. + */ + readonly startCopyFromURLOptions?: BlobStartCopyFromURLOptions; +} + +/** + * Contains response data for the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobBeginCopyFromURLResponse extends BlobStartCopyFromURLResponse { +} + +/** + * Options to configure Blob - Break Lease operation. + */ +export declare interface BlobBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Blob - Change Lease operation. + */ +export declare interface BlobChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, + * append blob, or page blob. + */ +export declare class BlobClient extends StorageClient { + /** + * blobContext provided by protocol layer. + */ + private blobContext; + private _name; + private _containerName; + private _versionId?; + private _snapshot?; + /** + * The name of the blob. + */ + get name(): string; + /** + * The name of the storage container the blob is associated with. + */ + get containerName(): string; + /** + * + * Creates an instance of BlobClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. 
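+ *
+ * Example usage (a minimal sketch; the account, container, blob name and `sasString` are placeholders,
+ * and `DefaultAzureCredential` is assumed to be imported from `@azure/identity`):
+ *
+ * ```js
+ * // Anonymous access to a blob URL that already carries a SAS token
+ * const blobClient = new BlobClient(
+ *   "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"
+ * );
+ *
+ * // Or authenticate with an Azure AD token credential
+ * const blobClientWithAad = new BlobClient(
+ *   "https://myaccount.blob.core.windows.net/mycontainer/blob",
+ *   new DefaultAzureCredential()
+ * );
+ * ```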
+ * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot: string): BlobClient; + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId: string): BlobClient; + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient(): AppendBlobClient; + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient(): BlockBlobClient; + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient(): PageBlobClient; + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. 
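+ *
+ * Example downloading only a byte range (a sketch; `blobClient` is assumed to be an existing BlobClient,
+ * the offsets are illustrative, and `loadedBytes` is assumed to be the field exposed by the
+ * TransferProgressEvent passed to `onProgress`):
+ *
+ * ```js
+ * // Download 1024 bytes starting at offset 256 and report progress
+ * const rangeResponse = await blobClient.download(256, 1024, {
+ *   onProgress: (progress) => console.log(`Loaded ${progress.loadedBytes} bytes`)
+ * });
+ * ```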
+ *
+ *
+ * Example usage (Node.js):
+ *
+ * ```js
+ * // Download and convert a blob to a string
+ * const downloadBlockBlobResponse = await blobClient.download();
+ * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
+ * console.log("Downloaded blob content:", downloaded.toString());
+ *
+ * async function streamToBuffer(readableStream) {
+ *   return new Promise((resolve, reject) => {
+ *     const chunks = [];
+ *     readableStream.on("data", (data) => {
+ *       chunks.push(data instanceof Buffer ? data : Buffer.from(data));
+ *     });
+ *     readableStream.on("end", () => {
+ *       resolve(Buffer.concat(chunks));
+ *     });
+ *     readableStream.on("error", reject);
+ *   });
+ * }
+ * ```
+ *
+ * Example usage (browser):
+ *
+ * ```js
+ * // Download and convert a blob to a string
+ * const downloadBlockBlobResponse = await blobClient.download();
+ * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);
+ * console.log(
+ *   "Downloaded blob content",
+ *   downloaded
+ * );
+ *
+ * async function blobToString(blob: Blob): Promise<string> {
+ *   const fileReader = new FileReader();
+ *   return new Promise((resolve, reject) => {
+ *     fileReader.onloadend = (ev: any) => {
+ *       resolve(ev.target!.result);
+ *     };
+ *     fileReader.onerror = reject;
+ *     fileReader.readAsText(blob);
+ *   });
+ * }
+ * ```
+ */
+ download(offset?: number, count?: number, options?: BlobDownloadOptions): Promise<BlobDownloadResponseParsed>;
+ /**
+ * Returns true if the Azure blob resource represented by this client exists; false otherwise.
+ *
+ * NOTE: use this function with care since an existing blob might be deleted by other clients or
+ * applications. Vice versa, new blobs might be added by other clients or applications after this
+ * function completes.
+ *
+ * @param options - options to Exists operation.
+ */
+ exists(options?: BlobExistsOptions): Promise<boolean>;
+ /**
+ * Returns all user-defined metadata, standard HTTP properties, and system properties
+ * for the blob. It does not return the content of the blob.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties
+ *
+ * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
+ * they originally contained uppercase characters. This differs from the metadata keys returned by
+ * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which
+ * will retain their original casing.
+ *
+ * @param options - Optional options to Get Properties operation.
+ */
+ getProperties(options?: BlobGetPropertiesOptions): Promise<BlobGetPropertiesResponse>;
+ /**
+ * Marks the specified blob or snapshot for deletion. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ *
+ * @param options - Optional options to Blob Delete operation.
+ */
+ delete(options?: BlobDeleteOptions): Promise<BlobDeleteResponse>;
+ /**
+ * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ *
+ * @param options - Optional options to Blob Delete operation.
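+ *
+ * Example usage (a sketch assuming an existing `blobClient`):
+ *
+ * ```js
+ * const deleteResponse = await blobClient.deleteIfExists({
+ *   deleteSnapshots: "include" // also remove any snapshots
+ * });
+ * if (!deleteResponse.succeeded) {
+ *   console.log("Blob did not exist.");
+ * }
+ * ```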
+ */
+ deleteIfExists(options?: BlobDeleteOptions): Promise<BlobDeleteIfExistsResponse>;
+ /**
+ * Restores the contents and metadata of soft deleted blob and any associated
+ * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29
+ * or later.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob
+ *
+ * @param options - Optional options to Blob Undelete operation.
+ */
+ undelete(options?: BlobUndeleteOptions): Promise<BlobUndeleteResponse>;
+ /**
+ * Sets system properties on the blob.
+ *
+ * If no value provided, or no value provided for the specified blob HTTP headers,
+ * these blob HTTP headers without a value will be cleared.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+ *
+ * @param blobHTTPHeaders - If no value provided, or no value provided for
+ * the specified blob HTTP headers, these blob HTTP
+ * headers without a value will be cleared.
+ * A common header to set is `blobContentType`
+ * enabling the browser to provide functionality
+ * based on file type.
+ * @param options - Optional options to Blob Set HTTP Headers operation.
+ */
+ setHTTPHeaders(blobHTTPHeaders?: BlobHTTPHeaders, options?: BlobSetHTTPHeadersOptions): Promise<BlobSetHTTPHeadersResponse>;
+ /**
+ * Sets user-defined metadata for the specified blob as one or more name-value pairs.
+ *
+ * If no option provided, or no metadata defined in the parameter, the blob
+ * metadata will be removed.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata
+ *
+ * @param metadata - Replace existing metadata with this value.
+ * If no value provided the existing metadata will be removed.
+ * @param options - Optional options to Set Metadata operation.
+ */
+ setMetadata(metadata?: Metadata, options?: BlobSetMetadataOptions): Promise<BlobSetMetadataResponse>;
+ /**
+ * Sets tags on the underlying blob.
+ * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters.
+ * Valid tag key and value characters include lower and upper case letters, digits (0-9),
+ * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_').
+ *
+ * @param tags -
+ * @param options -
+ */
+ setTags(tags: Tags, options?: BlobSetTagsOptions): Promise<BlobSetTagsResponse>;
+ /**
+ * Gets the tags associated with the underlying blob.
+ *
+ * @param options -
+ */
+ getTags(options?: BlobGetTagsOptions): Promise<BlobGetTagsResponse>;
+ /**
+ * Get a {@link BlobLeaseClient} that manages leases on the blob.
+ *
+ * @param proposeLeaseId - Initial proposed lease Id.
+ * @returns A new BlobLeaseClient object for managing leases on the blob.
+ */
+ getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient;
+ /**
+ * Creates a read-only snapshot of a blob.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob
+ *
+ * @param options - Optional options to the Blob Create Snapshot operation.
+ */
+ createSnapshot(options?: BlobCreateSnapshotOptions): Promise<BlobCreateSnapshotResponse>;
+ /**
+ * Asynchronously copies a blob to a destination within the storage account.
+ * This method returns a long running operation poller that allows you to wait
+ * indefinitely until the copy is completed.
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller.
+ * Note that the onProgress callback will not be invoked if the operation completes in the first
+ * request, and attempting to cancel a completed copy will result in an error being thrown.
+ *
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+ * a committed blob in any Azure storage account.
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+ * an Azure file in any Azure storage account.
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+ * operation to copy from another storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+ *
+ * Example using automatic polling:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using manual polling:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * while (!copyPoller.isDone()) {
+ *   await copyPoller.poll();
+ * }
+ * const result = copyPoller.getResult();
+ * ```
+ *
+ * Example using progress updates:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url', {
+ *   onProgress(state) {
+ *     console.log(`Progress: ${state.copyProgress}`);
+ *   }
+ * });
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using a changing polling interval (default 15 seconds):
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url', {
+ *   intervalInMs: 1000 // poll blob every 1 second for copy progress
+ * });
+ * const result = await copyPoller.pollUntilDone();
+ * ```
+ *
+ * Example using copy cancellation:
+ *
+ * ```js
+ * const copyPoller = await blobClient.beginCopyFromURL('url');
+ * // cancel operation after starting it.
+ * try {
+ *   await copyPoller.cancelOperation();
+ *   // calls to get the result now throw PollerCancelledError
+ *   await copyPoller.getResult();
+ * } catch (err) {
+ *   if (err.name === 'PollerCancelledError') {
+ *     console.log('The copy was cancelled.');
+ *   }
+ * }
+ * ```
+ *
+ * @param copySource - url to the source Azure Blob/File.
+ * @param options - Optional options to the Blob Start Copy From URL operation.
+ */
+ beginCopyFromURL(copySource: string, options?: BlobBeginCopyFromURLOptions): Promise<PollerLikeWithCancellation<PollOperationState<BlobBeginCopyFromURLResponse>, BlobBeginCopyFromURLResponse>>;
+ /**
+ * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero
+ * length and full metadata. Version 2012-02-12 and newer.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob
+ *
+ * @param copyId - Id of the Copy From URL operation.
+ * @param options - Optional options to the Blob Abort Copy From URL operation.
+ */
+ abortCopyFromURL(copyId: string, options?: BlobAbortCopyFromURLOptions): Promise<BlobAbortCopyFromURLResponse>;
+ /**
+ * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not
+ * return a response until the copy is complete.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url
+ *
+ * @param copySource - The source URL to copy from; a Shared Access Signature (SAS) may be needed for authentication
+ * @param options -
+ */
+ syncCopyFromURL(copySource: string, options?: BlobSyncCopyFromURLOptions): Promise<BlobCopyFromURLResponse>;
+ /**
+ * Sets the tier on a blob. The operation is allowed on a page blob in a premium
+ * storage account and on a block blob in a blob storage account (locally redundant
+ * storage only). A premium page blob's tier determines the allowed size, IOPS,
+ * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive
+ * storage type. This operation does not update the blob's ETag.
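+ *
+ * Example usage (a sketch assuming an existing block blob `blobClient` on a blob storage account):
+ *
+ * ```js
+ * // Move a rarely-read block blob to the Cool tier
+ * await blobClient.setAccessTier("Cool");
+ * // Later, archive it entirely
+ * await blobClient.setAccessTier("Archive");
+ * ```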
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier
+ *
+ * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive.
+ * @param options - Optional options to the Blob Set Tier operation.
+ */
+ setAccessTier(tier: BlockBlobTier | PremiumPageBlobTier | string, options?: BlobSetTierOptions): Promise<BlobSetTierResponse>;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob in parallel to a buffer.
+ * Offset and count are optional; downloads the entire blob if they are not provided.
+ *
+ * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
+ * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,
+ * consider {@link downloadToFile}.
+ *
+ * @param offset - From which position of the block blob to download (in bytes)
+ * @param count - How much data (in bytes) to be downloaded. Will download to the end when passing undefined
+ * @param options - BlobDownloadToBufferOptions
+ */
+ downloadToBuffer(offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise<Buffer>;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob in parallel to a buffer.
+ * Offset and count are optional; downloads the entire blob if they are not provided.
+ *
+ * Warning: Buffers can only support files up to about one gigabyte on 32-bit systems or about two
+ * gigabytes on 64-bit systems due to limitations of Node.js/V8. For blobs larger than this size,
+ * consider {@link downloadToFile}.
+ *
+ * @param buffer - Buffer to be filled; must have a length larger than count
+ * @param offset - From which position of the block blob to download (in bytes)
+ * @param count - How much data (in bytes) to be downloaded. Will download to the end when passing undefined
+ * @param options - BlobDownloadToBufferOptions
+ */
+ downloadToBuffer(buffer: Buffer, offset?: number, count?: number, options?: BlobDownloadToBufferOptions): Promise<Buffer>;
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Downloads an Azure Blob to a local file.
+ * Fails if the given file path already exists.
+ * Offset and count are optional; pass 0 and undefined respectively to download the entire blob.
+ *
+ * @param filePath -
+ * @param offset - From which position of the block blob to download.
+ * @param count - How much data to be downloaded. Will download to the end when passing undefined.
+ * @param options - Options to the Blob Download operation.
+ * @returns The response data for blob download operation,
+ * but with readableStreamBody set to undefined since its
+ * content is already read and written into a local file
+ * at the specified path.
+ */
+ downloadToFile(filePath: string, offset?: number, count?: number, options?: BlobDownloadOptions): Promise<BlobDownloadResponseParsed>;
+ private getBlobAndContainerNamesFromUrl;
+ /**
+ * Asynchronously copies a blob to a destination within the storage account.
+ * In version 2012-02-12 and later, the source for a Copy Blob operation can be
+ * a committed blob in any Azure storage account.
+ * Beginning with version 2015-02-21, the source for a Copy Blob operation can be
+ * an Azure file in any Azure storage account.
+ * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob
+ * operation to copy from another storage account.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob
+ *
+ * @param copySource - url to the source Azure Blob/File.
+ * @param options - Optional options to the Blob Start Copy From URL operation.
+ */
+ private startCopyFromURL;
+ /**
+ * Only available for a BlobClient constructed with a shared key credential.
+ *
+ * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties
+ * and parameters passed in. The SAS is signed by the shared key credential of the client.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas
+ *
+ * @param options - Optional parameters.
+ * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token.
+ */
+ generateSasUrl(options: BlobGenerateSasUrlOptions): Promise<string>;
+ /**
+ * Delete the immutability policy on the blob.
+ *
+ * @param options - Optional options to delete immutability policy on the blob.
+ */
+ deleteImmutabilityPolicy(options?: BlobDeleteImmutabilityPolicyOptions): Promise<BlobDeleteImmutabilityPolicyResponse>;
+ /**
+ * Set immutability policy on the blob.
+ *
+ * @param options - Optional options to set immutability policy on the blob.
+ */
+ setImmutabilityPolicy(immutabilityPolicy: BlobImmutabilityPolicy, options?: BlobSetImmutabilityPolicyOptions): Promise<BlobSetImmutabilityPolicyResponse>;
+ /**
+ * Set legal hold on the blob.
+ *
+ * @param options - Optional options to set legal hold on the blob.
+ */
+ setLegalHold(legalHoldEnabled: boolean, options?: BlobSetLegalHoldOptions): Promise<BlobSetLegalHoldResponse>;
+}
+
+/** Defines headers for Blob_copyFromURL operation. */
+export declare interface BlobCopyFromURLHeaders {
+ /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
+ etag?: string;
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
+ lastModified?: Date;
+ /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
+ clientRequestId?: string;
+ /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
+ requestId?: string;
+ /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
+ version?: string;
+ /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */
+ versionId?: string;
+ /** UTC date/time value generated by the service that indicates the time at which the response was initiated */
+ date?: Date;
+ /** String identifier for this copy operation. */
+ copyId?: string;
+ /** State of the copy operation identified by x-ms-copy-id. */
+ copyStatus?: SyncCopyStatusType;
+ /** This response header is returned so that the client can check for the integrity of the copied content. This header is only returned if the source content MD5 was specified. */
+ contentMD5?: Uint8Array;
+ /** This response header is returned so that the client can check for the integrity of the copied content. */
+ xMsContentCrc64?: Uint8Array;
+ /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope.
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the copyFromURL operation. */ +export declare type BlobCopyFromURLResponse = BlobCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCopyFromURLHeaders; + }; +}; + +/** Defines values for BlobCopySourceTags. */ +export declare type BlobCopySourceTags = "REPLACE" | "COPY"; + +/** Defines headers for Blob_createSnapshot operation. */ +export declare interface BlobCreateSnapshotHeaders { + /** Uniquely identifies the snapshot and indicates the snapshot version. It may be used in subsequent requests to access the snapshot */ + snapshot?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** True if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. For a snapshot request, this header is set to true when metadata was provided in the request and encrypted with a customer-provided key. */ + isServerEncrypted?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.createSnapshot} operation. + */ +export declare interface BlobCreateSnapshotOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet when creating blob snapshots. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the createSnapshot operation. 
*/ +export declare type BlobCreateSnapshotResponse = BlobCreateSnapshotHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobCreateSnapshotHeaders; + }; +}; + +/** Defines headers for Blob_delete operation. */ +export declare interface BlobDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link BlobClient.deleteIfExists} operation. + */ +export declare interface BlobDeleteIfExistsResponse extends BlobDeleteResponse { + /** + * Indicate whether the blob is successfully deleted. Is false if the blob does not exist in the first place. + */ + succeeded: boolean; +} + +/** Defines headers for Blob_deleteImmutabilityPolicy operation. */ +export declare interface BlobDeleteImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} + +/** + * Options for deleting immutability policy {@link BlobClient.deleteImmutabilityPolicy} operation. + */ +export declare interface BlobDeleteImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the deleteImmutabilityPolicy operation. */ +export declare type BlobDeleteImmutabilityPolicyResponse = BlobDeleteImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteImmutabilityPolicyHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.delete} operation. + */ +export declare interface BlobDeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting blobs. + */ + conditions?: BlobRequestConditions; + /** + * Specifies options to delete blobs that have associated snapshots. + * - `include`: Delete the base blob and all of its snapshots. + * - `only`: Delete only the blob's snapshots and not the blob itself. 
+ */ + deleteSnapshots?: DeleteSnapshotsOptionType; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the delete operation. */ +export declare type BlobDeleteResponse = BlobDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDeleteHeaders; + }; +}; + +/** Defines headers for Blob_download operation. */ +export declare interface BlobDownloadHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. 
Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** The number of tags associated with the blob */ + tagCount?: number; + /** If this blob has been sealed */ + isSealed?: boolean; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} + +/** Optional parameters. */ +export declare interface BlobDownloadOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + leaseAccessConditions?: LeaseAccessConditions; + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** Parameter group */ + cpkInfo?: CpkInfo; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** The snapshot parameter is an opaque DateTime value that, when present, specifies the blob snapshot to retrieve. For more information on working with blob snapshots, see Creating a Snapshot of a Blob. */ + snapshot?: string; + /** The version id parameter is an opaque DateTime value that, when present, specifies the version of the blob to operate on. It's for service version 2019-10-10 and newer. */ + versionId?: string; + /** Return only the bytes of the blob in the specified range. */ + range?: string; + /** When set to true and specified together with the Range, the service returns the MD5 hash for the range, as long as the range is less than or equal to 4 MB in size. */ + rangeGetContentMD5?: boolean; + /** When set to true and specified together with the Range, the service returns the CRC64 hash for the range, as long as the range is less than or equal to 4 MB in size. 
*/ + rangeGetContentCRC64?: boolean; +} + +/** + * Options to configure the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * An opaque DateTime string value that, when present, specifies the blob snapshot to retrieve. + */ + snapshot?: string; + /** + * When this is set to true and download range of blob, the service returns the MD5 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentMD5?: boolean; + /** + * When this is set to true and download range of blob, the service returns the CRC64 hash for the range, + * as long as the range is less than or equal to 4 MB in size. + * + * rangeGetContentCrc64 and rangeGetContentMD5 cannot be set at same time. + */ + rangeGetContentCrc64?: boolean; + /** + * Conditions to meet when downloading blobs. + */ + conditions?: BlobRequestConditions; + /** + * Call back to receive events on the progress of download operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original body download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional `FileClient.download()` request will be made + * from the broken point, until the requested range has been successfully downloaded or maxRetryRequests is reached. + * + * Default value is 5, please set a larger value when loading large files in poor network. + */ + maxRetryRequests?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the download operation. */ +export declare type BlobDownloadResponseModel = BlobDownloadHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobDownloadHeaders; + }; +}; + +/** + * Contains response data for the {@link BlobClient.download} operation. + */ +export declare interface BlobDownloadResponseParsed extends BlobDownloadResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} + +/** + * Option interface for the {@link BlobClient.downloadToBuffer} operation. + */ +export declare interface BlobDownloadToBufferOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
+ */ + abortSignal?: AbortSignalLike; + /** + * blockSize is the data every request trying to download. + * Must be greater than or equal to 0. + * If set to 0 or undefined, blockSize will automatically calculated according to the blob size. + */ + blockSize?: number; + /** + * Optional. ONLY AVAILABLE IN NODE.JS. + * + * How many retries will perform when original block download stream unexpected ends. + * Above kind of ends will not trigger retry policy defined in a pipeline, + * because they doesn't emit network errors. + * + * With this option, every additional retry means an additional FileClient.download() request will be made + * from the broken point, until the requested block has been successfully downloaded or + * maxRetryRequestsPerBlock is reached. + * + * Default value is 5, please set a larger value when in poor network. + */ + maxRetryRequestsPerBlock?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel download. + */ + concurrency?: number; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** + * Options to configure the {@link BlobClient.exists} operation. + */ +export declare interface BlobExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Conditions to meet. + */ + conditions?: BlobRequestConditions; +} + +/** + * An interface representing BlobFlatListSegment. + */ +export declare interface BlobFlatListSegment { + blobItems: BlobItem[]; +} + +export declare interface BlobFlatListSegmentModel { + blobItems: BlobItemInternal[]; +} + +/** + * Options to configure {@link BlobClient.generateSasUrl} operation. + */ +export declare interface BlobGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: BlobSASPermissions; +} + +/** Defines headers for Blob_getProperties operation. */ +export declare interface BlobGetPropertiesHeaders { + /** Returns the date and time the blob was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** Returns the date and time the blob was created. */ + createdOn?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the destination blob of the replication. */ + objectReplicationPolicyId?: string; + /** Optional. Only valid when Object Replication is enabled for the storage container and on the source blob of the replication. When retrieving this header, it will return the header with the policy id and rule id (e.g. x-ms-or-policyid_ruleid), and the value will be the status of the replication (e.g. complete, failed). */ + objectReplicationRules?: { + [propertyName: string]: string; + }; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. 
This value can specify the time of a completed, aborted, or failed copy attempt. This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletedOn?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Included if the blob is incremental copy blob. */ + isIncrementalCopy?: boolean; + /** Included if the blob is incremental copy blob or incremental copy snapshot, if x-ms-copy-status is success. Snapshot time of the last successful incremental copy snapshot for this blob. */ + destinationSnapshot?: string; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** The size of the blob in bytes. For a page blob, this header returns the value of the x-ms-blob-content-length header that is stored with the blob. */ + contentLength?: number; + /** The content type specified for the blob. The default content type is 'application/octet-stream' */ + contentType?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. 
The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** The tier of page blob on a premium storage account or tier of block blob on blob storage LRS accounts. For a list of allowed premium page blob tiers, see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/premium-storage#features. For blob storage LRS accounts, valid values are Hot/Cool/Archive. */ + accessTier?: string; + /** For page blobs on a premium storage account only. If the access tier is not explicitly set on the blob, the tier is inferred based on its content length and this header will be returned with true value. */ + accessTierInferred?: boolean; + /** For blob storage LRS accounts, valid values are rehydrate-pending-to-hot/rehydrate-pending-to-cool. If the blob is being rehydrated and is not complete then this header is returned indicating that rehydrate is pending and also tells the destination tier. */ + archiveStatus?: string; + /** The time the tier was changed on the object. This is only returned if the tier on the block blob was ever set. */ + accessTierChangedOn?: Date; + /** A DateTime value returned by the service that uniquely identifies the blob. 
The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** The value of this header indicates whether version of this blob is a current version, see also x-ms-version-id header. */ + isCurrentVersion?: boolean; + /** The number of tags associated with the blob */ + tagCount?: number; + /** The time this blob will expire. */ + expiresOn?: Date; + /** If this blob has been sealed */ + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. */ + rehydratePriority?: RehydratePriority; + /** UTC date/time value generated by the service that indicates the time at which the blob was last read or written to */ + lastAccessed?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting blob properties. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** + * Contains response data for the {@link BlobClient.getProperties} operation. + */ +export declare interface BlobGetPropertiesResponse extends BlobGetPropertiesResponseModel { + /** + * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. + */ + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + /** + * Object Replication Policy Id of the destination blob. + */ + objectReplicationDestinationPolicyId?: string; +} + +/** Contains response data for the getProperties operation. */ +export declare type BlobGetPropertiesResponseModel = BlobGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobGetPropertiesHeaders; + }; +}; + +/** Defines headers for Blob_getTags operation. */ +export declare interface BlobGetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.getTags} operation. 
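+ *
+ * Example usage (a sketch assuming an existing `blobClient`; the tag names are illustrative):
+ *
+ * ```js
+ * await blobClient.setTags({ project: "docs", reviewed: "true" });
+ * const tagsResponse = await blobClient.getTags();
+ * console.log(tagsResponse.tags); // { project: "docs", reviewed: "true" }
+ * ```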
+ */ +export declare interface BlobGetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} + +/** + * Contains response data for the {@link BlobClient.getTags} operation. + */ +export declare type BlobGetTagsResponse = { + tags: Tags; +} & BlobGetTagsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: BlobGetTagsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: BlobTags; + }; +}; + +/** + * An interface representing BlobHierarchyListSegment. + */ +export declare interface BlobHierarchyListSegment { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItem[]; +} + +export declare interface BlobHierarchyListSegmentModel { + blobPrefixes?: BlobPrefix[]; + blobItems: BlobItemInternal[]; +} + +/** Parameter group */ +export declare interface BlobHTTPHeaders { + /** Optional. Sets the blob's cache control. If specified, this property is stored with the blob and returned with a read request. */ + blobCacheControl?: string; + /** Optional. Sets the blob's content type. If specified, this property is stored with the blob and returned with a read request. */ + blobContentType?: string; + /** Optional. An MD5 hash of the blob content. Note that this hash is not validated, as the hashes for the individual blocks were validated when each was uploaded. */ + blobContentMD5?: Uint8Array; + /** Optional. Sets the blob's content encoding. If specified, this property is stored with the blob and returned with a read request. */ + blobContentEncoding?: string; + /** Optional. Set the blob's content language. If specified, this property is stored with the blob and returned with a read request. */ + blobContentLanguage?: string; + /** Optional. Sets the blob's Content-Disposition header. */ + blobContentDisposition?: string; +} + +/** + * Describe immutable policy for blob. + */ +export declare interface BlobImmutabilityPolicy { + /** + * Specifies the date time when the blobs immutability policy is set to expire. + */ + expiriesOn?: Date; + /** + * Specifies the immutability policy mode to set on the blob. + */ + policyMode?: BlobImmutabilityPolicyMode; +} + +/** Defines values for BlobImmutabilityPolicyMode. 
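+ * For example, a {@link BlobImmutabilityPolicy} using one of these modes might look like the following
+ * sketch (the expiry date is an assumed value):
+ *
+ * ```js
+ * const policy = {
+ *   expiriesOn: new Date("2030-01-01T00:00:00Z"), // date is illustrative
+ *   policyMode: "Unlocked"
+ * };
+ * ```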
*/ +export declare type BlobImmutabilityPolicyMode = "Mutable" | "Unlocked" | "Locked"; + +/** + * An Azure Storage blob + */ +export declare interface BlobItem { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + properties: BlobProperties; + metadata?: { + [propertyName: string]: string; + }; + tags?: Tags; + objectReplicationSourceProperties?: ObjectReplicationPolicy[]; + hasVersionsOnly?: boolean; +} + +/** An Azure Storage blob */ +export declare interface BlobItemInternal { + name: string; + deleted: boolean; + snapshot: string; + versionId?: string; + isCurrentVersion?: boolean; + /** Properties of a blob */ + properties: BlobProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; + /** Blob tags */ + blobTags?: BlobTags; + /** Dictionary of */ + objectReplicationMetadata?: { + [propertyName: string]: string; + }; + /** Inactive root blobs which have any versions would have such tag with value true. */ + hasVersionsOnly?: boolean; +} + +/** + * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. + */ +export declare class BlobLeaseClient { + private _leaseId; + private _url; + private _containerOrBlobOperation; + private _isContainer; + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId(): string; + /** + * Gets the url. + * + * @readonly + */ + get url(): string; + /** + * Creates an instance of BlobLeaseClient. + * @param client - The client to make the lease operation requests. + * @param leaseId - Initial proposed lease id. + */ + constructor(client: ContainerClient | BlobClient, leaseId?: string); + /** + * Establishes and manages a lock on a container for delete operations, or on a blob + * for write and delete operations. + * The lock duration can be 15 to 60 seconds, or can be infinite. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param duration - Must be between 15 to 60 seconds, or infinite (-1) + * @param options - option to configure lease management operations. + * @returns Response data for acquire lease operation. + */ + acquireLease(duration: number, options?: LeaseOperationOptions): Promise; + /** + * To change the ID of the lease. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param proposedLeaseId - the proposed new lease Id. + * @param options - option to configure lease management operations. + * @returns Response data for change lease operation. + */ + changeLease(proposedLeaseId: string, options?: LeaseOperationOptions): Promise; + /** + * To free the lease if it is no longer needed so that another client may + * immediately acquire a lease against the container or the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - option to configure lease management operations. + * @returns Response data for release lease operation. + */ + releaseLease(options?: LeaseOperationOptions): Promise; + /** + * To renew the lease. 
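+ *
+ * Example usage (an illustrative sketch; assumes an existing BlobClient named `blobClient`):
+ *
+ * ```js
+ * const leaseClient = blobClient.getBlobLeaseClient();
+ * await leaseClient.acquireLease(30); // 15-60 seconds, or -1 for an infinite lease
+ * // ...before the 30 seconds elapse, extend the lease for another full period:
+ * await leaseClient.renewLease();
+ * ```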
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param options - Optional option to configure lease management operations. + * @returns Response data for renew lease operation. + */ + renewLease(options?: LeaseOperationOptions): Promise; + /** + * To end the lease but ensure that another client cannot acquire a new lease + * until the current lease period has expired. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container + * and + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + * + * @param breakPeriod - Break period + * @param options - Optional options to configure lease management operations. + * @returns Response data for break lease operation. + */ + breakLease(breakPeriod: number, options?: LeaseOperationOptions): Promise; +} + +export declare interface BlobPrefix { + name: string; +} + +/** Properties of a blob */ +export declare interface BlobProperties { + createdOn?: Date; + lastModified: Date; + etag: string; + /** Size in bytes */ + contentLength?: number; + contentType?: string; + contentEncoding?: string; + contentLanguage?: string; + contentMD5?: Uint8Array; + contentDisposition?: string; + cacheControl?: string; + blobSequenceNumber?: number; + blobType?: BlobType; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + copyId?: string; + copyStatus?: CopyStatusType; + copySource?: string; + copyProgress?: string; + copyCompletedOn?: Date; + copyStatusDescription?: string; + serverEncrypted?: boolean; + incrementalCopy?: boolean; + destinationSnapshot?: string; + deletedOn?: Date; + remainingRetentionDays?: number; + accessTier?: AccessTier; + accessTierInferred?: boolean; + archiveStatus?: ArchiveStatus; + customerProvidedKeySha256?: string; + /** The name of the encryption scope under which the blob is encrypted. */ + encryptionScope?: string; + accessTierChangedOn?: Date; + tagCount?: number; + expiresOn?: Date; + isSealed?: boolean; + /** If an object is in rehydrate pending state then this header is returned with priority of rehydrate. Valid values are High and Standard. */ + rehydratePriority?: RehydratePriority; + lastAccessedOn?: Date; + /** UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. */ + immutabilityPolicyExpiresOn?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; + /** Indicates if a legal hold is present on the blob. */ + legalHold?: boolean; +} + +/** + * Options to query blob with Apache Arrow format. Only valid for {@link BlockBlobQueryOptions.outputTextConfiguration}. + */ +export declare interface BlobQueryArrowConfiguration { + /** + * Kind. + */ + kind: "arrow"; + /** + * List of {@link BlobQueryArrowField} describing the schema of the data. + */ + schema: BlobQueryArrowField[]; +} + +/** + * Describe a field in {@link BlobQueryArrowConfiguration}. + */ +export declare interface BlobQueryArrowField { + /** + * The type of the field. + */ + type: BlobQueryArrowFieldType; + /** + * The name of the field. + */ + name?: string; + /** + * The precision of the field. Required if type is "decimal". + */ + precision?: number; + /** + * The scale of the field. Required if type is is "decimal". + */ + scale?: number; +} + +/** + * The type of a {@link BlobQueryArrowField}. 
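+ * For example, a query returning Arrow output might be configured like the following sketch
+ * (assumes an existing `blockBlobClient`; the query text and schema are illustrative):
+ *
+ * ```js
+ * const response = await blockBlobClient.query("SELECT * from BlobStorage", {
+ *   outputTextConfiguration: {
+ *     kind: "arrow",
+ *     schema: [{ type: "decimal", name: "price", precision: 18, scale: 2 }]
+ *   }
+ * });
+ * ```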
+ */ +export declare type BlobQueryArrowFieldType = "int64" | "bool" | "timestamp[ms]" | "string" | "double" | "decimal"; + +/** + * Options to query blob with CSV format. + */ +export declare interface BlobQueryCsvTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a CSV format blob. + */ + kind: "csv"; + /** + * Column separator. Default is ",". + */ + columnSeparator?: string; + /** + * Field quote. + */ + fieldQuote?: string; + /** + * Escape character. + */ + escapeCharacter?: string; + /** + * Has headers. Default is false. + */ + hasHeaders?: boolean; +} + +/** + * Blob query error type. + */ +export declare interface BlobQueryError { + /** + * Whether error is fatal. Fatal error will stop query. + */ + isFatal: boolean; + /** + * Error name. + */ + name: string; + /** + * Position in bytes of the query. + */ + position: number; + /** + * Error description. + */ + description: string; +} + +/** Defines headers for Blob_query operation. */ +export declare interface BlobQueryHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + metadata?: { + [propertyName: string]: string; + }; + /** The number of bytes present in the response body. */ + contentLength?: number; + /** The media type of the body of the response. For Download Blob this is 'application/octet-stream' */ + contentType?: string; + /** Indicates the range of bytes returned in the event that the client requested a subset of the blob by setting the 'Range' request header. */ + contentRange?: string; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header returns the value that was specified for the Content-Encoding request header */ + contentEncoding?: string; + /** This header is returned if it was previously specified for the blob. */ + cacheControl?: string; + /** This header returns the value that was specified for the 'x-ms-blob-content-disposition' header. The Content-Disposition response header field conveys additional information about how to process the response payload, and also can be used to attach additional metadata. For example, if set to attachment, it indicates that the user-agent should not display the response, but instead show a Save As dialog with a filename other than the blob name specified. */ + contentDisposition?: string; + /** This header returns the value that was specified for the Content-Language request header. */ + contentLanguage?: string; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** The blob's type. */ + blobType?: BlobType; + /** Conclusion time of the last attempted Copy Blob operation where this blob was the destination blob. This value can specify the time of a completed, aborted, or failed copy attempt. 
This header does not appear if a copy is pending, if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copyCompletionTime?: Date; + /** Only appears when x-ms-copy-status is failed or pending. Describes the cause of the last fatal or non-fatal copy operation failure. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyStatusDescription?: string; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** Contains the number of bytes copied and the total bytes in the source in the last attempted Copy Blob operation where this blob was the destination blob. Can show between 0 and Content-Length bytes copied. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List */ + copyProgress?: string; + /** URL up to 2 KB in length that specifies the source blob or file used in the last attempted Copy Blob operation where this blob was the destination blob. This header does not appear if this blob has never been the destination in a Copy Blob operation, or if this blob has been modified after a concluded Copy Blob operation using Set Blob Properties, Put Blob, or Put Block List. */ + copySource?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Indicates that the service supports requests for partial blob content. */ + acceptRanges?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The number of committed blocks present in the blob. This header is returned only for append blobs. */ + blobCommittedBlockCount?: number; + /** The value of this header is set to true if the blob data and application metadata are completely encrypted using the specified algorithm. Otherwise, the value is set to false (when the blob is unencrypted, or if only parts of the blob/application metadata are encrypted). */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. 
*/ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** If the blob has a MD5 hash, and if request contains range header (Range or x-ms-range), this response header is returned with the value of the whole blob's MD5 value. This value may or may not be equal to the value returned in Content-MD5 header, with the latter calculated from the requested range */ + blobContentMD5?: Uint8Array; + /** Error Code */ + errorCode?: string; + /** If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to true, then the request returns a crc64 for the range, as long as the range size is less than or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is specified in the same request, it will fail with 400(Bad Request). */ + contentCrc64?: Uint8Array; +} + +/** + * Options to query blob with JSON format. + */ +export declare interface BlobQueryJsonTextConfiguration { + /** + * Record separator. + */ + recordSeparator: string; + /** + * Query for a JSON format blob. + */ + kind: "json"; +} + +/** + * Options to query blob with Parquet format. Only valid for {@link BlockBlobQueryOptions.inputTextConfiguration}. + */ +export declare interface BlobQueryParquetConfiguration { + /** + * Kind. + */ + kind: "parquet"; +} + +/** Contains response data for the query operation. */ +export declare type BlobQueryResponseModel = BlobQueryHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobQueryHeaders; + }; +}; + +/** + * Options to configure Blob - Release Lease operation. + */ +export declare interface BlobReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Blob - Renew Lease operation. + */ +export declare interface BlobRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease of a blob. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * standard HTTP conditional headers, tags condition and lease condition + */ +export declare interface BlobRequestConditions extends ModifiedAccessConditions, LeaseAccessConditions { +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting + * a value to true means that any SAS which uses these permissions will grant permissions for that operation. 
Once all + * the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class BlobSASPermissions { + /** + * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): BlobSASPermissions; + /** + * Creates a {@link BlobSASPermissions} from a raw object which contains the same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: BlobSASPermissionsLike): BlobSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * @returns A string which represents the BlobSASPermissions + */ + toString(): string; +} + +/** + * A type that looks like a Blob SAS permission. + * Used in {@link BlobSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface BlobSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. + */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * BlobSASSignatureValues is used to help generate Blob service SAS tokens for containers or blobs. + */ +export declare interface BlobSASSignatureValues { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work.
+ */ + expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to either {@link ContainerSASPermissions} or {@link BlobSASPermissions} depending on the resource + * being accessed for help constructing the permissions string. + */ + permissions?: BlobSASPermissions | ContainerSASPermissions; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * The name of the container the SAS user may access. + */ + containerName: string; + /** + * Optional. The blob name of the SAS user may access. Required if snapshotTime or versionId is provided. + */ + blobName?: string; + /** + * Optional. Snapshot timestamp string the SAS user may access. Only supported from API version 2018-11-09. + */ + snapshotTime?: string; + /** + * Optional. VersionId of the blob version the SAS user may access. Only supported from API version 2019-10-10. + */ + versionId?: string; + /** + * Optional. The name of the access policy on the container this SAS references if any. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; + /** + * Optional. Beginning in version 2020-02-10, specifies the Authorized AAD Object ID in GUID format. The AAD Object ID of a user + * authorized by the owner of the user delegation key to perform the action granted by the SAS. The Azure Storage service will + * ensure that the owner of the user delegation key has the required permissions before granting access but no additional permission + * check for the user specified in this value will be performed. This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * Optional. Beginning in version 2020-02-10, this is a GUID value that will be logged in the storage diagnostic logs and can be used to + * correlate SAS generation with storage resource access. This is only used for User Delegation SAS. + */ + correlationId?: string; +} + +/** + * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you + * to manipulate blob containers. + */ +export declare class BlobServiceClient extends StorageClient { + /** + * serviceContext provided by protocol layer. + */ + private serviceContext; + /** + * + * Creates an instance of BlobServiceClient from connection string. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. 
] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. + */ + static fromConnectionString(connectionString: string, options?: StoragePipelineOptions): BlobServiceClient; + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + * + * Example using DefaultAzureCredential from `@azure/identity`: + * + * ```js + * const account = ""; + * + * const defaultAzureCredential = new DefaultAzureCredential(); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * defaultAzureCredential + * ); + * ``` + * + * Example using an account name/key: + * + * ```js + * const account = "" + * const sharedKeyCredential = new StorageSharedKeyCredential(account, ""); + * + * const blobServiceClient = new BlobServiceClient( + * `https://${account}.blob.core.windows.net`, + * sharedKeyCredential + * ); + * ``` + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlobServiceClient. + * + * @param url - A Client string pointing to Azure Storage blob service, such as + * "https://myaccount.blob.core.windows.net". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a {@link ContainerClient} object + * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. + * + * Example usage: + * + * ```js + * const containerClient = blobServiceClient.getContainerClient(""); + * ``` + */ + getContainerClient(containerName: string): ContainerClient; + /** + * Create a Blob container. + * + * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. + */ + createContainer(containerName: string, options?: ContainerCreateOptions): Promise<{ + containerClient: ContainerClient; + containerCreateResponse: ContainerCreateResponse; + }>; + /** + * Deletes a Blob container. + * + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. 
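+ *
+ * Example usage (an illustrative sketch; the container name is assumed):
+ *
+ * ```js
+ * const deleteResponse = await blobServiceClient.deleteContainer("my-container");
+ * console.log(`Deleted container, requestId: ${deleteResponse.requestId}`);
+ * ```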
+ */ + deleteContainer(containerName: string, options?: ContainerDeleteMethodOptions): Promise; + /** + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. + * + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. + */ + undeleteContainer(deletedContainerName: string, deletedContainerVersion: string, options?: ServiceUndeleteContainerOptions): Promise<{ + containerClient: ContainerClient; + containerUndeleteResponse: ContainerUndeleteResponse; + }>; + /** + * Rename an existing Blob Container. + * + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. + */ + private renameContainer; + /** + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. + */ + getProperties(options?: ServiceGetPropertiesOptions): Promise; + /** + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties + * + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. + */ + setProperties(properties: BlobServiceProperties, options?: ServiceSetPropertiesOptions): Promise; + /** + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats + * + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. + */ + getStatistics(options?: ServiceGetStatisticsOptions): Promise; + /** + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information + * + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. + */ + getAccountInfo(options?: ServiceGetAccountInfoOptions): Promise; + /** + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. + */ + private listContainersSegment; + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with the specified tag + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ServiceFindBlobByTagsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses + * + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. + */ + private listSegments; + /** + * Returns an AsyncIterableIterator for Container Items + * + * @param options - Options to list containers operation.
+ */ + private listItems; + /** + * Returns an async iterable iterator to list all the containers + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the containers in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); + * } + * } + * ``` + * + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. + */ + listContainers(options?: ServiceListContainersOptions): PagedAsyncIterableIterator; + /** + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). + * + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + */ + getUserDelegationKey(startsOn: Date, expiresOn: Date, options?: ServiceGetUserDelegationKeyOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this service. + */ + getBlobBatchClient(): BlobBatchClient; + /** + * Only available for BlobServiceClient constructed with a shared key credential. + * + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas + * + * @param expiresOn - Optional. 
The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateAccountSasUrl(expiresOn?: Date, permissions?: AccountSASPermissions, resourceTypes?: string, options?: ServiceGenerateAccountSasUrlOptions): string; +} + +/** Storage Service Properties. */ +export declare interface BlobServiceProperties { + /** Azure Analytics Logging settings. */ + blobAnalyticsLogging?: Logging; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + hourMetrics?: Metrics; + /** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ + minuteMetrics?: Metrics; + /** The set of CORS rules. */ + cors?: CorsRule[]; + /** The default version to use for requests to the Blob service if an incoming request's version is not specified. Possible values include version 2008-10-27 and all more recent versions */ + defaultServiceVersion?: string; + /** the retention policy which determines how long the associated data should persist */ + deleteRetentionPolicy?: RetentionPolicy; + /** The properties that enable an account to host a static website */ + staticWebsite?: StaticWebsite; +} + +/** Stats for the storage service. */ +export declare interface BlobServiceStatistics { + /** Geo-Replication information for the Secondary Storage Service */ + geoReplication?: GeoReplication; +} + +/** Defines headers for Blob_setHttpHeaders operation. */ +export declare interface BlobSetHTTPHeadersHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setHTTPHeaders} operation. + */ +export declare interface BlobSetHTTPHeadersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob HTTP headers. 
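+ * For example (an illustrative sketch; assumes an existing `blobClient` and a previously captured `etag`):
+ *
+ * ```js
+ * await blobClient.setHTTPHeaders(
+ *   { blobContentType: "application/json" },
+ *   { conditions: { ifMatch: etag } }
+ * );
+ * ```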
+ */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the setHttpHeaders operation. */ +export declare type BlobSetHTTPHeadersResponse = BlobSetHTTPHeadersHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetHTTPHeadersHeaders; + }; +}; + +/** Defines headers for Blob_setImmutabilityPolicy operation. */ +export declare interface BlobSetImmutabilityPolicyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates the time the immutability policy will expire. */ + immutabilityPolicyExpiry?: Date; + /** Indicates immutability policy mode. */ + immutabilityPolicyMode?: BlobImmutabilityPolicyMode; +} + +/** + * Options for setting immutability policy {@link BlobClient.setImmutabilityPolicy} operation. + */ +export declare interface BlobSetImmutabilityPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + modifiedAccessCondition?: ModificationConditions; +} + +/** Contains response data for the setImmutabilityPolicy operation. */ +export declare type BlobSetImmutabilityPolicyResponse = BlobSetImmutabilityPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetImmutabilityPolicyHeaders; + }; +}; + +/** Defines headers for Blob_setLegalHold operation. */ +export declare interface BlobSetLegalHoldHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicates if the blob has a legal hold. */ + legalHold?: boolean; +} + +/** + * Options for setting legal hold {@link BlobClient.setLegalHold} operation. + */ +export declare interface BlobSetLegalHoldOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the setLegalHold operation. 
*/ +export declare type BlobSetLegalHoldResponse = BlobSetLegalHoldHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetLegalHoldHeaders; + }; +}; + +/** Defines headers for Blob_setMetadata operation. */ +export declare interface BlobSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the metadata. This header is only returned when the metadata was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setMetadata} operation. + */ +export declare interface BlobSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting blob metadata. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the setMetadata operation. */ +export declare type BlobSetMetadataResponse = BlobSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetMetadataHeaders; + }; +}; + +/** Defines headers for Blob_setTags operation. 
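+ * These headers are returned by the {@link BlobClient.setTags} operation, for example (an illustrative
+ * sketch; the tag names and values are assumed):
+ *
+ * ```js
+ * const setTagsResponse = await blobClient.setTags({ project: "alpha", owner: "data-team" });
+ * console.log(setTagsResponse.requestId);
+ * ```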
*/ +export declare interface BlobSetTagsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setTags} operation. + */ +export declare interface BlobSetTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet for the blob to perform this operation. + */ + conditions?: TagConditions & LeaseAccessConditions; +} + +/** Contains response data for the setTags operation. */ +export declare type BlobSetTagsResponse = BlobSetTagsHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTagsHeaders; + }; +}; + +/** Defines headers for Blob_setTier operation. */ +export declare interface BlobSetTierHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and newer. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.setAccessTier} operation. + */ +export declare interface BlobSetTierOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; +} + +/** Contains response data for the setTier operation. */ +export declare type BlobSetTierResponse = BlobSetTierHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobSetTierHeaders; + }; +}; + +/** Defines headers for Blob_startCopyFromURL operation. */ +export declare interface BlobStartCopyFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */ + copyId?: string; + /** State of the copy operation identified by x-ms-copy-id. */ + copyStatus?: CopyStatusType; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.beginCopyFromURL} operation. + */ +export declare interface BlobStartCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pairs to associate with the blob that is being copied. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | PremiumPageBlobTier | string; + /** + * Rehydrate Priority - possible values include 'High', 'Standard'. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-rehydration#rehydrate-an-archived-blob-to-an-online-tier + */ + rehydratePriority?: RehydratePriority; + /** + * Optional. Specifies immutability policy for a blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Overrides the sealed state of the destination blob. Default true. + */ + sealBlob?: boolean; +} + +/** Contains response data for the startCopyFromURL operation. */ +export declare type BlobStartCopyFromURLResponse = BlobStartCopyFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers.
*/ + parsedHeaders: BlobStartCopyFromURLHeaders; + }; +}; + +/** + * Options to configure the {@link BlobClient.syncCopyFromURL} operation. + */ +export declare interface BlobSyncCopyFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pairs to associate with the snapshot. + */ + metadata?: Metadata; + /** + * Conditions to meet for the destination blob when copying from a URL to the blob. + */ + conditions?: BlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Optional. Specifies immutability policy for a blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to encrypt the data provided in the request. If not specified, encryption is performed with the default account encryption scope. For more information, see Encryption at Rest for Azure Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Default 'REPLACE'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} + +export declare interface BlobTag { + key: string; + value: string; +} + +/** Blob tags */ +export declare interface BlobTags { + blobTagSet: BlobTag[]; +} + +/** Defines values for BlobType. */ +export declare type BlobType = "BlockBlob" | "PageBlob" | "AppendBlob"; + +/** Defines headers for Blob_undelete operation. */ +export declare interface BlobUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated. */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobClient.undelete} operation. + */ +export declare interface BlobUndeleteOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`.
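+ *
+ * As an editor's illustration only (not part of the original SDK docs), a minimal sketch of
+ * cancelling the undelete call after a timeout; it assumes `blobClient` is an existing
+ * {@link BlobClient} and runs in Node.js:
+ *
+ * ```js
+ * const { AbortController } = require("@azure/abort-controller");
+ * const aborter = new AbortController();
+ * // Give up on the call if it has not completed within 30 seconds.
+ * setTimeout(() => aborter.abort(), 30 * 1000);
+ * await blobClient.undelete({ abortSignal: aborter.signal });
+ * ```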
+ */ + abortSignal?: AbortSignalLike; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Contains response data for the undelete operation. */ +export declare type BlobUndeleteResponse = BlobUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlobUndeleteHeaders; + }; +}; + +/** + * Response type for {@link BlockBlobClient.uploadFile}, {@link BlockBlobClient.uploadStream}, and + * {@link BlockBlobClient.uploadBrowserData}. + */ +export declare type BlobUploadCommonResponse = BlockBlobUploadHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse; +}; + +/** Represents a single block in a block blob. It describes the block's ID and size. */ +export declare interface Block { + /** The base64 encoded block ID. */ + name: string; + /** The block size in bytes. */ + size: number; +} + +/** + * BlockBlobClient defines a set of operations applicable to block blobs. + */ +export declare class BlockBlobClient extends BlobClient { + /** + * blobContext provided by protocol layer. + * + * Note. Ideally BlobClient should set BlobClient.blobContext to protected. However, API + * extractor has an issue blocking that. Here we redeclare _blobContext in BlockBlobClient. + */ + private _blobContext; + /** + * blockBlobContext provided by protocol layer. + */ + private blockBlobContext; + /** + * + * Creates an instance of BlockBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * For example, for a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25".
+ * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of BlockBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a block blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage block blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/blockblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * For example, for a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Providing "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. + */ + withSnapshot(snapshot: string): BlockBlobClient; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - + */ + query(query: string, options?: BlockBlobQueryOptions): Promise<BlobDownloadResponseModel>; + /** + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's contents, + * use {@link stageBlock} and {@link commitBlockList}.
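+ *
+ * For illustration (editor's sketch, not part of the original SDK docs), a minimal staging
+ * flow assuming `blockBlobClient` is an existing {@link BlockBlobClient} and the code runs
+ * in Node.js (uses `Buffer`):
+ *
+ * ```js
+ * // Stage two blocks and commit them as a single blob. Block IDs must be
+ * // base64-encoded strings of equal length.
+ * const blockIds = ["block-000", "block-001"].map((id) => Buffer.from(id).toString("base64"));
+ * await blockBlobClient.stageBlock(blockIds[0], "Hello ", 6);
+ * await blockBlobClient.stageBlock(blockIds[1], "world!", 6);
+ * await blockBlobClient.commitBlockList(blockIds);
+ * ```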
+ * + * This is a non-parallel uploading method, please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrent uploads. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` + */ + upload(body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<BlockBlobUploadResponse>; + /** + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. + */ + syncUploadFromURL(sourceURL: string, options?: BlockBlobSyncUploadFromURLOptions): Promise<BlockBlobPutBlobFromUrlResponse>; + /** + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. + */ + stageBlock(blockId: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobStageBlockOptions): Promise<BlockBlobStageBlockResponse>; + /** + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * + * @param blockId - A 64-byte value that is base64-encoded + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation.
Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to download, greater than 0. Will download to the end when undefined + * @param options - Options to the Block Blob Stage Block From URL operation. + * @returns Response data for the Block Blob Stage Block From URL operation. + */ + stageBlockFromURL(blockId: string, sourceURL: string, offset?: number, count?: number, options?: BlockBlobStageBlockFromURLOptions): Promise<BlockBlobStageBlockFromURLResponse>; + /** + * Writes a blob by specifying the list of block IDs that make up the blob. + * In order to be written as part of a blob, a block must have been successfully written + * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to + * update a blob by uploading only those blocks that have changed, then committing the new and existing + * blocks together. Any blocks not specified in the block list are permanently deleted. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list + * + * @param blocks - Array of 64-byte values that are base64-encoded + * @param options - Options to the Block Blob Commit Block List operation. + * @returns Response data for the Block Blob Commit Block List operation. + */ + commitBlockList(blocks: string[], options?: BlockBlobCommitBlockListOptions): Promise<BlockBlobCommitBlockListResponse>; + /** + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. + */ + getBlockList(listType: BlockListType, options?: BlockBlobGetBlockListOptions): Promise<BlockBlobGetBlockListResponse>; + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView + * @param options - + */ + uploadData(data: Buffer | Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise<BlobUploadCommonResponse>; + /** + * ONLY AVAILABLE IN BROWSERS. + * + * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * + * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call + * {@link commitBlockList} to commit the block list.
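+ *
+ * A hedged example (editor's sketch, not part of the original SDK docs) of the recommended
+ * {@link uploadData} path in a browser; it assumes `blockBlobClient` is an existing
+ * {@link BlockBlobClient} and a hypothetical file input element with id "fileInput":
+ *
+ * ```js
+ * const file = document.getElementById("fileInput").files[0];
+ * await blockBlobClient.uploadData(file, {
+ *   blobHTTPHeaders: { blobContentType: file.type },
+ *   onProgress: (ev) => console.log("uploaded " + ev.loadedBytes + " bytes"),
+ * });
+ * ```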
+ * + * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + * @deprecated Use {@link uploadData} instead. + * + * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView + * @param options - Options to upload browser data. + * @returns Response data for the Blob Upload operation. + */ + uploadBrowserData(browserData: Blob | ArrayBuffer | ArrayBufferView, options?: BlockBlobParallelUploadOptions): Promise<BlobUploadCommonResponse>; + /** + * + * Uploads data to block blob. Requires a bodyFactory as the data source, + * which needs to return a {@link HttpRequestBody} object with the offset and size provided. + * + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. + * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} + * to commit the block list. + * + * @param bodyFactory - + * @param size - size of the data to upload. + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + private uploadSeekableInternal; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a local file in blocks to a block blob. + * + * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload. + * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList + * to commit the block list. + * + * @param filePath - Full path of local file + * @param options - Options to Upload to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadFile(filePath: string, options?: BlockBlobParallelUploadOptions): Promise<BlobUploadCommonResponse>; + /** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Uploads a Node.js Readable stream into block blob. + * + * PERFORMANCE IMPROVEMENT TIPS: + * * Set the input stream's highWaterMark to the same value as the bufferSize + * parameter to avoid Buffer.concat() operations. + * + * @param stream - Node.js Readable stream + * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB + * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, + * which correlates positively with the max upload concurrency. Default value is 5 + * @param options - Options to Upload Stream to Block Blob operation. + * @returns Response data for the Blob Upload operation. + */ + uploadStream(stream: Readable, bufferSize?: number, maxConcurrency?: number, options?: BlockBlobUploadStreamOptions): Promise<BlobUploadCommonResponse>; +} + +/** Defines headers for BlockBlob_commitBlockList operation. */ +export declare interface BlockBlobCommitBlockListHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** This header is returned so that the client can check for message content integrity.
This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. This header refers to the content of the request, meaning, in this case, the list of blocks, and not the content of the blob itself. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.commitBlockList} operation. + */ +export declare interface BlockBlobCommitBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when committing the block list. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when committing the block list. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pairs to associate with the blob when committing the block list. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that this parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Access tier.
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the commitBlockList operation. */ +export declare type BlockBlobCommitBlockListResponse = BlockBlobCommitBlockListHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobCommitBlockListHeaders; + }; +}; + +/** Defines headers for BlockBlob_getBlockList operation. */ +export declare interface BlockBlobGetBlockListHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The media type of the body of the response. For Get Block List this is 'application/xml' */ + contentType?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.getBlockList} operation. + */ +export declare interface BlockBlobGetBlockListOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions & TagConditions; +} + +/** Contains response data for the getBlockList operation. */ +export declare type BlockBlobGetBlockListResponse = BlockBlobGetBlockListHeaders & BlockList & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlockList; + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobGetBlockListHeaders; + }; +}; + +/** + * Option interface for {@link BlockBlobClient.uploadFile} and {@link BlockBlobClient.uploadSeekableStream}. + */ +export declare interface BlockBlobParallelUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Destination block blob size in bytes. 
+ */ + blockSize?: number; + /** + * Blob size threshold in bytes to start concurrency uploading. + * Default value is 256MB, blob size less than this option will + * be uploaded via one I/O operation without concurrency. + * You can customize a value less equal than the default value. + */ + maxSingleShotSize?: number; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Blob HTTP Headers. A common header to set is + * `blobContentType`, enabling the browser to provide + * functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Concurrency of parallel uploading. Must be greater than or equal to 0. + */ + concurrency?: number; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} + +/** Defines headers for BlockBlob_putBlobFromUrl operation. */ +export declare interface BlockBlobPutBlobFromUrlHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. 
Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the putBlobFromUrl operation. */ +export declare type BlockBlobPutBlobFromUrlResponse = BlockBlobPutBlobFromUrlHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobPutBlobFromUrlHeaders; + }; +}; + +/** + * Options to configure {@link BlockBlobClient.query} operation. + */ +export declare interface BlockBlobQueryOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Configurations for the query input. + */ + inputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryParquetConfiguration; + /** + * Configurations for the query output. + */ + outputTextConfiguration?: BlobQueryJsonTextConfiguration | BlobQueryCsvTextConfiguration | BlobQueryArrowConfiguration; + /** + * Callback to receive events on the progress of query operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Callback to receive error events during the query operaiton. + */ + onError?: (error: BlobQueryError) => void; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; +} + +/** Defines headers for BlockBlob_stageBlockFromURL operation. */ +export declare interface BlockBlobStageBlockFromURLHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. 
*/ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.stageBlockFromURL} operation. + */ +export declare interface BlockBlobStageBlockFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Specifies the bytes of the source Blob/File to upload. + * If not specified, the entire content is uploaded as a single block. + */ + range?: Range_2; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the stageBlockFromURL operation. */ +export declare type BlockBlobStageBlockFromURLResponse = BlockBlobStageBlockFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockFromURLHeaders; + }; +}; + +/** Defines headers for BlockBlob_stageBlock operation. */ +export declare interface BlockBlobStageBlockHeaders { + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. 
*/ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the block. This header is only returned when the block was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.stageBlock} operation. + */ +export declare interface BlockBlobStageBlockOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; + /** + * Callback to receive events on the progress of stage block operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the block content. This hash is used to verify the integrity of the block during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the stageBlock operation. */ +export declare type BlockBlobStageBlockResponse = BlockBlobStageBlockHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobStageBlockHeaders; + }; +}; + +/** + * Options to configure {@link BlockBlobClient.syncUploadFromURL} operation. + */ +export declare interface BlockBlobSyncUploadFromURLOptions extends CommonOptions { + /** + * Server timeout in seconds. 
+ * For more information, @see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/setting-timeouts-for-blob-service-operations + */ + timeoutInSeconds?: number; + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies a user-defined name-value pair associated with the blob. If no name-value + * pairs are specified, the operation will copy the metadata from the source blob or file to the + * destination blob. If one or more name-value pairs are specified, the destination blob is + * created with the specified metadata, and metadata is not copied from the source blob or file. + * Note that beginning with version 2009-09-19, metadata names must adhere to the naming rules + * for C# identifiers. See Naming and Referencing Containers, Blobs, and Metadata for more + * information. + */ + metadata?: Metadata; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Specify the md5 calculated for the range of bytes that must be read from the copy source. + */ + sourceContentMD5?: Uint8Array; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Optional, default is true. Indicates if properties from the source blob should be copied. + */ + copySourceBlobProperties?: boolean; + /** + * HTTP headers to set when uploading to a block blob. + * + * A common header to set is `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Conditions to meet for the destination Azure Blob. + */ + conditions?: BlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Conditions to meet for the source Azure Blob. + */ + sourceConditions?: ModifiedAccessConditions; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; + /** + * Optional, default 'replace'. Indicates if source tags should be copied or replaced with the tags specified by {@link tags}. + */ + copySourceTags?: BlobCopySourceTags; +} + +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +export declare enum BlockBlobTier { + /** + * Optimized for storing data that is accessed frequently. + */ + Hot = "Hot", + /** + * Optimized for storing data that is infrequently accessed and stored for at least 30 days. + */ + Cool = "Cool", + /** + * Optimized for storing data that is rarely accessed and stored for at least 180 days + * with flexible latency requirements (on the order of hours). + */ + Archive = "Archive" +} + +/** Defines headers for BlockBlob_upload operation. 
*/ +export declare interface BlockBlobUploadHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link BlockBlobClient.upload} operation. + */ +export declare interface BlockBlobUploadOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading to the block blob. + */ + conditions?: BlobRequestConditions; + /** + * HTTP headers to set when uploading to a block blob. A common header to set is + * `blobContentType`, enabling the browser to provide functionality + * based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when uploading to a block blob. + */ + metadata?: Metadata; + /** + * Callback to receive events on the progress of upload operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. 
+ */ + encryptionScope?: string; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; + /** + * Optional. Specifies immutability policy for a blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + immutabilityPolicy?: BlobImmutabilityPolicy; + /** + * Optional. Indicates if a legal hold should be placed on the blob. + * Note that is parameter is only applicable to a blob within a container that + * has version level worm enabled. + */ + legalHold?: boolean; + /** + * Blob tags. + */ + tags?: Tags; +} + +/** Contains response data for the upload operation. */ +export declare type BlockBlobUploadResponse = BlockBlobUploadHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: BlockBlobUploadHeaders; + }; +}; + +/** + * Option interface for the {@link BlockBlobClient.uploadStream} operation. + */ +export declare interface BlockBlobUploadStreamOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Blob HTTP Headers. + * + * A common header to set is `blobContentType`, enabling the + * browser to provide functionality based on file type. + * + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * Metadata of block blob. + */ + metadata?: { + [propertyName: string]: string; + }; + /** + * Access conditions headers. + */ + conditions?: BlobRequestConditions; + /** + * Progress updater. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Blob tags. + */ + tags?: Tags; + /** + * Access tier. + * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers + */ + tier?: BlockBlobTier | string; +} + +export declare interface BlockList { + committedBlocks?: Block[]; + uncommittedBlocks?: Block[]; +} + +/** Defines values for BlockListType. */ +export declare type BlockListType = "committed" | "uncommitted" | "all"; + +declare interface ClearRange { + start: number; + end: number; +} + +/** + * Common options of {@link BlobGenerateSasUrlOptions} and {@link ContainerGenerateSasUrlOptions}. + */ +export declare interface CommonGenerateSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols, HTTPS only or HTTPSandHTTP + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The time after which the SAS will no longer work. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The name of the access policy on the container this SAS references if any. 
+ * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. The cache-control header for the SAS. + */ + cacheControl?: string; + /** + * Optional. The content-disposition header for the SAS. + */ + contentDisposition?: string; + /** + * Optional. The content-encoding header for the SAS. + */ + contentEncoding?: string; + /** + * Optional. The content-language header for the SAS. + */ + contentLanguage?: string; + /** + * Optional. The content-type header for the SAS. + */ + contentType?: string; +} + +/** + * An interface for options common to every remote operation. + */ +export declare interface CommonOptions { + /** + * Options to configure spans created when tracing is enabled. + */ + tracingOptions?: OperationTracingOptions; +} + +/** + * Options to configure Container - Acquire Lease operation. + */ +export declare interface ContainerAcquireLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when acquiring the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** Optional parameters. */ +export declare interface ContainerBreakLeaseOptionalParams extends coreHttp.OperationOptions { + /** Parameter group */ + modifiedAccessConditions?: ModifiedAccessConditionsModel; + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; + /** For a break operation, proposed duration the lease should continue before it is broken, in seconds, between 0 and 60. This break period is only used if it is shorter than the time remaining on the lease. If longer, the time remaining on the lease is used. A new lease will not be available before the break period has expired, but the lease may be held for longer than the break period. If this header does not appear with a break operation, a fixed-duration lease breaks after the remaining lease period elapses, and an infinite lease breaks immediately. */ + breakPeriod?: number; +} + +/** + * Options to configure Container - Break Lease operation. + */ +export declare interface ContainerBreakLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when breaking the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Options to configure Container - Change Lease operation. + */ +export declare interface ContainerChangeLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. 
+ */ + conditions?: ModifiedAccessConditions; +} + +/** + * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. + */ +export declare class ContainerClient extends StorageClient { + /** + * containerContext provided by protocol layer. + */ + private containerContext; + private _containerName; + /** + * The name of the container. + */ + get containerName(): string; + /** + * + * Creates an instance of ContainerClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of ContainerClient. + * This method accepts an URL pointing to a container. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A URL string pointing to Azure Storage container, such as + * "https://myaccount.blob.core.windows.net/mycontainer". You can + * append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer?sasString". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. + */ + constructor(url: string, pipeline: PipelineLike); + /** + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * + * @param options - Options to Container Create operation. 
+ *
+ *
+ * Example usage:
+ *
+ * ```js
+ * const containerClient = blobServiceClient.getContainerClient("");
+ * const createContainerResponse = await containerClient.create();
+ * console.log("Container was created successfully", createContainerResponse.requestId);
+ * ```
+ */
+ create(options?: ContainerCreateOptions): Promise<ContainerCreateResponse>;
+ /**
+ * Creates a new container under the specified account. If the container with
+ * the same name already exists, it is not changed.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container
+ *
+ * @param options -
+ */
+ createIfNotExists(options?: ContainerCreateOptions): Promise<ContainerCreateIfNotExistsResponse>;
+ /**
+ * Returns true if the Azure container resource represented by this client exists; false otherwise.
+ *
+ * NOTE: use this function with care since an existing container might be deleted by other clients or
+ * applications. Vice versa new containers with the same name might be added by other clients or
+ * applications after this function completes.
+ *
+ * @param options -
+ */
+ exists(options?: ContainerExistsOptions): Promise<boolean>;
+ /**
+ * Creates a {@link BlobClient}
+ *
+ * @param blobName - A blob name
+ * @returns A new BlobClient object for the given blob name.
+ */
+ getBlobClient(blobName: string): BlobClient;
+ /**
+ * Creates an {@link AppendBlobClient}
+ *
+ * @param blobName - An append blob name
+ */
+ getAppendBlobClient(blobName: string): AppendBlobClient;
+ /**
+ * Creates a {@link BlockBlobClient}
+ *
+ * @param blobName - A block blob name
+ *
+ *
+ * Example usage:
+ *
+ * ```js
+ * const content = "Hello world!";
+ *
+ * const blockBlobClient = containerClient.getBlockBlobClient("");
+ * const uploadBlobResponse = await blockBlobClient.upload(content, content.length);
+ * ```
+ */
+ getBlockBlobClient(blobName: string): BlockBlobClient;
+ /**
+ * Creates a {@link PageBlobClient}
+ *
+ * @param blobName - A page blob name
+ */
+ getPageBlobClient(blobName: string): PageBlobClient;
+ /**
+ * Returns all user-defined metadata and system properties for the specified
+ * container. The data returned does not include the container's list of blobs.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties
+ *
+ * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
+ * they originally contained uppercase characters. This differs from the metadata keys returned by
+ * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which
+ * will retain their original casing.
+ *
+ * @param options - Options to Container Get Properties operation.
+ */
+ getProperties(options?: ContainerGetPropertiesOptions): Promise<ContainerGetPropertiesResponse>;
+ /**
+ * Marks the specified container for deletion. The container and any blobs
+ * contained within it are later deleted during garbage collection.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+ *
+ * @param options - Options to Container Delete operation.
+ */
+ delete(options?: ContainerDeleteMethodOptions): Promise<ContainerDeleteResponse>;
+ /**
+ * Marks the specified container for deletion if it exists. The container and any blobs
+ * contained within it are later deleted during garbage collection.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container
+ *
+ * @param options - Options to Container Delete operation.
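+ *
+ * Example usage (an editor's sketch, not part of the original declarations; it assumes a
+ * `containerClient` created as in the `create` example above):
+ *
+ * ```js
+ * const response = await containerClient.deleteIfExists();
+ * if (response.succeeded) {
+ *   console.log("Container was deleted");
+ * } else {
+ *   console.log("Container did not exist");
+ * }
+ * ```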
+ */
+ deleteIfExists(options?: ContainerDeleteMethodOptions): Promise<ContainerDeleteIfExistsResponse>;
+ /**
+ * Sets one or more user-defined name-value pairs for the specified container.
+ *
+ * If no option is provided, or no metadata is defined in the parameter, the container
+ * metadata will be removed.
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata
+ *
+ * @param metadata - Replace existing metadata with this value.
+ * If no value is provided, the existing metadata will be removed.
+ * @param options - Options to Container Set Metadata operation.
+ */
+ setMetadata(metadata?: Metadata, options?: ContainerSetMetadataOptions): Promise<ContainerSetMetadataResponse>;
+ /**
+ * Gets the permissions for the specified container. The permissions indicate
+ * whether container data may be accessed publicly.
+ *
+ * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings.
+ * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z".
+ *
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl
+ *
+ * @param options - Options to Container Get Access Policy operation.
+ */
+ getAccessPolicy(options?: ContainerGetAccessPolicyOptions): Promise<ContainerGetAccessPolicyResponse>;
+ /**
+ * Sets the permissions for the specified container. The permissions indicate
+ * whether blobs in a container may be accessed publicly.
+ *
+ * When you set permissions for a container, the existing permissions are replaced.
+ * If no access or containerAcl is provided, the existing container ACL will be
+ * removed.
+ *
+ * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect.
+ * During this interval, a shared access signature that is associated with the stored access policy will
+ * fail with status code 403 (Forbidden), until the access policy becomes active.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl
+ *
+ * @param access - The level of public access to data in the container.
+ * @param containerAcl - Array of elements each having a unique Id and details of the access policy.
+ * @param options - Options to Container Set Access Policy operation.
+ */
+ setAccessPolicy(access?: PublicAccessType, containerAcl?: SignedIdentifier[], options?: ContainerSetAccessPolicyOptions): Promise<ContainerSetAccessPolicyResponse>;
+ /**
+ * Get a {@link BlobLeaseClient} that manages leases on the container.
+ *
+ * @param proposeLeaseId - Initial proposed lease Id.
+ * @returns A new BlobLeaseClient object for managing leases on the container.
+ */
+ getBlobLeaseClient(proposeLeaseId?: string): BlobLeaseClient;
+ /**
+ * Creates a new block blob, or updates the content of an existing block blob.
+ *
+ * Updating an existing block blob overwrites any existing metadata on the blob.
+ * Partial updates are not supported; the content of the existing blob is
+ * overwritten with the new content. To perform a partial update of a block blob,
+ * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}.
+ *
+ * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile},
+ * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better
+ * performance with concurrent uploading.
+ *
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param blobName - Name of the block blob to create or update.
+ * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function
+ * which returns a new Readable stream whose offset is from data source beginning.
+ * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a
+ * string including non-Base64/Hex-encoded characters.
+ * @param options - Options to configure the Block Blob Upload operation.
+ * @returns Block Blob upload response data and the corresponding BlockBlobClient instance.
+ */
+ uploadBlockBlob(blobName: string, body: HttpRequestBody, contentLength: number, options?: BlockBlobUploadOptions): Promise<{
+ blockBlobClient: BlockBlobClient;
+ response: BlockBlobUploadResponse;
+ }>;
+ /**
+ * Marks the specified blob or snapshot for deletion. The blob is later deleted
+ * during garbage collection. Note that in order to delete a blob, you must delete
+ * all of its snapshots. You can delete both at the same time with the Delete
+ * Blob operation.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ *
+ * @param blobName -
+ * @param options - Options to Blob Delete operation.
+ * @returns Block blob deletion response data.
+ */
+ deleteBlob(blobName: string, options?: ContainerDeleteBlobOptions): Promise<BlobDeleteResponse>;
+ /**
+ * listBlobFlatSegment returns a single segment of blobs starting from the
+ * specified Marker. Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call listBlobsFlatSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+ *
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to Container List Blob Flat Segment operation.
+ */
+ private listBlobFlatSegment;
+ /**
+ * listBlobHierarchySegment returns a single segment of blobs starting from
+ * the specified Marker. Use an empty Marker to start enumeration from the
+ * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment
+ * again (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs
+ *
+ * @param delimiter - The character or string used to define the virtual hierarchy
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to Container List Blob Hierarchy Segment operation.
+ */
+ private listBlobHierarchySegment;
+ /**
+ * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse
+ *
+ * @param marker - A string value that identifies the portion of
+ * the list of blobs to be returned with the next listing operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * listing operation did not return all blobs remaining to be listed
+ * with the current page. The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of list
+ * items. The marker value is opaque to the client.
+ * @param options - Options to list blobs operation.
+ */
+ private listSegments;
+ /**
+ * Returns an AsyncIterableIterator of {@link BlobItem} objects
+ *
+ * @param options - Options to list blobs operation.
+ */ + private listItems; + /** + * Returns an async iterable iterator to list all the blobs + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` + * let i = 1; + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * @param options - Options to list blobs. + * @returns An asyncIterableIterator that supports paging. + */ + listBlobsFlat(options?: ContainerListBlobsOptions): PagedAsyncIterableIterator; + /** + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the ContinuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list blobs operation. + */ + private listHierarchySegments; + /** + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + private listItemsByHierarchy; + /** + * Returns an async iterable iterator to list all the blobs by hierarchy. + * under the specified account. + * + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * Example using paging with a max page size: + * + * ```js + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); + * + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; + * + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } + * } + * ``` + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. + */ + listBlobsByHierarchy(delimiter: string, options?: ContainerListBlobsOptions): PagedAsyncIterableIterator<({ + kind: "prefix"; + } & BlobPrefix) | ({ + kind: "blob"; + } & BlobItem), ContainerListBlobHierarchySegmentResponse>; + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegment; + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. 
+ * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + private findBlobsByTagsSegments; + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + private findBlobsByTagsItems; + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. + */ + findBlobsByTags(tagFilterSqlExpression: string, options?: ContainerFindBlobByTagsOptions): PagedAsyncIterableIterator; + private getContainerNameFromUrl; + /** + * Only available for ContainerClient constructed with a shared key credential. + * + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options: ContainerGenerateSasUrlOptions): Promise; + /** + * Creates a BlobBatchClient object to conduct batch operations. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. + */ + getBlobBatchClient(): BlobBatchClient; +} + +/** Defines headers for Container_create operation. */ +export declare interface ContainerCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. 
*/ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link ContainerClient.createIfNotExists} operation. + */ +export declare interface ContainerCreateIfNotExistsResponse extends ContainerCreateResponse { + /** + * Indicate whether the container is successfully created. Is false when the container is not changed as it already exists. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link ContainerClient.create} operation. + */ +export declare interface ContainerCreateOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A collection of key-value string pair to associate with the container. + */ + metadata?: Metadata; + /** + * Specifies whether data in the container may be accessed publicly and the level of access. Possible values include: + * - `container`: Specifies full public read access for container and blob data. Clients can enumerate blobs within the container via anonymous request, but cannot enumerate containers within the storage account. + * - `blob`: Specifies public read access for blobs. Blob data within this container can be read via anonymous request, but container data is not available. Clients cannot enumerate blobs within the container via anonymous request. + */ + access?: PublicAccessType; + /** + * Container encryption scope info. + */ + containerEncryptionScope?: ContainerEncryptionScope; +} + +/** Contains response data for the create operation. */ +export declare type ContainerCreateResponse = ContainerCreateHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerCreateHeaders; + }; +}; + +/** + * Options to configure the {@link ContainerClient.deleteBlob} operation. + */ +export declare interface ContainerDeleteBlobOptions extends BlobDeleteOptions { + /** + * An opaque DateTime value that, when present, specifies the version + * of the blob to delete. It's for service version 2019-10-10 and newer. + */ + versionId?: string; +} + +/** Defines headers for Container_delete operation. */ +export declare interface ContainerDeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. 
*/ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the {@link ContainerClient.deleteIfExists} operation. + */ +export declare interface ContainerDeleteIfExistsResponse extends ContainerDeleteResponse { + /** + * Indicate whether the container is successfully deleted. Is false if the container does not exist in the first place. + */ + succeeded: boolean; +} + +/** + * Options to configure {@link ContainerClient.delete} operation. + */ +export declare interface ContainerDeleteMethodOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when deleting the container. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the delete operation. */ +export declare type ContainerDeleteResponse = ContainerDeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerDeleteHeaders; + }; +}; + +/** Parameter group */ +export declare interface ContainerEncryptionScope { + /** Optional. Version 2019-07-07 and later. Specifies the default encryption scope to set on the container and use for all future writes. */ + defaultEncryptionScope?: string; + /** Optional. Version 2019-07-07 and newer. If true, prevents any request from specifying a different encryption scope than the scope set on the container. */ + preventEncryptionScopeOverride?: boolean; +} + +/** + * Options to configure {@link ContainerClient.exists} operation. + */ +export declare interface ContainerExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Defines headers for Container_filterBlobs operation. */ +export declare interface ContainerFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; +} + +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ContainerFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. 
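+ *
+ * A minimal sketch (editor's addition, not in the original comment; it assumes the
+ * `@azure/abort-controller` package and a `ContainerClient` instance named `containerClient`):
+ *
+ * ```js
+ * const { AbortController } = require("@azure/abort-controller");
+ * const controller = new AbortController();
+ * // Pass the signal when starting the operation; call controller.abort() to cancel it.
+ * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'", { abortSignal: controller.signal });
+ * ```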
+ */ + abortSignal?: AbortSignalLike; +} + +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ContainerFindBlobsByTagsSegmentResponse = FilterBlobSegment & ContainerFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; + +/** + * Options to configure {@link ContainerClient.generateSasUrl} operation. + */ +export declare interface ContainerGenerateSasUrlOptions extends CommonGenerateSasUrlOptions { + /** + * Optional only when identifier is provided. Specifies the list of permissions to be associated with the SAS. + */ + permissions?: ContainerSASPermissions; +} + +/** Defines headers for Container_getAccessPolicy operation. */ +export declare interface ContainerGetAccessPolicyHeaders { + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.getAccessPolicy} operation. + */ +export declare interface ContainerGetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** + * Contains response data for the {@link ContainerClient.getAccessPolicy} operation. + */ +export declare type ContainerGetAccessPolicyResponse = { + signedIdentifiers: SignedIdentifier[]; +} & ContainerGetAccessPolicyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerGetAccessPolicyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: SignedIdentifierModel[]; + }; +}; + +/** Defines headers for Container_getProperties operation. 
*/ +export declare interface ContainerGetPropertiesHeaders { + metadata?: { + [propertyName: string]: string; + }; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** When a blob is leased, specifies whether the lease is of infinite or fixed duration. */ + leaseDuration?: LeaseDurationType; + /** Lease state of the blob. */ + leaseState?: LeaseStateType; + /** The current lease status of the blob. */ + leaseStatus?: LeaseStatusType; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Indicated whether data in the container may be accessed publicly and the level of access */ + blobPublicAccess?: PublicAccessType; + /** Indicates whether the container has an immutability policy set on it. */ + hasImmutabilityPolicy?: boolean; + /** Indicates whether the container has a legal hold. */ + hasLegalHold?: boolean; + /** The default encryption scope for the container. */ + defaultEncryptionScope?: string; + /** Indicates whether the container's default encryption scope can be overriden. */ + denyEncryptionScopeOverride?: boolean; + /** Indicates whether version level worm is enabled on a container. */ + isImmutableStorageWithVersioningEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.getProperties} operation. + */ +export declare interface ContainerGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: LeaseAccessConditions; +} + +/** Contains response data for the getProperties operation. */ +export declare type ContainerGetPropertiesResponse = ContainerGetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerGetPropertiesHeaders; + }; +}; + +/** An Azure Storage container */ +export declare interface ContainerItem { + name: string; + deleted?: boolean; + version?: string; + /** Properties of a container */ + properties: ContainerProperties; + /** Dictionary of */ + metadata?: { + [propertyName: string]: string; + }; +} + +/** Defines headers for Container_listBlobFlatSegment operation. */ +export declare interface ContainerListBlobFlatSegmentHeaders { + /** The media type of the body of the response. 
For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the listBlobFlatSegment operation. + */ +export declare type ContainerListBlobFlatSegmentResponse = ListBlobsFlatSegmentResponse & ContainerListBlobFlatSegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobFlatSegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsFlatSegmentResponseModel; + }; +}; + +/** Defines headers for Container_listBlobHierarchySegment operation. */ +export declare interface ContainerListBlobHierarchySegmentHeaders { + /** The media type of the body of the response. For List Blobs this is 'application/xml' */ + contentType?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Contains response data for the listBlobHierarchySegment operation. + */ +export declare type ContainerListBlobHierarchySegmentResponse = ListBlobsHierarchySegmentResponse & ContainerListBlobHierarchySegmentHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ContainerListBlobHierarchySegmentHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: ListBlobsHierarchySegmentResponseModel; + }; +}; + +/** + * Options to configure Container - List Blobs operations. + * + * See: + * - {@link ContainerClient.listBlobsFlat} + * - {@link ContainerClient.listBlobsByHierarchy} + */ +export declare interface ContainerListBlobsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether metadata related to any current or previous Copy Blob operation should be included in the response. 
+ */ + includeCopy?: boolean; + /** + * Specifies whether soft deleted blobs should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether blob metadata be returned in the response. + */ + includeMetadata?: boolean; + /** + * Specifies whether snapshots should be included in the enumeration. Snapshots are listed from oldest to newest in the response. + */ + includeSnapshots?: boolean; + /** + * Specifies whether versions should be included in the enumeration. Versions are listed from oldest to newest in the response. + */ + includeVersions?: boolean; + /** + * Specifies whether blobs for which blocks have been uploaded, but which have not been committed using Put Block List, be included in the response. + */ + includeUncommitedBlobs?: boolean; + /** + * Specifies whether blob tags be returned in the response. + */ + includeTags?: boolean; + /** + * Specifies whether deleted blob with versions be returned in the response. + */ + includeDeletedWithVersions?: boolean; + /** + * Specifies whether blob immutability policy be returned in the response. + */ + includeImmutabilityPolicy?: boolean; + /** + * Specifies whether blob legal hold be returned in the response. + */ + includeLegalHold?: boolean; +} + +/** Properties of a container */ +export declare interface ContainerProperties { + lastModified: Date; + etag: string; + leaseStatus?: LeaseStatusType; + leaseState?: LeaseStateType; + leaseDuration?: LeaseDurationType; + publicAccess?: PublicAccessType; + hasImmutabilityPolicy?: boolean; + hasLegalHold?: boolean; + defaultEncryptionScope?: string; + preventEncryptionScopeOverride?: boolean; + deletedOn?: Date; + remainingRetentionDays?: number; + /** Indicates if version level worm is enabled on this container. */ + isImmutableStorageWithVersioningEnabled?: boolean; +} + +/** + * Options to configure Container - Release Lease operation. + */ +export declare interface ContainerReleaseLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when releasing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** Defines headers for Container_rename operation. */ +export declare interface ContainerRenameHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the rename operation. */ +export declare type ContainerRenameResponse = ContainerRenameHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerRenameHeaders; + }; +}; + +/** + * Options to configure Container - Renew Lease operation. 
+ */ +export declare interface ContainerRenewLeaseOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when renewing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Conditions to meet for the container. + */ +export declare interface ContainerRequestConditions extends LeaseAccessConditions, ModificationConditions { +} + +/** + * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. + * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. + * Once all the values are set, this should be serialized with toString and set as the permissions field on a + * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but + * the order of the permissions is particular and this class guarantees correctness. + */ +export declare class ContainerSASPermissions { + /** + * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an + * Error if it encounters a character that does not correspond to a valid permission. + * + * @param permissions - + */ + static parse(permissions: string): ContainerSASPermissions; + /** + * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. + * + * @param permissionLike - + */ + static from(permissionLike: ContainerSASPermissionsLike): ContainerSASPermissions; + /** + * Specifies Read access granted. + */ + read: boolean; + /** + * Specifies Add access granted. + */ + add: boolean; + /** + * Specifies Create access granted. + */ + create: boolean; + /** + * Specifies Write access granted. + */ + write: boolean; + /** + * Specifies Delete access granted. + */ + delete: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion: boolean; + /** + * Specifies List access granted. + */ + list: boolean; + /** + * Specfies Tag access granted. + */ + tag: boolean; + /** + * Specifies Move access granted. + */ + move: boolean; + /** + * Specifies Execute access granted. + */ + execute: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags: boolean; + /** + * Converts the given permissions to a string. Using this method will guarantee the permissions are in an + * order accepted by the service. + * + * The order of the characters should be as specified here to ensure correctness. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + */ + toString(): string; +} + +/** + * A type that looks like a Container SAS permission. + * Used in {@link ContainerSASPermissions} to parse SAS permissions from raw objects. + */ +export declare interface ContainerSASPermissionsLike { + /** + * Specifies Read access granted. + */ + read?: boolean; + /** + * Specifies Add access granted. + */ + add?: boolean; + /** + * Specifies Create access granted. + */ + create?: boolean; + /** + * Specifies Write access granted. 
+ */ + write?: boolean; + /** + * Specifies Delete access granted. + */ + delete?: boolean; + /** + * Specifies Delete version access granted. + */ + deleteVersion?: boolean; + /** + * Specifies List access granted. + */ + list?: boolean; + /** + * Specfies Tag access granted. + */ + tag?: boolean; + /** + * Specifies Move access granted. + */ + move?: boolean; + /** + * Specifies Execute access granted. + */ + execute?: boolean; + /** + * Specifies SetImmutabilityPolicy access granted. + */ + setImmutabilityPolicy?: boolean; + /** + * Specifies that Permanent Delete is permitted. + */ + permanentDelete?: boolean; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + filterByTags?: boolean; +} + +/** Defines headers for Container_setAccessPolicy operation. */ +export declare interface ContainerSetAccessPolicyHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.setAccessPolicy} operation. + */ +export declare interface ContainerSetAccessPolicyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when setting the access policy. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the setAccessPolicy operation. */ +export declare type ContainerSetAccessPolicyResponse = ContainerSetAccessPolicyHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetAccessPolicyHeaders; + }; +}; + +/** Defines headers for Container_setMetadata operation. */ +export declare interface ContainerSetMetadataHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link ContainerClient.setMetadata} operation. + */ +export declare interface ContainerSetMetadataOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * If specified, contains the lease id that must be matched and lease with this id + * must be active in order for the operation to succeed. + */ + conditions?: ContainerRequestConditions; +} + +/** Contains response data for the setMetadata operation. */ +export declare type ContainerSetMetadataResponse = ContainerSetMetadataHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerSetMetadataHeaders; + }; +}; + +/** Defines headers for Container_restore operation. */ +export declare interface ContainerUndeleteHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the restore operation. */ +export declare type ContainerUndeleteResponse = ContainerUndeleteHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ContainerUndeleteHeaders; + }; +}; + +/** + * Defines the operations from a {@link BlobClient} that are needed for the poller + * returned by {@link BlobClient.beginCopyFromURL} to work. + */ +export declare type CopyPollerBlobClient = Pick & { + startCopyFromURL(copySource: string, options?: BlobStartCopyFromURLOptions): Promise; +}; + +/** Defines values for CopyStatusType. */ +export declare type CopyStatusType = "pending" | "success" | "aborted" | "failed"; + +/** CORS is an HTTP feature that enables a web application running under one domain to access resources in another domain. Web browsers implement a security restriction known as same-origin policy that prevents a web page from calling APIs in a different domain; CORS provides a secure way to allow one domain (the origin domain) to call APIs in another domain */ +export declare interface CorsRule { + /** The origin domains that are permitted to make a request against the storage service via CORS. The origin domain is the domain from which the request originates. 
Note that the origin must be an exact case-sensitive match with the origin that the user agent sends to the service. You can also use the wildcard character '*' to allow all origin domains to make requests via CORS. */
+ allowedOrigins: string;
+ /** The methods (HTTP request verbs) that the origin domain may use for a CORS request. (comma separated) */
+ allowedMethods: string;
+ /** The request headers that the origin domain may specify on the CORS request. */
+ allowedHeaders: string;
+ /** The response headers that may be sent in the response to the CORS request and exposed by the browser to the request issuer */
+ exposedHeaders: string;
+ /** The maximum amount of time that a browser should cache the preflight OPTIONS request. */
+ maxAgeInSeconds: number;
+}
+
+/** Parameter group */
+export declare interface CpkInfo {
+ /** Optional. Specifies the encryption key to use to encrypt the data provided in the request. If not specified, encryption is performed with the root account encryption key. For more information, see Encryption at Rest for Azure Storage Services. */
+ encryptionKey?: string;
+ /** The SHA-256 hash of the provided encryption key. Must be provided if the x-ms-encryption-key header is provided. */
+ encryptionKeySha256?: string;
+ /** The algorithm used to produce the encryption key hash. Currently, the only accepted value is "AES256". Must be provided if the x-ms-encryption-key header is provided. */
+ encryptionAlgorithm?: EncryptionAlgorithmType;
+}
+
+/**
+ * Credential is an abstract class for Azure Storage HTTP requests signing. This
+ * class will host a credentialPolicyCreator factory which generates CredentialPolicy.
+ */
+declare abstract class Credential_2 implements RequestPolicyFactory {
+ /**
+ * Creates a RequestPolicy object.
+ *
+ * @param _nextPolicy -
+ * @param _options -
+ */
+ create(_nextPolicy: RequestPolicy, _options: RequestPolicyOptions): RequestPolicy;
+}
+export { Credential_2 as Credential }
+
+/**
+ * Credential policy used to sign HTTP(S) requests before sending. This is an
+ * abstract class.
+ */
+export declare abstract class CredentialPolicy extends BaseRequestPolicy {
+ /**
+ * Sends out request.
+ *
+ * @param request -
+ */
+ sendRequest(request: WebResource): Promise<HttpOperationResponse>;
+ /**
+ * Child classes must implement this method with request signing. This method
+ * will be executed in {@link sendRequest}.
+ *
+ * @param request -
+ */
+ protected signRequest(request: WebResource): WebResource;
+}
+
+/**
+ * A factory function that creates a new CredentialPolicy that uses the provided nextPolicy.
+ */
+export declare type CredentialPolicyCreator = (nextPolicy: RequestPolicy, options: RequestPolicyOptions) => CredentialPolicy;
+
+/** Defines values for DeleteSnapshotsOptionType. */
+export declare type DeleteSnapshotsOptionType = "include" | "only";
+
+export { deserializationPolicy }
+
+/** Defines values for EncryptionAlgorithmType. */
+export declare type EncryptionAlgorithmType = "AES256";
+
+/**
+ * Blob info from a {@link BlobServiceClient.findBlobsByTags}
+ */
+export declare interface FilterBlobItem {
+ /**
+ * Blob Name.
+ */
+ name: string;
+ /**
+ * Container Name.
+ */
+ containerName: string;
+ /**
+ * Blob Tags.
+ */
+ tags?: Tags;
+ /**
+ * Tag value.
+ *
+ * @deprecated The service no longer returns this value. Use {@link tags} to fetch all matching Blob Tags.
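// Illustrative sketch of applying the CorsRule shape declared above through
// BlobServiceClient.setProperties. The account name/key and the origin, header and
// method values are placeholders to adapt to your own web app.
import { BlobServiceClient, StorageSharedKeyCredential } from "@azure/storage-blob";

async function enableCors(accountName: string, accountKey: string): Promise<void> {
  const credential = new StorageSharedKeyCredential(accountName, accountKey);
  const service = new BlobServiceClient(
    `https://${accountName}.blob.core.windows.net`,
    credential
  );

  await service.setProperties({
    cors: [
      {
        allowedOrigins: "https://app.example.com", // exact, case-sensitive match (or "*")
        allowedMethods: "GET,PUT,OPTIONS",          // comma-separated verbs
        allowedHeaders: "content-type,x-ms-blob-type",
        exposedHeaders: "x-ms-request-id",
        maxAgeInSeconds: 3600,                      // how long browsers may cache the preflight
      },
    ],
  });
}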
+ */ + tagValue: string; +} + +/** Blob info from a Filter Blobs API call */ +export declare interface FilterBlobItemModel { + name: string; + containerName: string; + /** Blob tags */ + tags?: BlobTags; +} + +/** + * Segment response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface FilterBlobSegment { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItem[]; + continuationToken?: string; +} + +/** The result of a Filter Blobs API call */ +export declare interface FilterBlobSegmentModel { + serviceEndpoint: string; + where: string; + blobs: FilterBlobItemModel[]; + continuationToken?: string; +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual + * REST request. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + * @param accountSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateAccountSASQueryParameters(accountSASSignatureValues: AccountSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * Fill in the required details before running the following snippets. + * + * Example usage: + * + * ```js + * // Generate service level SAS for a container + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using an identifier: + * + * ```js + * // Generate service level SAS for a container with identifier + * // startsOn & permissions are optional when identifier is provided + * const identifier = "unique-id"; + * await containerClient.setAccessPolicy(undefined, [ + * { + * accessPolicy: { + * expiresOn: new Date(new Date().valueOf() + 86400), // Date type + * permissions: ContainerSASPermissions.parse("racwdl").toString(), + * startsOn: new Date() // Date type + * }, + * id: identifier + * } + * ]); + * + * const containerSAS = generateBlobSASQueryParameters( + * { + * containerName, // Required + * identifier // Required + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * Example using a blob name: + * + * ```js + * // Generate service level SAS for a blob + * const blobSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * blobName, // Required + * permissions: BlobSASPermissions.parse("racwd"), // Required + * startsOn: new Date(), // Optional + * expiresOn: new Date(new Date().valueOf() + 86400), // Required. Date type + * cacheControl: "cache-control-override", // Optional + * contentDisposition: "content-disposition-override", // Optional + * contentEncoding: "content-encoding-override", // Optional + * contentLanguage: "content-language-override", // Optional + * contentType: "content-type-override", // Optional + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2016-05-31" // Optional + * }, + * sharedKeyCredential // StorageSharedKeyCredential - `new StorageSharedKeyCredential(account, accountKey)` + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, sharedKeyCredential: StorageSharedKeyCredential): SASQueryParameters; + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Creates an instance of SASQueryParameters. + * WARNING: identifier will be ignored when generating user delegation SAS, permissions and expiresOn are required. + * + * Example usage: + * + * ```js + * // Generate user delegation SAS for a container + * const userDelegationKey = await blobServiceClient.getUserDelegationKey(startsOn, expiresOn); + * const containerSAS = generateBlobSASQueryParameters({ + * containerName, // Required + * permissions: ContainerSASPermissions.parse("racwdl"), // Required + * startsOn, // Optional. Date type + * expiresOn, // Required. 
Date type + * ipRange: { start: "0.0.0.0", end: "255.255.255.255" }, // Optional + * protocol: SASProtocol.HttpsAndHttp, // Optional + * version: "2018-11-09" // Must greater than or equal to 2018-11-09 to generate user delegation SAS + * }, + * userDelegationKey, // UserDelegationKey + * accountName + * ).toString(); + * ``` + * + * @param blobSASSignatureValues - + * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` + * @param accountName - + */ +export declare function generateBlobSASQueryParameters(blobSASSignatureValues: BlobSASSignatureValues, userDelegationKey: UserDelegationKey, accountName: string): SASQueryParameters; + +/** Geo-Replication information for the Secondary Storage Service */ +export declare interface GeoReplication { + /** The status of the secondary location */ + status: GeoReplicationStatusType; + /** A GMT date/time value, to the second. All primary writes preceding this value are guaranteed to be available for read operations at the secondary. Primary writes after this point in time may or may not be available for reads. */ + lastSyncOn: Date; +} + +/** Defines values for GeoReplicationStatusType. */ +export declare type GeoReplicationStatusType = "live" | "bootstrap" | "unavailable"; + +/** + * Represents authentication information in Authorization, ProxyAuthorization, + * WWW-Authenticate, and Proxy-Authenticate header values. + */ +export declare interface HttpAuthorization { + /** + * The scheme to use for authorization. + */ + scheme: string; + /** + * the credentials containing the authentication information of the user agent for the resource being requested. + */ + value: string; +} + +export { HttpHeaders } + +export { HttpOperationResponse } + +export { HttpRequestBody } + +export { IHttpClient } + +/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +export declare function isPipelineLike(pipeline: unknown): pipeline is PipelineLike; + +/** + * The details for a specific lease. + */ +export declare interface Lease { + /** + * The ETag contains a value that you can use to + * perform operations conditionally. If the request version is 2011-08-18 or + * newer, the ETag value will be in quotes. + */ + etag?: string; + /** + * Returns the date and time the container was + * last modified. Any operation that modifies the blob, including an update + * of the blob's metadata or properties, changes the last-modified time of + * the blob. + */ + lastModified?: Date; + /** + * Uniquely identifies a container's lease + */ + leaseId?: string; + /** + * Approximate time remaining in the lease + * period, in seconds. + */ + leaseTime?: number; + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + */ + requestId?: string; + /** + * Indicates the version of the Blob service used + * to execute the request. This header is returned for requests made against + * version 2009-09-19 and above. + */ + version?: string; + /** + * UTC date/time value generated by the service that + * indicates the time at which the response was initiated + */ + date?: Date; + /** + * Error code if any associated with the response that returned + * the Lease information. 
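// A hedged sketch for generateAccountSASQueryParameters (declared earlier in this file).
// It signs account-level values with a StorageSharedKeyCredential; the account name/key
// are placeholders and the permission/service/resource-type strings should be narrowed to
// what you actually need.
import {
  AccountSASPermissions,
  AccountSASResourceTypes,
  AccountSASServices,
  generateAccountSASQueryParameters,
  StorageSharedKeyCredential,
} from "@azure/storage-blob";

function buildAccountSasUrl(accountName: string, accountKey: string): string {
  const credential = new StorageSharedKeyCredential(accountName, accountKey);
  const sas = generateAccountSASQueryParameters(
    {
      permissions: AccountSASPermissions.parse("rl"),                 // read + list
      services: AccountSASServices.parse("b").toString(),             // blob service only
      resourceTypes: AccountSASResourceTypes.parse("sco").toString(), // service, container, object
      expiresOn: new Date(Date.now() + 60 * 60 * 1000),               // valid for one hour
    },
    credential
  ).toString();
  return `https://${accountName}.blob.core.windows.net?${sas}`;
}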
+ */ + errorCode?: string; +} + +/** Parameter group */ +export declare interface LeaseAccessConditions { + /** If specified, the operation only succeeds if the resource's lease is active and matches this ID. */ + leaseId?: string; +} + +/** Defines values for LeaseDurationType. */ +export declare type LeaseDurationType = "infinite" | "fixed"; + +/** + * Configures lease operations. + */ +export declare interface LeaseOperationOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when changing the lease. + */ + conditions?: ModifiedAccessConditions; +} + +/** + * Contains the response data for operations that create, modify, or delete a lease. + * + * See {@link BlobLeaseClient}. + */ +export declare type LeaseOperationResponse = Lease & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: Lease; + }; +}; + +/** Defines values for LeaseStateType. */ +export declare type LeaseStateType = "available" | "leased" | "expired" | "breaking" | "broken"; + +/** Defines values for LeaseStatusType. */ +export declare type LeaseStatusType = "locked" | "unlocked"; + +/** + * An enumeration of blobs + */ +export declare interface ListBlobsFlatSegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegment; + continuationToken?: string; +} + +/** An enumeration of blobs */ +export declare interface ListBlobsFlatSegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + segment: BlobFlatListSegmentModel; + continuationToken?: string; +} + +/** + * An enumeration of blobs + */ +export declare interface ListBlobsHierarchySegmentResponse { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegment; + continuationToken?: string; +} + +/** An enumeration of blobs */ +export declare interface ListBlobsHierarchySegmentResponseModel { + serviceEndpoint: string; + containerName: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + delimiter?: string; + segment: BlobHierarchyListSegmentModel; + continuationToken?: string; +} + +/** An enumeration of containers */ +export declare interface ListContainersSegmentResponse { + serviceEndpoint: string; + prefix?: string; + marker?: string; + maxPageSize?: number; + containerItems: ContainerItem[]; + continuationToken?: string; +} + +/** + * The `@azure/logger` configuration for this package. + */ +export declare const logger: AzureLogger; + +/** Azure Analytics Logging settings. */ +export declare interface Logging { + /** The version of Storage Analytics to configure. */ + version: string; + /** Indicates whether all delete requests should be logged. */ + deleteProperty: boolean; + /** Indicates whether all read requests should be logged. */ + read: boolean; + /** Indicates whether all write requests should be logged. */ + write: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy: RetentionPolicy; +} + +/** + * Specifies HTTP options for conditional requests based on ETag matching. 
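// Sketch of the lease flow behind the Lease / LeaseOperationResponse shapes declared above:
// acquire a short lease, perform a write that quotes the lease id, then release it.
// Container and blob names are placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function updateUnderLease(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  );
  const blob = service.getContainerClient("my-container").getBlockBlobClient("report.csv");
  const leaseClient = blob.getBlobLeaseClient();

  const lease = await leaseClient.acquireLease(30); // 15-60 seconds, or -1 for infinite
  try {
    // While the lease is active, writes must present the lease id or they fail with 412.
    await blob.setMetadata({ state: "processing" }, { conditions: { leaseId: lease.leaseId } });
  } finally {
    await leaseClient.releaseLease();
  }
}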
+ */ +export declare interface MatchConditions { + /** + * Specify an ETag value to operate only on blobs with a matching value. + */ + ifMatch?: string; + /** + * Specify an ETag value to operate only on blobs without a matching value. + */ + ifNoneMatch?: string; +} + +/** + * A map of name-value pairs to associate with the resource. + */ +export declare interface Metadata { + /** + * A name-value pair. + */ + [propertyName: string]: string; +} + +/** a summary of request statistics grouped by API in hour or minute aggregates for blobs */ +export declare interface Metrics { + /** The version of Storage Analytics to configure. */ + version?: string; + /** Indicates whether metrics are enabled for the Blob service. */ + enabled: boolean; + /** Indicates whether metrics should generate summary statistics for called API operations. */ + includeAPIs?: boolean; + /** the retention policy which determines how long the associated data should persist */ + retentionPolicy?: RetentionPolicy; +} + +/** + * Specifies HTTP options for conditional requests based on modification time. + */ +export declare interface ModificationConditions { + /** + * Specify this header value to operate only on a blob if it has been modified since the + * specified date/time. + */ + ifModifiedSince?: Date; + /** + * Specify this header value to operate only on a blob if it has not been modified since the + * specified date/time. + */ + ifUnmodifiedSince?: Date; +} + +/** + * standard HTTP conditional headers and tags condition. + */ +export declare interface ModifiedAccessConditions extends MatchConditions, ModificationConditions, TagConditions { +} + +/** Parameter group */ +export declare interface ModifiedAccessConditionsModel { + /** Specify this header value to operate only on a blob if it has been modified since the specified date/time. */ + ifModifiedSince?: Date; + /** Specify this header value to operate only on a blob if it has not been modified since the specified date/time. */ + ifUnmodifiedSince?: Date; + /** Specify an ETag value to operate only on blobs with a matching value. */ + ifMatch?: string; + /** Specify an ETag value to operate only on blobs without a matching value. */ + ifNoneMatch?: string; + /** Specify a SQL where clause on blob tags to operate only on blobs with a matching value. */ + ifTags?: string; +} + +/** + * Creates a new Pipeline object with Credential provided. + * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +export declare function newPipeline(credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, pipelineOptions?: StoragePipelineOptions): Pipeline; + +/** + * Contains Object Replication Policy ID and the respective list of {@link ObjectReplicationRule}. + * This is used when retrieving the Object Replication Properties on the source blob. The policy id for the + * destination blob is set in ObjectReplicationDestinationPolicyId of the respective method responses + * (e.g. {@link BlobProperties.ObjectReplicationDestinationPolicyId}. + */ +export declare interface ObjectReplicationPolicy { + /** + * The Object Replication Policy ID. 
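// Sketch of optimistic concurrency with the MatchConditions / ModifiedAccessConditions
// shapes declared above: read the current ETag, then write back only if it is unchanged.
// Container and blob names are placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function bumpRevisionMetadata(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  );
  const blob = service.getContainerClient("my-container").getBlobClient("settings.json");

  const props = await blob.getProperties();
  const revision = Number(props.metadata?.revision ?? "0") + 1;

  // ifMatch pins the update to the ETag we just read; a concurrent writer that changed the
  // blob in between causes this call to fail with 412 instead of silently overwriting.
  await blob.setMetadata(
    { ...props.metadata, revision: String(revision) },
    { conditions: { ifMatch: props.etag } }
  );
}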
+ */ + policyId: string; + /** + * The Rule ID(s) and respective Replication Status(s) that are under the Policy ID. + */ + rules: ObjectReplicationRule[]; +} + +/** + * Contains the Object Replication Rule ID and {@link ObjectReplicationStatus} of a blob. + * There can be more than one {@link ObjectReplicationRule} under a {@link ObjectReplicationPolicy}. + */ +export declare interface ObjectReplicationRule { + /** + * The Object Replication Rule ID. + */ + ruleId: string; + /** + * The Replication Status + */ + replicationStatus: ObjectReplicationStatus; +} + +/** + * Specifies the Replication Status of a blob. This is used when a storage account has + * Object Replication Policy(s) applied. See {@link ObjectReplicationPolicy} and {@link ObjectReplicationRule}. + */ +export declare type ObjectReplicationStatus = "complete" | "failed"; + +/** Defines headers for PageBlob_clearPages operation. */ +export declare interface PageBlobClearPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.clearPages} operation. + */ +export declare interface PageBlobClearPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when clearing pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the clearPages operation. 
*/ +export declare type PageBlobClearPagesResponse = PageBlobClearPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobClearPagesHeaders; + }; +}; + +/** + * PageBlobClient defines a set of operations applicable to page blobs. + */ +export declare class PageBlobClient extends BlobClient { + /** + * pageBlobsContext provided by protocol layer. + */ + private pageBlobContext; + /** + * + * Creates an instance of PageBlobClient. + * + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param containerName - Container name. + * @param blobName - Blob name. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * If a blob name includes ? or %, blob name must be encoded in the URL. + * + * @param url - A Client string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". You can append a SAS + * if using AnonymousCredential, such as "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param options - Optional. Options to configure the HTTP pipeline. + */ + constructor(url: string, credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions); + /** + * Creates an instance of PageBlobClient. + * + * @param url - A URL string pointing to Azure Storage page blob, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob". + * You can append a SAS if using AnonymousCredential, such as + * "https://myaccount.blob.core.windows.net/mycontainer/pageblob?sasString". + * This method accepts an encoded URL or non-encoded URL pointing to a blob. + * Encoded URL string will NOT be escaped twice, only special characters in URL path will be escaped. + * However, if a blob name includes ? or %, blob name must be encoded in the URL. + * Such as a blob named "my?blob%", the URL should be "https://myaccount.blob.core.windows.net/mycontainer/my%3Fblob%25". + * @param pipeline - Call newPipeline() to create a default + * pipeline, or provide a customized pipeline. 
+ */
+ constructor(url: string, pipeline: PipelineLike);
+ /**
+ * Creates a new PageBlobClient object identical to the source but with the
+ * specified snapshot timestamp.
+ * Providing "" will remove the snapshot and return a Client to the base blob.
+ *
+ * @param snapshot - The snapshot timestamp.
+ * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
+ */
+ withSnapshot(snapshot: string): PageBlobClient;
+ /**
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * to a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param size - size of the page blob.
+ * @param options - Options to the Page Blob Create operation.
+ * @returns Response data for the Page Blob Create operation.
+ */
+ create(size: number, options?: PageBlobCreateOptions): Promise<PageBlobCreateResponse>;
+ /**
+ * Creates a page blob of the specified length. Call uploadPages to upload data
+ * to a page blob. If the blob with the same name already exists, the content
+ * of the existing blob will remain unchanged.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
+ *
+ * @param size - size of the page blob.
+ * @param options -
+ */
+ createIfNotExists(size: number, options?: PageBlobCreateIfNotExistsOptions): Promise<PageBlobCreateIfNotExistsResponse>;
+ /**
+ * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+ *
+ * @param body - Data to upload
+ * @param offset - Offset of destination page blob
+ * @param count - Content length of the body, also number of bytes to be uploaded
+ * @param options - Options to the Page Blob Upload Pages operation.
+ * @returns Response data for the Page Blob Upload Pages operation.
+ */
+ uploadPages(body: HttpRequestBody, offset: number, count: number, options?: PageBlobUploadPagesOptions): Promise<PageBlobUploadPagesResponse>;
+ /**
+ * The Upload Pages operation writes a range of pages to a page blob where the
+ * contents are read from a URL.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url
+ *
+ * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) may be needed for authentication
+ * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob
+ * @param destOffset - Offset of destination page blob
+ * @param count - Number of bytes to be uploaded from source page blob
+ * @param options -
+ */
+ uploadPagesFromURL(sourceURL: string, sourceOffset: number, destOffset: number, count: number, options?: PageBlobUploadPagesFromURLOptions): Promise<PageBlobUploadPagesFromURLResponse>;
+ /**
+ * Frees the specified pages from the page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page
+ *
+ * @param offset - Starting byte position of the pages to clear.
+ * @param count - Number of bytes to clear.
+ * @param options - Options to the Page Blob Clear Pages operation.
+ * @returns Response data for the Page Blob Clear Pages operation.
+ */
+ clearPages(offset?: number, count?: number, options?: PageBlobClearPagesOptions): Promise<PageBlobClearPagesResponse>;
+ /**
+ * Returns the list of valid page ranges for a page blob or snapshot of a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
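// Minimal round trip for the create / uploadPages / clearPages members declared above.
// Page blobs are addressed in 512-byte pages, so every size and offset below is 512-aligned.
// The container and blob names are placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function pageBlobBasics(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  );
  const pageBlob = service.getContainerClient("disks").getPageBlobClient("scratch.vhd");

  await pageBlob.create(4 * 512);                   // total size must be a multiple of 512
  const page = Buffer.alloc(512, "x");
  await pageBlob.uploadPages(page, 0, page.length); // write the first page
  await pageBlob.clearPages(512, 512);              // zero out the second page
}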
+ * @returns Response data for the Page Blob Get Ranges operation.
+ */
+ getPageRanges(offset?: number, count?: number, options?: PageBlobGetPageRangesOptions): Promise<PageBlobGetPageRangesResponse>;
+ /**
+ * getPageRangesSegment returns a single segment of page ranges starting from the
+ * specified Marker. Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call getPageRangesSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to PageBlob Get Page Ranges Segment operation.
+ */
+ private listPageRangesSegment;
+ /**
+ * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param marker - A string value that identifies the portion of
+ * the get of page ranges to be returned with the next getting operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * getting operation did not return all page ranges remaining within the current page.
+ * The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of get
+ * items. The marker value is opaque to the client.
+ * @param options - Options to List Page Ranges operation.
+ */
+ private listPageRangeItemSegments;
+ /**
+ * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to List Page Ranges operation.
+ */
+ private listPageRangeItems;
+ /**
+ * Returns an async iterable iterator to list of page ranges for a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * .byPage() returns an async iterable iterator to list of page ranges for a page blob.
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRanges()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRanges();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRanges(offset?: number, count?: number, options?: PageBlobListPageRangesOptions): PagedAsyncIterableIterator<PageRangeInfo, PageBlobGetPageRangesResponseModel>;
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
+ */
+ getPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobGetPageRangesDiffOptions): Promise<PageBlobGetPageRangesDiffResponse>;
+ /**
+ * getPageRangesDiffSegment returns a single segment of page ranges starting from the
+ * specified Marker for difference between previous snapshot and the target page blob.
+ * Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call getPageRangesDiffSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangesDiffSegment; + /** + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} + * + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItemSegments; + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + private listPageRangeDiffItems; + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
+ *
+ * Example using `for await` syntax:
+ *
+ * ```js
+ * // Get the pageBlobClient before you run these snippets,
+ * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");`
+ * let i = 1;
+ * for await (const pageRange of pageBlobClient.listPageRangesDiff()) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ *
+ * Example using `iter.next()`:
+ *
+ * ```js
+ * let i = 1;
+ * let iter = pageBlobClient.listPageRangesDiff();
+ * let pageRangeItem = await iter.next();
+ * while (!pageRangeItem.done) {
+ * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`);
+ * pageRangeItem = await iter.next();
+ * }
+ * ```
+ *
+ * Example using `byPage()`:
+ *
+ * ```js
+ * // passing optional maxPageSize in the page settings
+ * let i = 1;
+ * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) {
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * }
+ * ```
+ *
+ * Example using paging with a marker:
+ *
+ * ```js
+ * let i = 1;
+ * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 });
+ * let response = (await iterator.next()).value;
+ *
+ * // Prints 2 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ *
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
+ * ```
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
+ */
+ listPageRangesDiff(offset: number, count: number, prevSnapshot: string, options?: PageBlobListPageRangesDiffOptions): PagedAsyncIterableIterator<PageRangeInfo, PageBlobGetPageRangesDiffResponseModel>;
+ /**
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
+ *
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshotUrl - URL of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
+ */
+ getPageRangesDiffForManagedDisks(offset: number, count: number, prevSnapshotUrl: string, options?: PageBlobGetPageRangesDiffOptions): Promise<PageBlobGetPageRangesDiffResponse>;
+ /**
+ * Resizes the page blob to the specified size (which must be a multiple of 512).
+ * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties
+ *
+ * @param size - Target size
+ * @param options - Options to the Page Blob Resize operation.
+ * @returns Response data for the Page Blob Resize operation.
+ */
+ resize(size: number, options?: PageBlobResizeOptions): Promise<PageBlobResizeResponse>;
+ /**
+ * Sets a page blob's sequence number.
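// Hedged sketch of getPageRangesDiff as declared above: snapshot the blob, write another
// page, then ask which ranges changed relative to the snapshot. Names are placeholders and
// the blob is assumed to already exist with at least four 512-byte pages.
import { BlobServiceClient } from "@azure/storage-blob";

async function diffSinceSnapshot(): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  );
  const pageBlob = service.getContainerClient("disks").getPageBlobClient("scratch.vhd");

  const { snapshot } = await pageBlob.createSnapshot();
  await pageBlob.uploadPages(Buffer.alloc(512, "y"), 1024, 512);

  const diff = await pageBlob.getPageRangesDiff(0, 4 * 512, snapshot!);
  for (const range of diff.pageRange ?? []) {
    console.log(`changed: offset ${range.offset}, count ${range.count}`);
  }
  for (const range of diff.clearRange ?? []) {
    console.log(`cleared: offset ${range.offset}, count ${range.count}`);
  }
}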
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+ *
+ * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number.
+ * @param sequenceNumber - Required if sequenceNumberAction is max or update
+ * @param options - Options to the Page Blob Update Sequence Number operation.
+ * @returns Response data for the Page Blob Update Sequence Number operation.
+ */
+ updateSequenceNumber(sequenceNumberAction: SequenceNumberActionType, sequenceNumber?: number, options?: PageBlobUpdateSequenceNumberOptions): Promise<PageBlobUpdateSequenceNumberResponse>;
+ /**
+ * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob.
+ * The snapshot is copied such that only the differential changes between the previously
+ * copied snapshot are transferred to the destination.
+ * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual.
+ * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob
+ * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots
+ *
+ * @param copySource - Specifies the name of the source page blob snapshot. For example,
+ * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+ * @param options - Options to the Page Blob Copy Incremental operation.
+ * @returns Response data for the Page Blob Copy Incremental operation.
+ */
+ startCopyIncremental(copySource: string, options?: PageBlobStartCopyIncrementalOptions): Promise<PageBlobCopyIncrementalResponse>;
+}
+
+/** Defines headers for PageBlob_copyIncremental operation. */
+export declare interface PageBlobCopyIncrementalHeaders {
+ /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */
+ etag?: string;
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */
+ lastModified?: Date;
+ /** If a client request id header is sent in the request, this header will be present in the response with the same value. */
+ clientRequestId?: string;
+ /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */
+ requestId?: string;
+ /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */
+ version?: string;
+ /** UTC date/time value generated by the service that indicates the time at which the response was initiated */
+ date?: Date;
+ /** String identifier for this copy operation. Use with Get Blob Properties to check the status of this copy operation, or pass to Abort Copy Blob to abort a pending copy. */
+ copyId?: string;
+ /** State of the copy operation identified by x-ms-copy-id. */
+ copyStatus?: CopyStatusType;
+ /** Error Code */
+ errorCode?: string;
+}
+
+/** Contains response data for the copyIncremental operation. */
+export declare type PageBlobCopyIncrementalResponse = PageBlobCopyIncrementalHeaders & {
+ /** The underlying HTTP response. */
+ _response: coreHttp.HttpResponse & {
+ /** The parsed HTTP response headers. */
+ parsedHeaders: PageBlobCopyIncrementalHeaders;
+ };
+};
+
+/** Defines headers for PageBlob_create operation.
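// Hedged sketches for the last two members declared above. The two calls are independent:
// updateSequenceNumber targets an ordinary page blob, while startCopyIncremental targets a
// dedicated destination blob and needs a readable source snapshot URL (SAS included).
// All names and the snapshot URL are placeholders.
import { BlobServiceClient } from "@azure/storage-blob";

async function pageBlobMaintenance(sourceSnapshotUrlWithSas: string): Promise<void> {
  const service = BlobServiceClient.fromConnectionString(
    process.env.AZURE_STORAGE_CONNECTION_STRING!
  );
  const container = service.getContainerClient("disks");

  // Set an explicit sequence number that x-ms-if-sequence-number-* conditions can test later.
  await container.getPageBlobClient("scratch.vhd").updateSequenceNumber("update", 100);

  // Incremental copy: only pages changed since the previously copied snapshot are transferred.
  const dest = container.getPageBlobClient("scratch-increments.vhd");
  const copy = await dest.startCopyIncremental(sourceSnapshotUrlWithSas);
  console.log(`incremental copy ${copy.copyId} is ${copy.copyStatus}`);
}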
*/ +export declare interface PageBlobCreateHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** A DateTime value returned by the service that uniquely identifies the blob. The value of this header indicates the blob version, and may be used in subsequent requests to access this version of the blob. */ + versionId?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.createIfNotExists} operation. + */ +export declare interface PageBlobCreateIfNotExistsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * A user-controlled value that can be used to track requests. + * The value must be between 0 and 2^63 - 1. The default value is 0. + */ + blobSequenceNumber?: number; + /** + * HTTP headers to set when creating a page blob. + */ + blobHTTPHeaders?: BlobHTTPHeaders; + /** + * A collection of key-value string pair to associate with the blob when creating append blobs. + */ + metadata?: Metadata; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Optional. Specifies immutability policy for a blob. 
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ immutabilityPolicy?: BlobImmutabilityPolicy;
+ /**
+ * Optional. Indicates if a legal hold should be placed on the blob.
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ legalHold?: boolean;
+ /**
+ * Access tier.
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers
+ */
+ tier?: PremiumPageBlobTier | string;
+}
+
+/**
+ * Contains response data for the {@link PageBlobClient.createIfNotExists} operation.
+ */
+export declare interface PageBlobCreateIfNotExistsResponse extends PageBlobCreateResponse {
+ /**
+ * Indicates whether the blob was successfully created. Is false when the blob is not changed as it already exists.
+ */
+ succeeded: boolean;
+}
+
+/**
+ * Options to configure the {@link PageBlobClient.create} operation.
+ */
+export declare interface PageBlobCreateOptions extends CommonOptions {
+ /**
+ * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation.
+ * For example, use the @azure/abort-controller to create an `AbortSignal`.
+ */
+ abortSignal?: AbortSignalLike;
+ /**
+ * Conditions to meet when creating a page blob.
+ */
+ conditions?: BlobRequestConditions;
+ /**
+ * A user-controlled value that can be used to track requests.
+ * The value must be between 0 and 2^63 - 1. The default value is 0.
+ */
+ blobSequenceNumber?: number;
+ /**
+ * HTTP headers to set when creating a page blob.
+ */
+ blobHTTPHeaders?: BlobHTTPHeaders;
+ /**
+ * A collection of key-value string pairs to associate with the blob when creating page blobs.
+ */
+ metadata?: Metadata;
+ /**
+ * Customer Provided Key Info.
+ */
+ customerProvidedKey?: CpkInfo;
+ /**
+ * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to
+ * encrypt the data provided in the request. If not specified, encryption is performed with the
+ * default account encryption scope. For more information, see Encryption at Rest for Azure
+ * Storage Services.
+ */
+ encryptionScope?: string;
+ /**
+ * Optional. Specifies immutability policy for a blob.
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ immutabilityPolicy?: BlobImmutabilityPolicy;
+ /**
+ * Optional. Indicates if a legal hold should be placed on the blob.
+ * Note that this parameter is only applicable to a blob within a container that
+ * has version level worm enabled.
+ */
+ legalHold?: boolean;
+ /**
+ * Access tier.
+ * More Details - https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-storage-tiers
+ */
+ tier?: PremiumPageBlobTier | string;
+ /**
+ * Blob tags.
+ */
+ tags?: Tags;
+}
+
+/** Contains response data for the create operation. */
+export declare type PageBlobCreateResponse = PageBlobCreateHeaders & {
+ /** The underlying HTTP response. */
+ _response: coreHttp.HttpResponse & {
+ /** The parsed HTTP response headers. */
+ parsedHeaders: PageBlobCreateHeaders;
+ };
+};
+
+/** Defines headers for PageBlob_getPageRangesDiff operation. */
+export declare interface PageBlobGetPageRangesDiffHeaders {
+ /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob.
*/ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.getRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; + /** + * (unused) + */ + range?: string; +} + +/** + * Contains response data for the {@link BlobClient.getPageRangesDiff} operation. + */ +export declare interface PageBlobGetPageRangesDiffResponse extends PageList, PageBlobGetPageRangesDiffHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} + +/** Contains response data for the getPageRangesDiff operation. */ +export declare type PageBlobGetPageRangesDiffResponseModel = PageBlobGetPageRangesDiffHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesDiffHeaders; + }; +}; + +/** Defines headers for PageBlob_getPageRanges operation. */ +export declare interface PageBlobGetPageRangesHeaders { + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** The size of the blob in bytes. */ + blobContentLength?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} + +/** + * Contains response data for the {@link BlobClient.getPageRanges} operation. + */ +export declare interface PageBlobGetPageRangesResponse extends PageList, PageBlobGetPageRangesHeaders { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: PageBlobGetPageRangesHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: PageList; + }; +} + +/** Contains response data for the getPageRanges operation. */ +export declare type PageBlobGetPageRangesResponseModel = PageBlobGetPageRangesHeaders & PageList_2 & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: PageList_2; + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobGetPageRangesHeaders; + }; +}; + +/** + * Options to configure the {@link PageBlobClient.listPageRangesDiff} operation. + */ +export declare interface PageBlobListPageRangesDiffOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges diff. + */ + conditions?: BlobRequestConditions; +} + +/** + * Options to configure the {@link PageBlobClient.listPageRanges} operation. + */ +export declare interface PageBlobListPageRangesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when getting page ranges. + */ + conditions?: BlobRequestConditions; +} + +/** + * Conditions to add to the creation of this page blob. + */ +export declare interface PageBlobRequestConditions extends BlobRequestConditions, SequenceNumberAccessConditions { +} + +/** Defines headers for PageBlob_resize operation. */ +export declare interface PageBlobResizeHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. 
*/ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.resize} operation. + */ +export declare interface PageBlobResizeOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when resizing a page blob. + */ + conditions?: BlobRequestConditions; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the resize operation. */ +export declare type PageBlobResizeResponse = PageBlobResizeHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobResizeHeaders; + }; +}; + +/** + * Options to configure {@link PageBlobClient.startCopyIncremental} operation. + */ +export declare interface PageBlobStartCopyIncrementalOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when starting a copy incremental operation. + */ + conditions?: ModifiedAccessConditions; +} + +/** Defines headers for PageBlob_updateSequenceNumber operation. */ +export declare interface PageBlobUpdateSequenceNumberHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** The current sequence number for a page blob. This header is not returned for block blobs or append blobs */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.updateSequenceNumber} operation. + */ +export declare interface PageBlobUpdateSequenceNumberOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: BlobRequestConditions; +} + +/** Contains response data for the updateSequenceNumber operation. */ +export declare type PageBlobUpdateSequenceNumberResponse = PageBlobUpdateSequenceNumberHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUpdateSequenceNumberHeaders; + }; +}; + +/** Defines headers for PageBlob_uploadPagesFromURL operation. */ +export declare interface PageBlobUploadPagesFromURLHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the blob. This header is only returned when the blob was encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure {@link PageBlobClient.uploadPagesFromURL} operation. 
+ */ +export declare interface PageBlobUploadPagesFromURLOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when updating sequence number. + */ + conditions?: PageBlobRequestConditions; + /** + * Conditions to meet for the source Azure Blob/File when copying from a URL to the blob. + */ + sourceConditions?: MatchConditions & ModificationConditions; + /** + * An MD5 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content from the URI. + * This hash is used to verify the integrity of the content during transport of the data from the URI. + * When this is specified, the storage service compares the hash of the content that has arrived from the copy-source with this value. + * + * sourceContentMD5 and sourceContentCrc64 cannot be set at same time. + */ + sourceContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; + /** + * Only Bearer type is supported. Credentials should be a valid OAuth access token to copy source. + */ + sourceAuthorization?: HttpAuthorization; +} + +/** Contains response data for the uploadPagesFromURL operation. */ +export declare type PageBlobUploadPagesFromURLResponse = PageBlobUploadPagesFromURLHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesFromURLHeaders; + }; +}; + +/** Defines headers for PageBlob_uploadPages operation. */ +export declare interface PageBlobUploadPagesHeaders { + /** The ETag contains a value that you can use to perform operations conditionally. If the request version is 2011-08-18 or newer, the ETag value will be in quotes. */ + etag?: string; + /** Returns the date and time the container was last modified. Any operation that modifies the blob, including an update of the blob's metadata or properties, changes the last-modified time of the blob. */ + lastModified?: Date; + /** If the blob has an MD5 hash and this operation is to read the full blob, this response header is returned so that the client can check for message content integrity. */ + contentMD5?: Uint8Array; + /** This header is returned so that the client can check for message content integrity. The value of this header is computed by the Blob service; it is not necessarily the same value specified in the request headers. */ + xMsContentCrc64?: Uint8Array; + /** The current sequence number for the page blob. */ + blobSequenceNumber?: number; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. 
*/ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** The value of this header is set to true if the contents of the request are successfully encrypted using the specified algorithm, and false otherwise. */ + isServerEncrypted?: boolean; + /** The SHA-256 hash of the encryption key used to encrypt the pages. This header is only returned when the pages were encrypted with a customer-provided key. */ + encryptionKeySha256?: string; + /** Returns the name of the encryption scope used to encrypt the blob contents and application metadata. Note that the absence of this header implies use of the default account encryption scope. */ + encryptionScope?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link PageBlobClient.uploadPages} operation. + */ +export declare interface PageBlobUploadPagesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Conditions to meet when uploading pages. + */ + conditions?: PageBlobRequestConditions; + /** + * Callback to receive events on the progress of upload pages operation. + */ + onProgress?: (progress: TransferProgressEvent) => void; + /** + * An MD5 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentMD5?: Uint8Array; + /** + * A CRC64 hash of the content. This hash is used to verify the integrity of the content during transport. + * When this is specified, the storage service compares the hash of the content that has arrived with this value. + * + * transactionalContentMD5 and transactionalContentCrc64 cannot be set at same time. + */ + transactionalContentCrc64?: Uint8Array; + /** + * Customer Provided Key Info. + */ + customerProvidedKey?: CpkInfo; + /** + * Optional. Version 2019-07-07 and later. Specifies the name of the encryption scope to use to + * encrypt the data provided in the request. If not specified, encryption is performed with the + * default account encryption scope. For more information, see Encryption at Rest for Azure + * Storage Services. + */ + encryptionScope?: string; +} + +/** Contains response data for the uploadPages operation. */ +export declare type PageBlobUploadPagesResponse = PageBlobUploadPagesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: PageBlobUploadPagesHeaders; + }; +}; + +/** + * List of page ranges for a blob. + */ +export declare interface PageList { + /** + * Valid non-overlapping page ranges. + */ + pageRange?: Range_2[]; + /** + * Present if the prevSnapshot parameter was specified and there were cleared + * pages between the previous snapshot and the target snapshot. 
+ */ + clearRange?: Range_2[]; +} + +/** the list of pages */ +declare interface PageList_2 { + pageRange?: PageRange[]; + clearRange?: ClearRange[]; + continuationToken?: string; +} + +declare interface PageRange { + start: number; + end: number; +} + +export declare interface PageRangeInfo { + start: number; + end: number; + isClear: boolean; +} + +/** + * The multipart/mixed response which contains the response for each subrequest. + */ +export declare interface ParsedBatchResponse { + /** + * The parsed sub responses. + */ + subResponses: BatchSubResponse[]; + /** + * The succeeded executed sub responses' count; + */ + subResponsesSucceededCount: number; + /** + * The failed executed sub responses' count; + */ + subResponsesFailedCount: number; +} + +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare class Pipeline implements PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories: RequestPolicyFactory[], options?: PipelineOptions); + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} + +/** + * An interface for the {@link Pipeline} class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +export declare interface PipelineLike { + /** + * A list of chained request policy factories. + */ + readonly factories: RequestPolicyFactory[]; + /** + * Configures pipeline logger and HTTP client. + */ + readonly options: PipelineOptions; + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions(): ServiceClientOptions; +} + +/** + * Option interface for Pipeline constructor. + */ +export declare interface PipelineOptions { + /** + * Optional. Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; +} + +export { PollerLike } + +export { PollOperationState } + +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +export declare enum PremiumPageBlobTier { + /** + * P4 Tier. + */ + P4 = "P4", + /** + * P6 Tier. + */ + P6 = "P6", + /** + * P10 Tier. + */ + P10 = "P10", + /** + * P15 Tier. + */ + P15 = "P15", + /** + * P20 Tier. 
+ */ + P20 = "P20", + /** + * P30 Tier. + */ + P30 = "P30", + /** + * P40 Tier. + */ + P40 = "P40", + /** + * P50 Tier. + */ + P50 = "P50", + /** + * P60 Tier. + */ + P60 = "P60", + /** + * P70 Tier. + */ + P70 = "P70", + /** + * P80 Tier. + */ + P80 = "P80" +} + +/** Defines values for PublicAccessType. */ +export declare type PublicAccessType = "container" | "blob"; + +/** + * Range for Blob Service Operations. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/specifying-the-range-header-for-blob-service-operations + */ +declare interface Range_2 { + /** + * StartByte, larger than or equal 0. + */ + offset: number; + /** + * Optional. Count of bytes, larger than 0. + * If not provided, will return bytes from offset to the end. + */ + count?: number; +} +export { Range_2 as Range } + +/** Defines values for RehydratePriority. */ +export declare type RehydratePriority = "High" | "Standard"; + +export { RequestPolicy } + +export { RequestPolicyFactory } + +export { RequestPolicyOptions } + +export { RestError } + +/** the retention policy which determines how long the associated data should persist */ +export declare interface RetentionPolicy { + /** Indicates whether a retention policy is enabled for the storage service */ + enabled: boolean; + /** Indicates the number of days that metrics or logging or soft-deleted data should be retained. All data older than this value will be deleted */ + days?: number; +} + +/** + * Allowed IP range for a SAS. + */ +export declare interface SasIPRange { + /** + * Starting IP address in the IP range. + * If end IP doesn't provide, start IP will the only IP allowed. + */ + start: string; + /** + * Optional. IP address that ends the IP range. + * If not provided, start IP will the only IP allowed. + */ + end?: string; +} + +/** + * Protocols for generated SAS. + */ +export declare enum SASProtocol { + /** + * Protocol that allows HTTPS only + */ + Https = "https", + /** + * Protocol that allows both HTTPS and HTTP + */ + HttpsAndHttp = "https,http" +} + +/** + * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly + * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} + * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should + * be taken here in case there are existing query parameters, which might affect the appropriate means of appending + * these query parameters). + * + * NOTE: Instances of this class are immutable. + */ +export declare class SASQueryParameters { + /** + * The storage API version. + */ + readonly version: string; + /** + * Optional. The allowed HTTP protocol(s). + */ + readonly protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + readonly startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + readonly expiresOn?: Date; + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + readonly permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + readonly services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). 
Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + readonly resourceTypes?: string; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + readonly identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + readonly encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + readonly resource?: string; + /** + * The signature for the SAS token. + */ + readonly signature: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + readonly cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + readonly contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + readonly contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + readonly contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + readonly contentType?: string; + /** + * Inner value of getter ipRange. + */ + private readonly ipRangeInner?; + /** + * The Azure Active Directory object ID in GUID format. + * Property of user delegation key. + */ + private readonly signedOid?; + /** + * The Azure Active Directory tenant ID in GUID format. + * Property of user delegation key. + */ + private readonly signedTenantId?; + /** + * The date-time the key is active. + * Property of user delegation key. + */ + private readonly signedStartsOn?; + /** + * The date-time the key expires. + * Property of user delegation key. + */ + private readonly signedExpiresOn?; + /** + * Abbreviation of the Azure Storage service that accepts the user delegation key. + * Property of user delegation key. + */ + private readonly signedService?; + /** + * The service version that created the user delegation key. + * Property of user delegation key. + */ + private readonly signedVersion?; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This is only used for User Delegation SAS. + */ + readonly preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + readonly correlationId?: string; + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange(): SasIPRange | undefined; + /** + * Creates an instance of SASQueryParameters. 
+ * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param permissions - Representing the storage permissions + * @param services - Representing the storage services being accessed (only for Account SAS) + * @param resourceTypes - Representing the storage resource types being accessed (only for Account SAS) + * @param protocol - Representing the allowed HTTP protocol(s) + * @param startsOn - Representing the start time for this SAS token + * @param expiresOn - Representing the expiry time for this SAS token + * @param ipRange - Representing the range of valid IP addresses for this SAS token + * @param identifier - Representing the signed identifier (only for Service SAS) + * @param resource - Representing the storage container or blob (only for Service SAS) + * @param cacheControl - Representing the cache-control header (only for Blob/File Service SAS) + * @param contentDisposition - Representing the content-disposition header (only for Blob/File Service SAS) + * @param contentEncoding - Representing the content-encoding header (only for Blob/File Service SAS) + * @param contentLanguage - Representing the content-language header (only for Blob/File Service SAS) + * @param contentType - Representing the content-type header (only for Blob/File Service SAS) + * @param userDelegationKey - Representing the user delegation key properties + * @param preauthorizedAgentObjectId - Representing the authorized AAD Object ID (only for User Delegation SAS) + * @param correlationId - Representing the correlation ID (only for User Delegation SAS) + * @param encryptionScope - + */ + constructor(version: string, signature: string, permissions?: string, services?: string, resourceTypes?: string, protocol?: SASProtocol, startsOn?: Date, expiresOn?: Date, ipRange?: SasIPRange, identifier?: string, resource?: string, cacheControl?: string, contentDisposition?: string, contentEncoding?: string, contentLanguage?: string, contentType?: string, userDelegationKey?: UserDelegationKey, preauthorizedAgentObjectId?: string, correlationId?: string, encryptionScope?: string); + /** + * Creates an instance of SASQueryParameters. + * + * @param version - Representing the storage version + * @param signature - Representing the signature for the SAS token + * @param options - Optional. Options to construct the SASQueryParameters. + */ + constructor(version: string, signature: string, options?: SASQueryParametersOptions); + /** + * Encodes all SAS query parameters into a string that can be appended to a URL. + * + */ + toString(): string; + /** + * A private helper method used to filter and append query key/value pairs into an array. + * + * @param queries - + * @param key - + * @param value - + */ + private tryAppendQueryParameter; +} + +/** + * Options to construct {@link SASQueryParameters}. + */ +export declare interface SASQueryParametersOptions { + /** + * Optional only when identifier is provided. + * Please refer to {@link AccountSASPermissions}, {@link BlobSASPermissions}, or {@link ContainerSASPermissions} for + * more details. + */ + permissions?: string; + /** + * Optional. The storage services being accessed (only for Account SAS). Please refer to {@link AccountSASServices} + * for more details. + */ + services?: string; + /** + * Optional. The storage resource types being accessed (only for Account SAS). Please refer to + * {@link AccountSASResourceTypes} for more details. + */ + resourceTypes?: string; + /** + * Optional. 
The allowed HTTP protocol(s). + */ + protocol?: SASProtocol; + /** + * Optional. The start time for this SAS token. + */ + startsOn?: Date; + /** + * Optional only when identifier is provided. The expiry time for this SAS token. + */ + expiresOn?: Date; + /** + * Optional. IP ranges allowed in this SAS. + */ + ipRange?: SasIPRange; + /** + * Optional. The signed identifier (only for {@link BlobSASSignatureValues}). + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/establishing-a-stored-access-policy + */ + identifier?: string; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; + /** + * Optional. Specifies which resources are accessible via the SAS (only for {@link BlobSASSignatureValues}). + * @see https://docs.microsoft.com/rest/api/storageservices/create-service-sas#specifying-the-signed-resource-blob-service-only + */ + resource?: string; + /** + * Value for cache-control header in Blob/File Service SAS. + */ + cacheControl?: string; + /** + * Value for content-disposition header in Blob/File Service SAS. + */ + contentDisposition?: string; + /** + * Value for content-encoding header in Blob/File Service SAS. + */ + contentEncoding?: string; + /** + * Value for content-length header in Blob/File Service SAS. + */ + contentLanguage?: string; + /** + * Value for content-type header in Blob/File Service SAS. + */ + contentType?: string; + /** + * User delegation key properties. + */ + userDelegationKey?: UserDelegationKey; + /** + * Authorized AAD Object ID in GUID format. The AAD Object ID of a user authorized by the owner of the User Delegation Key + * to perform the action granted by the SAS. The Azure Storage service will ensure that the owner of the user delegation key + * has the required permissions before granting access but no additional permission check for the user specified in + * this value will be performed. This cannot be used in conjuction with {@link signedUnauthorizedUserObjectId}. + * This is only used for User Delegation SAS. + */ + preauthorizedAgentObjectId?: string; + /** + * A GUID value that will be logged in the storage diagnostic logs and can be used to correlate SAS generation with storage resource access. + * This is only used for User Delegation SAS. + */ + correlationId?: string; +} + +/** Parameter group */ +export declare interface SequenceNumberAccessConditions { + /** Specify this header value to operate only on a blob if it has a sequence number less than or equal to the specified. */ + ifSequenceNumberLessThanOrEqualTo?: number; + /** Specify this header value to operate only on a blob if it has a sequence number less than the specified. */ + ifSequenceNumberLessThan?: number; + /** Specify this header value to operate only on a blob if it has the specified sequence number. */ + ifSequenceNumberEqualTo?: number; +} + +/** Defines values for SequenceNumberActionType. */ +export declare type SequenceNumberActionType = "max" | "update" | "increment"; + +/** Defines headers for Service_filterBlobs operation. */ +export declare interface ServiceFilterBlobsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. 
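// Usage sketch for the SASQueryParameters / SASQueryParametersOptions fields
// declared above: generating a read-only service SAS for a single blob.
// Not part of the vendored .d.ts; account name/key and blob names are placeholders.
import {
  BlobSASPermissions,
  SASProtocol,
  StorageSharedKeyCredential,
  generateBlobSASQueryParameters,
} from "@azure/storage-blob";

const credential = new StorageSharedKeyCredential("<account-name>", "<account-key>");

// generateBlobSASQueryParameters returns an immutable SASQueryParameters
// instance; toString() encodes it into the query string appended to the blob URL.
const sas = generateBlobSASQueryParameters(
  {
    containerName: "my-container",
    blobName: "my-blob.txt",
    permissions: BlobSASPermissions.parse("r"),        // read-only
    startsOn: new Date(),
    expiresOn: new Date(Date.now() + 60 * 60 * 1000),  // valid for one hour
    protocol: SASProtocol.Https,                       // HTTPS only
  },
  credential
);

const blobUrlWithSas =
  `https://<account-name>.blob.core.windows.net/my-container/my-blob.txt?${sas.toString()}`;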
This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare interface ServiceFindBlobByTagsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** + * The response of {@link BlobServiceClient.findBlobsByTags} operation. + */ +export declare type ServiceFindBlobsByTagsSegmentResponse = FilterBlobSegment & ServiceFilterBlobsHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceFilterBlobsHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: FilterBlobSegmentModel; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.generateAccountSasUrl} operation. + */ +export declare interface ServiceGenerateAccountSasUrlOptions { + /** + * The version of the service this SAS will target. If not specified, it will default to the version targeted by the + * library. + */ + version?: string; + /** + * Optional. SAS protocols allowed. + */ + protocol?: SASProtocol; + /** + * Optional. When the SAS will take effect. + */ + startsOn?: Date; + /** + * Optional. IP range allowed. + */ + ipRange?: SasIPRange; + /** + * Optional. Encryption scope to use when sending requests authorized with this SAS URI. + */ + encryptionScope?: string; +} + +/** Defines headers for Service_getAccountInfo operation. */ +export declare interface ServiceGetAccountInfoHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Identifies the sku name of the account */ + skuName?: SkuName; + /** Identifies the account kind */ + accountKind?: AccountKind; + /** Version 2019-07-07 and newer. Indicates if the account has a hierarchical namespace enabled. */ + isHierarchicalNamespaceEnabled?: boolean; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getAccountInfo} operation. + */ +export declare interface ServiceGetAccountInfoOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getAccountInfo operation. */ +export declare type ServiceGetAccountInfoResponse = ServiceGetAccountInfoHeaders & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetAccountInfoHeaders; + }; +}; + +/** Defines headers for Service_getProperties operation. */ +export declare interface ServiceGetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getProperties} operation. + */ +export declare interface ServiceGetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getProperties operation. */ +export declare type ServiceGetPropertiesResponse = ServiceGetPropertiesHeaders & BlobServiceProperties & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceProperties; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetPropertiesHeaders; + }; +}; + +/** Defines headers for Service_getStatistics operation. */ +export declare interface ServiceGetStatisticsHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.getStatistics} operation. + */ +export declare interface ServiceGetStatisticsOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the getStatistics operation. */ +export declare type ServiceGetStatisticsResponse = ServiceGetStatisticsHeaders & BlobServiceStatistics & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: BlobServiceStatistics; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceGetStatisticsHeaders; + }; +}; + +/** Defines headers for Service_getUserDelegationKey operation. 
*/ +export declare interface ServiceGetUserDelegationKeyHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** UTC date/time value generated by the service that indicates the time at which the response was initiated */ + date?: Date; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the Service - Get User Delegation Key. + */ +export declare interface ServiceGetUserDelegationKeyOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** + * Contains response data for the {@link getUserDelegationKey} operation. + */ +export declare type ServiceGetUserDelegationKeyResponse = UserDelegationKey & ServiceGetUserDelegationKeyHeaders & { + /** + * The underlying HTTP response. + */ + _response: HttpResponse & { + /** + * The parsed HTTP response headers. + */ + parsedHeaders: ServiceGetUserDelegationKeyHeaders; + /** + * The response body as text (string format) + */ + bodyAsText: string; + /** + * The response body as parsed JSON or XML + */ + parsedBody: UserDelegationKeyModel; + }; +}; + +/** + * Options to configure the {@link BlobServiceClient.listContainers} operation. + */ +export declare interface ServiceListContainersOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Filters the results to return only containers + * whose name begins with the specified prefix. + */ + prefix?: string; + /** + * Specifies whether the container's metadata + * should be returned as part of the response body. + */ + includeMetadata?: boolean; + /** + * Specifies whether soft deleted containers should be included in the response. + */ + includeDeleted?: boolean; + /** + * Specifies whether system containers should be included in the response. + */ + includeSystem?: boolean; +} + +/** Defines headers for Service_listContainersSegment operation. */ +export declare interface ServiceListContainersSegmentHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** Contains response data for the listContainersSegment operation. */ +export declare type ServiceListContainersSegmentResponse = ServiceListContainersSegmentHeaders & ListContainersSegmentResponse & { + /** The underlying HTTP response. 
*/ + _response: coreHttp.HttpResponse & { + /** The response body as text (string format) */ + bodyAsText: string; + /** The response body as parsed JSON or XML */ + parsedBody: ListContainersSegmentResponse; + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceListContainersSegmentHeaders; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.renameContainer} operation. + */ +export declare interface ServiceRenameContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Condition to meet for the source container. + */ + sourceCondition?: LeaseAccessConditions; +} + +/** Defines headers for Service_setProperties operation. */ +export declare interface ServiceSetPropertiesHeaders { + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** Error Code */ + errorCode?: string; +} + +/** + * Options to configure the {@link BlobServiceClient.setProperties} operation. + */ +export declare interface ServiceSetPropertiesOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; +} + +/** Contains response data for the setProperties operation. */ +export declare type ServiceSetPropertiesResponse = ServiceSetPropertiesHeaders & { + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSetPropertiesHeaders; + }; +}; + +/** Defines headers for Service_submitBatch operation. */ +export declare interface ServiceSubmitBatchHeaders { + /** The media type of the body of the response. For batch requests, this is multipart/mixed; boundary=batchresponse_GUID */ + contentType?: string; + /** This header uniquely identifies the request that was made and can be used for troubleshooting the request. */ + requestId?: string; + /** Indicates the version of the Blob service used to execute the request. This header is returned for requests made against version 2009-09-19 and above. */ + version?: string; + /** If a client request id header is sent in the request, this header will be present in the response with the same value. */ + clientRequestId?: string; + /** Error Code */ + errorCode?: string; +} + +/** Optional parameters. */ +export declare interface ServiceSubmitBatchOptionalParamsModel extends coreHttp.OperationOptions { + /** The timeout parameter is expressed in seconds. For more information, see Setting Timeouts for Blob Service Operations. */ + timeoutInSeconds?: number; + /** Provides a client-generated, opaque value with a 1 KB character limit that is recorded in the analytics logs when storage analytics logging is enabled. */ + requestId?: string; +} + +/** Contains response data for the submitBatch operation. 
*/ +export declare type ServiceSubmitBatchResponseModel = ServiceSubmitBatchHeaders & { + /** + * BROWSER ONLY + * + * The response body as a browser Blob. + * Always `undefined` in node.js. + */ + blobBody?: Promise; + /** + * NODEJS ONLY + * + * The response body as a node.js Readable stream. + * Always `undefined` in the browser. + */ + readableStreamBody?: NodeJS.ReadableStream; + /** The underlying HTTP response. */ + _response: coreHttp.HttpResponse & { + /** The parsed HTTP response headers. */ + parsedHeaders: ServiceSubmitBatchHeaders; + }; +}; + +/** + * Options to configure {@link BlobServiceClient.undeleteContainer} operation. + */ +export declare interface ServiceUndeleteContainerOptions extends CommonOptions { + /** + * An implementation of the `AbortSignalLike` interface to signal the request to cancel the operation. + * For example, use the @azure/abort-controller to create an `AbortSignal`. + */ + abortSignal?: AbortSignalLike; + /** + * Optional. Specifies the new name of the restored container. + * Will use its original name if this is not specified. + * @deprecated Restore container to a different name is not supported by service anymore. + */ + destinationContainerName?: string; +} + +/** + * Signed identifier. + */ +export declare interface SignedIdentifier { + /** + * a unique id + */ + id: string; + /** + * Access Policy + */ + accessPolicy: { + /** + * Optional. The date-time the policy is active + */ + startsOn?: Date; + /** + * Optional. The date-time the policy expires + */ + expiresOn?: Date; + /** + * The permissions for the acl policy + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + */ + permissions?: string; + }; +} + +/** signed identifier */ +export declare interface SignedIdentifierModel { + /** a unique id */ + id: string; + /** An Access policy */ + accessPolicy: AccessPolicy; +} + +/** Defines values for SkuName. */ +export declare type SkuName = "Standard_LRS" | "Standard_GRS" | "Standard_RAGRS" | "Standard_ZRS" | "Premium_LRS"; + +/** The properties that enable an account to host a static website */ +export declare interface StaticWebsite { + /** Indicates whether this account is hosting a static website */ + enabled: boolean; + /** The default name of the index page under each directory */ + indexDocument?: string; + /** The absolute path of the custom 404 page */ + errorDocument404Path?: string; + /** Absolute path of the default index page */ + defaultIndexDocumentPath?: string; +} + +/** + * Defines the known cloud audiences for Storage. + */ +export declare enum StorageBlobAudience { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageOAuthScopes = "https://storage.azure.com/.default", + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + DiskComputeOAuthScopes = "https://disk.compute.azure.com/.default" +} + +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +export declare class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. 
+ * @param nextPolicy - + * @param options - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions); + /** + * Sends out request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; +} + +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +export declare class StorageBrowserPolicyFactory implements RequestPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageBrowserPolicy; +} + +/** + * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} + * and etc. + */ +declare abstract class StorageClient { + /** + * Encoded URL string value. + */ + readonly url: string; + readonly accountName: string; + /* Excluded from this release type: pipeline */ + /** + * Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + */ + readonly credential: StorageSharedKeyCredential | AnonymousCredential | TokenCredential; + /** + * StorageClient is a reference to protocol layer operations entry, which is + * generated by AutoRest generator. + */ + protected readonly storageClientContext: StorageClientContext; + /** + */ + protected readonly isHttps: boolean; + /** + * Creates an instance of StorageClient. + * @param url - url to resource + * @param pipeline - request policy pipeline. + */ + protected constructor(url: string, pipeline: PipelineLike); +} + +declare class StorageClientContext extends coreHttp.ServiceClient { + url: string; + version: string; + /** + * Initializes a new instance of the StorageClientContext class. + * @param url The URL of the service account, container, or blob that is the target of the desired + * operation. + * @param options The parameter options + */ + constructor(url: string, options?: StorageClientOptionalParams); +} + +/** Optional parameters. */ +declare interface StorageClientOptionalParams extends coreHttp.ServiceClientOptions { + /** Specifies the version of the operation to use for this request. */ + version?: string; + /** Overrides client endpoint. */ + endpoint?: string; +} + +/** + * The OAuth scope to use with Azure Storage. + */ +export declare const StorageOAuthScopes: string | string[]; + +/** + * Options interface for the {@link newPipeline} function. + */ +export declare interface StoragePipelineOptions { + /** + * Options to configure a proxy for outgoing requests. + */ + proxyOptions?: ProxyOptions; + /** + * Options for adding user agent details to outgoing requests. + */ + userAgentOptions?: UserAgentOptions; + /** + * Configures the built-in retry policy behavior. + */ + retryOptions?: StorageRetryOptions; + /** + * Keep alive configurations. Default keep-alive is enabled. + */ + keepAliveOptions?: KeepAliveOptions; + /** + * Configures the HTTP client to send requests and receive responses. + */ + httpClient?: IHttpClient; + /** + * The audience used to retrieve an AAD token. + */ + audience?: string | string[]; +} + +/** + * Storage Blob retry options interface. + */ +export declare interface StorageRetryOptions { + /** + * Optional. StorageRetryPolicyType, default is exponential retry policy. 
+ */ + readonly retryPolicyType?: StorageRetryPolicyType; + /** + * Optional. Max try number of attempts, default is 4. + * A value of 1 means 1 try and no retries. + * A value smaller than 1 means default retry number of attempts. + */ + readonly maxTries?: number; + /** + * Optional. Indicates the maximum time in ms allowed for any single try of an HTTP request. + * A value of zero or undefined means no default timeout on SDK client, Azure + * Storage server's default timeout policy will be used. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations + */ + readonly tryTimeoutInMs?: number; + /** + * Optional. Specifies the amount of delay to use before retrying an operation (default is 4s or 4 * 1000ms). + * The delay increases (exponentially or linearly) with each retry up to a maximum specified by + * maxRetryDelayInMs. If you specify 0, then you must also specify 0 for maxRetryDelayInMs. + */ + readonly retryDelayInMs?: number; + /** + * Optional. Specifies the maximum delay allowed before retrying an operation (default is 120s or 120 * 1000ms). + * If you specify 0, then you must also specify 0 for retryDelayInMs. + */ + readonly maxRetryDelayInMs?: number; + /** + * If a secondaryHost is specified, retries will be tried against this host. If secondaryHost is undefined + * (the default) then operations are not retried against another host. + * + * NOTE: Before setting this field, make sure you understand the issues around + * reading stale and potentially-inconsistent data at + * {@link https://docs.microsoft.com/en-us/azure/storage/common/storage-designing-ha-apps-with-ragrs} + */ + readonly secondaryHost?: string; +} + +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export declare class StorageRetryPolicy extends BaseRequestPolicy { + /** + * RetryOptions. + */ + private readonly retryOptions; + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, retryOptions?: StorageRetryOptions); + /** + * Sends request. + * + * @param request - + */ + sendRequest(request: WebResource): Promise; + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. + */ + protected attemptSendRequest(request: WebResource, secondaryHas404: boolean, attempt: number): Promise; + /** + * Decide whether to retry according to last HTTP response and retry counters. + * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + protected shouldRetry(isPrimaryRetry: boolean, attempt: number, response?: HttpOperationResponse, err?: RestError): boolean; + /** + * Delay a calculated time between retries. + * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + private delay; +} + +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. 
+ */ +export declare class StorageRetryPolicyFactory implements RequestPolicyFactory { + private retryOptions?; + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions?: StorageRetryOptions); + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageRetryPolicy; +} + +/** + * RetryPolicy types. + */ +export declare enum StorageRetryPolicyType { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + EXPONENTIAL = 0, + /** + * Linear retry. Retry time delay grows linearly. + */ + FIXED = 1 +} + +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +export declare class StorageSharedKeyCredential extends Credential_2 { + /** + * Azure Storage account name; readonly. + */ + readonly accountName: string; + /** + * Azure Storage account key; readonly. + */ + private readonly accountKey; + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName: string, accountKey: string); + /** + * Creates a StorageSharedKeyCredentialPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy: RequestPolicy, options: RequestPolicyOptions): StorageSharedKeyCredentialPolicy; + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign: string): string; +} + +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +export declare class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Reference to StorageSharedKeyCredential which generates StorageSharedKeyCredentialPolicy + */ + private readonly factory; + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. + * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions, factory: StorageSharedKeyCredential); + /** + * Signs request. + * + * @param request - + */ + protected signRequest(request: WebResource): WebResource; + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + private getHeaderValueToSign; + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + private getCanonicalizedHeadersString; + /** + * Retrieves the webResource canonicalized resource string. + * + * @param request - + */ + private getCanonicalizedResourceString; +} + +/** Defines values for SyncCopyStatusType. 
*/ +export declare type SyncCopyStatusType = "success"; + +/** + * Specifies HTTP options for conditional requests based on blob tags. + */ +export declare interface TagConditions { + /** + * Optional SQL statement to apply to the tags of the blob. + */ + tagConditions?: string; +} + +/** + * Blob tags. + */ +export declare type Tags = Record; + +/** + * A user delegation key. + */ +export declare interface UserDelegationKey { + /** + * The Azure Active Directory object ID in GUID format. + */ + signedObjectId: string; + /** + * The Azure Active Directory tenant ID in GUID format. + */ + signedTenantId: string; + /** + * The date-time the key is active. + */ + signedStartsOn: Date; + /** + * The date-time the key expires. + */ + signedExpiresOn: Date; + /** + * Abbreviation of the Azure Storage service that accepts the key. + */ + signedService: string; + /** + * The service version that created the key. + */ + signedVersion: string; + /** + * The key as a base64 string. + */ + value: string; +} + +/** A user delegation key */ +export declare interface UserDelegationKeyModel { + /** The Azure Active Directory object ID in GUID format. */ + signedObjectId: string; + /** The Azure Active Directory tenant ID in GUID format */ + signedTenantId: string; + /** The date-time the key is active */ + signedStartsOn: string; + /** The date-time the key expires */ + signedExpiresOn: string; + /** Abbreviation of the Azure Storage service that accepts the key */ + signedService: string; + /** The service version that created the key */ + signedVersion: string; + /** The key as a base64 string */ + value: string; +} + +export { WebResource } + +export { } diff --git a/node_modules/@opentelemetry/api/CHANGELOG.md b/node_modules/@opentelemetry/api/CHANGELOG.md new file mode 100644 index 0000000000..2eff2035c8 --- /dev/null +++ b/node_modules/@opentelemetry/api/CHANGELOG.md @@ -0,0 +1,204 @@ +# CHANGELOG + +All notable changes to this project will be documented in this file. 
+ +## [1.1.0](https://www.github.com/open-telemetry/opentelemetry-js-api/compare/v1.0.4...v1.1.0) (2022-01-25) + + +### Features + +* add tracestate implementation to api ([#147](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/147)) ([82842c7](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/82842c7097614e6ece99e73838ac5e94ff5460b7)) +* define common attributes type ([#142](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/142)) ([ae9bead](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/ae9bead17750d35dec4b63cfae098087666abc85)) +* **trace:** add optional schema url to TracerProvider.getTracer ([#129](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/129)) ([aa65fc6](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/aa65fc66809d45090d6e4951c265386d17ccc6f6)) + + +### Bug Fixes + +* export tracer options ([#154](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/154)) ([b125324](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/b125324438fb2f24eb80c7c6673afc8cfc99575e)) + +### [1.0.4](https://www.github.com/open-telemetry/opentelemetry-js-api/compare/v1.0.3...v1.0.4) (2021-12-18) + + +### Bug Fixes + +* align globalThis fallbacks with otel-core ([#126](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/126)) ([3507de7](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/3507de7c3b95396696657c021953b0b24a63a029)) + +### [1.0.3](https://www.github.com/open-telemetry/opentelemetry-js-api/compare/v1.0.2...v1.0.3) (2021-08-30) + + +### Bug Fixes + +* remove all circular dependencies ([#119](https://www.github.com/open-telemetry/opentelemetry-js-api/issues/119)) ([a8083e8](https://www.github.com/open-telemetry/opentelemetry-js-api/commit/a8083e84b23227828745da80fd5fe512357dd34b)) + +## 1.0.2 + +### :bug: Bug Fix + +* [#105](https://github.com/open-telemetry/opentelemetry-js-api/pull/105) fix: set delegate after successful registration ([@Flarna](https://github.com/Flarna)) +* [#94](https://github.com/open-telemetry/opentelemetry-js-api/pull/94) fix: enforce strict equality on prerelease versions ([@dyladan](https://github.com/dyladan)) + +### :memo: Documentation + +* [#106](https://github.com/open-telemetry/opentelemetry-js-api/pull/106) docs: fix crash in README example ([@trentm](https://github.com/trentm)) +* [#101](https://github.com/open-telemetry/opentelemetry-js-api/pull/101) docs: Format example for tracer.startActiveSpan ([@ad-m](https://github.com/ad-m)) +* [#99](https://github.com/open-telemetry/opentelemetry-js-api/pull/99) chore: fix link to API docs ([@dyladan](https://github.com/dyladan)) + +### :house: Internal + +* [#109](https://github.com/open-telemetry/opentelemetry-js-api/pull/109) internal: add missing approvers from core ([@dyladan](https://github.com/dyladan)) +* [#103](https://github.com/open-telemetry/opentelemetry-js-api/pull/103) chore: reuse NoopTracer in ProxyTracer ([@Flarna](https://github.com/Flarna)) + +### Committers: 4 + +* Adam Dobrawy ([@ad-m](https://github.com/ad-m)) +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) +* Trent Mick ([@trentm](https://github.com/trentm)) + +## 1.0.1 + +### :bug: Bug Fix + +* [#96](https://github.com/open-telemetry/opentelemetry-js-api/pull/96) chore: remove circular dependency ([@dyladan](https://github.com/dyladan)) + +### Committers: 1 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) + +## 
1.0.0 + +### :memo: Documentation + +* [#89](https://github.com/open-telemetry/opentelemetry-js-api/pull/89) chore: update upgrade guidelines ([@dyladan](https://github.com/dyladan)) + +### :house: Internal + +* [#90](https://github.com/open-telemetry/opentelemetry-js-api/pull/90) chore: enable typescript 4.3 noImplicitOverride option ([@Flarna](https://github.com/Flarna)) + +### Committers: 2 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) + +## 0.21.0 + +### :boom: Breaking Change + +* [#78](https://github.com/open-telemetry/opentelemetry-js-api/pull/78) feat: unify signatures of `with` and `bind` ([@Rauno56](https://github.com/Rauno56)) +* [#46](https://github.com/open-telemetry/opentelemetry-js-api/pull/46) chore: do not export singletons ([@dyladan](https://github.com/dyladan)) + +### :rocket: Enhancement + +* [#81](https://github.com/open-telemetry/opentelemetry-js-api/pull/81) chore: function overloads implementation of startActiveSpan in noop t… ([@naseemkullah](https://github.com/naseemkullah)) + +### :house: Internal + +* [#84](https://github.com/open-telemetry/opentelemetry-js-api/pull/84) chore: remove unused backwards compatibility folder ([@Flarna](https://github.com/Flarna)) +* [#85](https://github.com/open-telemetry/opentelemetry-js-api/pull/85) chore: add node:16 to the test matrix ([@Rauno56](https://github.com/Rauno56)) +* [#63](https://github.com/open-telemetry/opentelemetry-js-api/pull/63) feat: debug log global registrations and logger overwrites ([@Rauno56](https://github.com/Rauno56)) +* [#75](https://github.com/open-telemetry/opentelemetry-js-api/pull/75) Add CodeQL Security Scan ([@xukaren](https://github.com/xukaren)) +* [#79](https://github.com/open-telemetry/opentelemetry-js-api/pull/79) chore: fix eslint config ([@Rauno56](https://github.com/Rauno56)) + +### Committers: 5 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) +* Karen Xu ([@xukaren](https://github.com/xukaren)) +* Naseem ([@naseemkullah](https://github.com/naseemkullah)) +* Rauno Viskus ([@Rauno56](https://github.com/Rauno56)) + +## 0.20.0 + +### :rocket: Enhancement + +* [#69](https://github.com/open-telemetry/opentelemetry-js-api/pull/69) feat(context): add utils method to remove keys from context ([@vmarchaud](https://github.com/vmarchaud)) +* [#71](https://github.com/open-telemetry/opentelemetry-js-api/pull/71) chore: export baggage ([@dyladan](https://github.com/dyladan)) + +### Committers: 2 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Valentin Marchaud ([@vmarchaud](https://github.com/vmarchaud)) + +## 0.19.0 + +### :boom: Breaking Change + +* [#55](https://github.com/open-telemetry/opentelemetry-js-api/pull/55) chore: move baggage methods in propagation namespace ([@vmarchaud](https://github.com/vmarchaud)) +* [#65](https://github.com/open-telemetry/opentelemetry-js-api/pull/65) chore: remove suppress instrumentation ([@dyladan](https://github.com/dyladan)) +* [#60](https://github.com/open-telemetry/opentelemetry-js-api/pull/60) chore: removing timed event ([@obecny](https://github.com/obecny)) +* [#58](https://github.com/open-telemetry/opentelemetry-js-api/pull/58) chore: use spancontext for link ([@dyladan](https://github.com/dyladan)) +* [#47](https://github.com/open-telemetry/opentelemetry-js-api/pull/47) chore: move span method for context in trace API #40 ([@vmarchaud](https://github.com/vmarchaud)) +* 
[#45](https://github.com/open-telemetry/opentelemetry-js-api/pull/45) chore: rename `span#context()` to `span#spanContext` ([@dyladan](https://github.com/dyladan)) +* [#43](https://github.com/open-telemetry/opentelemetry-js-api/pull/43) chore: renaming noop span to non recording span ([@obecny](https://github.com/obecny)) +* [#32](https://github.com/open-telemetry/opentelemetry-js-api/pull/32) feat!: return boolean success value from setGlobalXXX methods ([@dyladan](https://github.com/dyladan)) + +### :rocket: Enhancement + +* [#62](https://github.com/open-telemetry/opentelemetry-js-api/pull/62) chore: adding component logger ([@obecny](https://github.com/obecny)) +* [#54](https://github.com/open-telemetry/opentelemetry-js-api/pull/54) feat: add tracer.startActiveSpan() ([@naseemkullah](https://github.com/naseemkullah)) +* [#58](https://github.com/open-telemetry/opentelemetry-js-api/pull/58) chore: use spancontext for link ([@dyladan](https://github.com/dyladan)) +* [#51](https://github.com/open-telemetry/opentelemetry-js-api/pull/51) feat: add function to wrap SpanContext in NonRecordingSpan #49 ([@dyladan](https://github.com/dyladan)) + +### :memo: Documentation + +* [#64](https://github.com/open-telemetry/opentelemetry-js-api/pull/64) chore: document the reason for symbol.for ([@dyladan](https://github.com/dyladan)) +* [#44](https://github.com/open-telemetry/opentelemetry-js-api/pull/44) chore: updating readme headline and fixing links ([@obecny](https://github.com/obecny)) + +### Committers: 6 + +* Bartlomiej Obecny ([@obecny](https://github.com/obecny)) +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) +* Naseem ([@naseemkullah](https://github.com/naseemkullah)) +* Valentin Marchaud ([@vmarchaud](https://github.com/vmarchaud)) +* t2t2 ([@t2t2](https://github.com/t2t2)) + +## 1.0.0-rc.0 + +### :memo: Documentation + +* [#20](https://github.com/open-telemetry/opentelemetry-js-api/pull/20) docs: document latest manual tracing ([@dyladan](https://github.com/dyladan)) +* [#18](https://github.com/open-telemetry/opentelemetry-js-api/pull/18) chore: deploy docs on a release ([@dyladan](https://github.com/dyladan)) +* [#19](https://github.com/open-telemetry/opentelemetry-js-api/pull/19) docs: fix readme links ([@dyladan](https://github.com/dyladan)) + +### Committers: 1 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) + +## 0.18.1 + +### :bug: Bug Fix + +* [#16](https://github.com/open-telemetry/opentelemetry-js-api/pull/16) fix: Reverse the direction of the semver check ([@dyladan](https://github.com/dyladan)) + +### Committers: 1 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) + +## v0.18.0 + +### :boom: Breaking Change + +* [#9](https://github.com/open-telemetry/opentelemetry-js-api/pull/9) chore: refactor diag logger ([@dyladan](https://github.com/dyladan)) + +### :rocket: Enhancement + +* [#10](https://github.com/open-telemetry/opentelemetry-js-api/pull/10) Use semver to determine API compatibility ([@dyladan](https://github.com/dyladan)) + +### :house: Internal + +* [#12](https://github.com/open-telemetry/opentelemetry-js-api/pull/12) chore: don't disable rule eqeqeq ([@Flarna](https://github.com/Flarna)) +* [#8](https://github.com/open-telemetry/opentelemetry-js-api/pull/8) chore: remove nycrc in favor of tsconfig reporting ([@dyladan](https://github.com/dyladan)) +* [#3](https://github.com/open-telemetry/opentelemetry-js-api/pull/3) chore: add test workflow ([@dyladan](https://github.com/dyladan)) +* 
[#4](https://github.com/open-telemetry/opentelemetry-js-api/pull/4) chore: remove package lock ([@dyladan](https://github.com/dyladan)) +* [#2](https://github.com/open-telemetry/opentelemetry-js-api/pull/2) chore: add lint workflow ([@dyladan](https://github.com/dyladan)) + +### Committers: 2 + +* Daniel Dyla ([@dyladan](https://github.com/dyladan)) +* Gerhard Stöbich ([@Flarna](https://github.com/Flarna)) + +## v0.17.0 + +Versions previous to `0.18.0` were developed in another repository. +To see previous changelog entries see the [CHANGELOG.md](https://github.com/open-telemetry/opentelemetry-js/blob/main/CHANGELOG.md). diff --git a/node_modules/@opentelemetry/api/LICENSE b/node_modules/@opentelemetry/api/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/node_modules/@opentelemetry/api/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/@opentelemetry/api/README.md b/node_modules/@opentelemetry/api/README.md new file mode 100644 index 0000000000..363cfac136 --- /dev/null +++ b/node_modules/@opentelemetry/api/README.md @@ -0,0 +1,140 @@ + +--- +

+<!-- Badge/link header (original HTML markup not recoverable): API Documentation • Getting In Touch (GitHub Discussions), plus GitHub release, Codecov Status, license, and Build Status badges. -->

+ +--- + +# OpenTelemetry API for JavaScript + +[![NPM Published Version][npm-img]][npm-url] + +This package provides everything needed to interact with the OpenTelemetry API, including all TypeScript interfaces, enums, and no-op implementations. It is intended for use both on the server and in the browser. + +The methods in this package perform no operations by default. This means they can be safely called by a library or end-user application whether there is an SDK registered or not. In order to generate and export telemetry data, you will also need an SDK such as the [OpenTelemetry JS SDK][opentelemetry-js]. + +## Tracing Quick Start + +### You Will Need + +- An application you wish to instrument +- [OpenTelemetry JS SDK][opentelemetry-js] +- Node.js >=8.5.0 (14+ is preferred) or an ECMAScript 5+ compatible browser + +**Note:** ECMAScript 5+ compatibility is for this package only. Please refer to the documentation for the SDK you are using to determine its minimum ECMAScript version. + +**Note for library authors:** Only your end users will need an OpenTelemetry SDK. If you wish to support OpenTelemetry in your library, you only need to use the OpenTelemetry API. For more information, please read the [tracing documentation][docs-tracing]. + +### Install Dependencies + +```sh +npm install @opentelemetry/api @opentelemetry/sdk-trace-base +``` + +### Trace Your Application + +In order to get started with tracing, you will need to first register an SDK. The SDK you are using may provide a convenience method which calls the registration methods for you, but if you would like to call them directly they are documented here: [sdk registration methods][docs-sdk-registration]. + +Once you have registered an SDK, you can start and end spans. A simple example of basic SDK registration and tracing a simple operation is below. The example should export spans to the console once per second. For more information, see the [tracing documentation][docs-tracing]. + +```javascript +const { trace } = require("@opentelemetry/api"); +const { BasicTracerProvider, ConsoleSpanExporter, SimpleSpanProcessor } = require("@opentelemetry/sdk-trace-base"); + +// Create and register an SDK +const provider = new BasicTracerProvider(); +provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter())); +trace.setGlobalTracerProvider(provider); + +// Acquire a tracer from the global tracer provider which will be used to trace the application +const name = 'my-application-name'; +const version = '0.1.0'; +const tracer = trace.getTracer(name, version); + +// Trace your application by creating spans +async function operation() { + const span = tracer.startSpan("do operation"); + + // mock some work by sleeping 1 second + await new Promise((resolve, reject) => { + setTimeout(resolve, 1000); + }) + + span.end(); +} + +async function main() { + while (true) { + await operation(); + } +} + +main(); +``` + +## Version Compatibility + +Because the npm installer and node module resolution algorithm could potentially allow two or more copies of any given package to exist within the same `node_modules` structure, the OpenTelemetry API takes advantage of a variable on the `global` object to store the global API. When an API method in the API package is called, it checks if this `global` API exists and proxies calls to it if and only if it is a compatible API version. 
This means if a package has a dependency on an OpenTelemetry API version which is not compatible with the API used by the end user, the package will receive a no-op implementation of the API. + +## Upgrade Guidelines + +### 0.21.0 to 1.0.0 + +No breaking changes + +### 0.20.0 to 0.21.0 + +- [#78](https://github.com/open-telemetry/opentelemetry-js-api/issues/78) `api.context.bind` arguments reversed and `context` is now a required argument. +- [#46](https://github.com/open-telemetry/opentelemetry-js-api/issues/46) Noop classes and singletons are no longer exported. To create a noop span it is recommended to use `api.trace.wrapSpanContext` with `INVALID_SPAN_CONTEXT` instead of using the `NOOP_TRACER`. + +### 1.0.0-rc.3 to 0.20.0 + +- Removing `TimedEvent` which was not part of spec +- `HttpBaggage` renamed to `HttpBaggagePropagator` +- [#45](https://github.com/open-telemetry/opentelemetry-js-api/pull/45) `Span#context` renamed to `Span#spanContext` +- [#47](https://github.com/open-telemetry/opentelemetry-js-api/pull/47) `getSpan`/`setSpan`/`getSpanContext`/`setSpanContext` moved to `trace` namespace +- [#55](https://github.com/open-telemetry/opentelemetry-js-api/pull/55) `getBaggage`/`setBaggage`/`createBaggage` moved to `propagation` namespace + +## Useful links + +- For more information on OpenTelemetry, visit: +- For more about OpenTelemetry JavaScript: +- For help or feedback on this project, join us in [GitHub Discussions][discussions-url] + +## License + +Apache 2.0 - See [LICENSE][license-url] for more information. + +[opentelemetry-js]: https://github.com/open-telemetry/opentelemetry-js + +[discussions-url]: https://github.com/open-telemetry/opentelemetry-js/discussions +[license-url]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/LICENSE +[license-image]: https://img.shields.io/badge/license-Apache_2.0-green.svg?style=flat +[npm-url]: https://www.npmjs.com/package/@opentelemetry/api +[npm-img]: https://badge.fury.io/js/%40opentelemetry%2Fapi.svg +[docs-tracing]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/docs/tracing.md +[docs-sdk-registration]: https://github.com/open-telemetry/opentelemetry-js-api/blob/main/docs/sdk-registration.md diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.d.ts b/node_modules/@opentelemetry/api/build/esm/api/context.d.ts new file mode 100644 index 0000000000..61caee8dab --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.d.ts @@ -0,0 +1,41 @@ +import { Context, ContextManager } from '../context/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare class ContextAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Context API */ + static getInstance(): ContextAPI; + /** + * Set the current context manager. 
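+ *
+ * Editor's illustrative sketch (not part of the published typings): a Node.js
+ * application would typically register a concrete manager such as the
+ * `AsyncHooksContextManager` from the separate `@opentelemetry/context-async-hooks`
+ * package (assumed here):
+ *
+ * ```ts
+ * import { context } from "@opentelemetry/api";
+ * import { AsyncHooksContextManager } from "@opentelemetry/context-async-hooks";
+ *
+ * const contextManager = new AsyncHooksContextManager();
+ * contextManager.enable();
+ *
+ * // true if registration succeeded, false otherwise (e.g. one was already registered)
+ * const ok = context.setGlobalContextManager(contextManager);
+ * ```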
+ * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager: ContextManager): boolean; + /** + * Get the currently active context + */ + active(): Context; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind(context: Context, target: T): T; + private _getContextManager; + /** Disable and remove the global context manager */ + disable(): void; +} +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.js b/node_modules/@opentelemetry/api/build/esm/api/context.js new file mode 100644 index 0000000000..358f0e462b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.js @@ -0,0 +1,90 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +import { NoopContextManager } from '../context/NoopContextManager'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'context'; +var NOOP_CONTEXT_MANAGER = new NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +var ContextAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function ContextAPI() { + } + /** Get the singleton instance of the Context API */ + ContextAPI.getInstance = function () { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + }; + /** + * Set the current context manager. 
+ * + * @returns true if the context manager was successfully registered, else false + */ + ContextAPI.prototype.setGlobalContextManager = function (contextManager) { + return registerGlobal(API_NAME, contextManager, DiagAPI.instance()); + }; + /** + * Get the currently active context + */ + ContextAPI.prototype.active = function () { + return this._getContextManager().active(); + }; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + ContextAPI.prototype.with = function (context, fn, thisArg) { + var _a; + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], args)); + }; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + ContextAPI.prototype.bind = function (context, target) { + return this._getContextManager().bind(context, target); + }; + ContextAPI.prototype._getContextManager = function () { + return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; + }; + /** Disable and remove the global context manager */ + ContextAPI.prototype.disable = function () { + this._getContextManager().disable(); + unregisterGlobal(API_NAME, DiagAPI.instance()); + }; + return ContextAPI; +}()); +export { ContextAPI }; +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.js.map b/node_modules/@opentelemetry/api/build/esm/api/context.js.map new file mode 100644 index 0000000000..2cac260bfd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/api/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;AAEH,OAAO,EAAE,kBAAkB,EAAE,MAAM,+BAA+B,CAAC;AAEnE,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,SAAS,CAAC;AAC3B,IAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;GAEG;AACH;IAGE,+FAA+F;IAC/F;IAAuB,CAAC;IAExB,oDAAoD;IACtC,sBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAuB,GAA9B,UAA+B,cAA8B;QAC3D,OAAO,cAAc,CAAC,QAAQ,EAAE,cAAc,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACI,2BAAM,GAAb;QACE,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC5C,CAAC;IAED;;;;;;;OAOG;IACI,yBAAI,GAAX,UACE,OAAgB,EAChB,EAAK,EACL,OAA8B;;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,CAAA,KAAA,IAAI,CAAC,kBAAkB,EAAE,CAAA,CAAC,IAAI,0BAAC,OAAO,EAAE,EAAE,EAAE,OAAO,GAAK,IAAI,GAAE;IACvE,CAAC;IAED;;;;;OAKG;IACI,yBAAI,GAAX,UAAe,OAAgB,EAAE,MAAS;QACxC,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,CAAC;IAEO,uCAAkB,GAA1B;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,oBAAoB,CAAC;IACrD,CAAC;IAED,oDAAoD;IAC7C,4BAAO,GAAd;QACE,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,EAAE,CAAC;QACpC,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IACH,iBAAC;AAAD,CAAC,AAnED,IAmEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 
2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopContextManager } from '../context/NoopContextManager';\nimport { Context, ContextManager } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'context';\nconst NOOP_CONTEXT_MANAGER = new NoopContextManager();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport class ContextAPI {\n private static _instance?: ContextAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Context API */\n public static getInstance(): ContextAPI {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current context manager.\n *\n * @returns true if the context manager was successfully registered, else false\n */\n public setGlobalContextManager(contextManager: ContextManager): boolean {\n return registerGlobal(API_NAME, contextManager, DiagAPI.instance());\n }\n\n /**\n * Get the currently active context\n */\n public active(): Context {\n return this._getContextManager().active();\n }\n\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n public with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return this._getContextManager().with(context, fn, thisArg, ...args);\n }\n\n /**\n * Bind a context to a target function or event emitter\n *\n * @param context context to bind to the event emitter or function. 
Defaults to the currently active context\n * @param target function or event emitter to bind\n */\n public bind(context: Context, target: T): T {\n return this._getContextManager().bind(context, target);\n }\n\n private _getContextManager(): ContextManager {\n return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n }\n\n /** Disable and remove the global context manager */\n public disable() {\n this._getContextManager().disable();\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts b/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts new file mode 100644 index 0000000000..5c8c0befe5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.d.ts @@ -0,0 +1,38 @@ +import { ComponentLoggerOptions, DiagLogFunction, DiagLogger, DiagLogLevel } from '../diag/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export declare class DiagAPI implements DiagLogger { + private static _instance?; + /** Get the singleton instance of the DiagAPI API */ + static instance(): DiagAPI; + /** + * Private internal constructor + * @private + */ + private constructor(); + /** + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - [Optional] The DiagLogger instance to set as the default logger. + * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. If not provided it will default to INFO. + * @returns true if the logger was successfully registered, else false + */ + setLogger: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean; + /** + * + */ + createComponentLogger: (options: ComponentLoggerOptions) => DiagLogger; + verbose: DiagLogFunction; + debug: DiagLogFunction; + info: DiagLogFunction; + warn: DiagLogFunction; + error: DiagLogFunction; + /** + * Unregister the global logger and return to Noop + */ + disable: () => void; +} +//# sourceMappingURL=diag.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.js b/node_modules/@opentelemetry/api/build/esm/api/diag.js new file mode 100644 index 0000000000..05d85791e5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.js @@ -0,0 +1,90 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { DiagComponentLogger } from '../diag/ComponentLogger'; +import { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger'; +import { DiagLogLevel, } from '../diag/types'; +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +var API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +var DiagAPI = /** @class */ (function () { + /** + * Private internal constructor + * @private + */ + function DiagAPI() { + function _logProxy(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + var logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName].apply(logger, args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + var self = this; + // DiagAPI specific functions + self.setLogger = function (logger, logLevel) { + var _a, _b; + if (logLevel === void 0) { logLevel = DiagLogLevel.INFO; } + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + var oldLogger = getGlobal('diag'); + var newLogger = createLogLevelDiagLogger(logLevel, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger) { + var stack = (_b = new Error().stack) !== null && _b !== void 0 ? 
_b : ''; + oldLogger.warn("Current logger will be overwritten from " + stack); + newLogger.warn("Current logger will overwrite one already registered from " + stack); + } + return registerGlobal('diag', newLogger, self, true); + }; + self.disable = function () { + unregisterGlobal(API_NAME, self); + }; + self.createComponentLogger = function (options) { + return new DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + DiagAPI.instance = function () { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + }; + return DiagAPI; +}()); +export { DiagAPI }; +//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.js.map b/node_modules/@opentelemetry/api/build/esm/api/diag.js.map new file mode 100644 index 0000000000..29a4d74379 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/diag.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag.js","sourceRoot":"","sources":["../../../src/api/diag.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,mBAAmB,EAAE,MAAM,yBAAyB,CAAC;AAC9D,OAAO,EAAE,wBAAwB,EAAE,MAAM,iCAAiC,CAAC;AAC3E,OAAO,EAIL,YAAY,GACb,MAAM,eAAe,CAAC;AACvB,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAElC,IAAM,QAAQ,GAAG,MAAM,CAAC;AAExB;;;GAGG;AACH;IAYE;;;OAGG;IACH;QACE,SAAS,SAAS,CAAC,QAA0B;YAC3C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;gBACjC,6BAA6B;gBAC7B,IAAI,CAAC,MAAM;oBAAE,OAAO;gBACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAc,IAAI,EAAE;YACnC,CAAC,CAAC;QACJ,CAAC;QAED,mFAAmF;QACnF,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,6BAA6B;QAE7B,IAAI,CAAC,SAAS,GAAG,UACf,MAAkB,EAClB,QAA0C;;YAA1C,yBAAA,EAAA,WAAyB,YAAY,CAAC,IAAI;YAE1C,IAAI,MAAM,KAAK,IAAI,EAAE;gBACnB,mCAAmC;gBACnC,2DAA2D;gBAC3D,qFAAqF;gBACrF,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,oIAAoI,CACrI,CAAC;gBACF,IAAI,CAAC,KAAK,CAAC,MAAA,GAAG,CAAC,KAAK,mCAAI,GAAG,CAAC,OAAO,CAAC,CAAC;gBACrC,OAAO,KAAK,CAAC;aACd;YAED,IAAM,SAAS,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;YACpC,IAAM,SAAS,GAAG,wBAAwB,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;YAC7D,kFAAkF;YAClF,IAAI,SAAS,EAAE;gBACb,IAAM,KAAK,GAAG,MAAA,IAAI,KAAK,EAAE,CAAC,KAAK,mCAAI,iCAAiC,CAAC;gBACrE,SAAS,CAAC,IAAI,CAAC,6CAA2C,KAAO,CAAC,CAAC;gBACnE,SAAS,CAAC,IAAI,CACZ,+DAA6D,KAAO,CACrE,CAAC;aACH;YAED,OAAO,cAAc,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACvD,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG;YACb,gBAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QACnC,CAAC,CAAC;QAEF,IAAI,CAAC,qBAAqB,GAAG,UAAC,OAA+B;YAC3D,OAAO,IAAI,mBAAmB,CAAC,OAAO,CAAC,CAAC;QAC1C,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;QACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAtED,oDAAoD;IACtC,gBAAQ,GAAtB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,OAAO,EAAE,CAAC;SAChC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IA4FH,cAAC;AAAD,CAAC,AAtGD,IAsGC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by 
applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagComponentLogger } from '../diag/ComponentLogger';\nimport { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger';\nimport {\n ComponentLoggerOptions,\n DiagLogFunction,\n DiagLogger,\n DiagLogLevel,\n} from '../diag/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\n\nconst API_NAME = 'diag';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nexport class DiagAPI implements DiagLogger {\n private static _instance?: DiagAPI;\n\n /** Get the singleton instance of the DiagAPI API */\n public static instance(): DiagAPI {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Private internal constructor\n * @private\n */\n private constructor() {\n function _logProxy(funcName: keyof DiagLogger): DiagLogFunction {\n return function (...args) {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) return;\n return logger[funcName](...args);\n };\n }\n\n // Using self local variable for minification purposes as 'this' cannot be minified\n const self = this;\n\n // DiagAPI specific functions\n\n self.setLogger = (\n logger: DiagLogger,\n logLevel: DiagLogLevel = DiagLogLevel.INFO\n ) => {\n if (logger === self) {\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n const err = new Error(\n 'Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'\n );\n self.error(err.stack ?? err.message);\n return false;\n }\n\n const oldLogger = getGlobal('diag');\n const newLogger = createLogLevelDiagLogger(logLevel, logger);\n // There already is an logger registered. We'll let it know before overwriting it.\n if (oldLogger) {\n const stack = new Error().stack ?? '';\n oldLogger.warn(`Current logger will be overwritten from ${stack}`);\n newLogger.warn(\n `Current logger will overwrite one already registered from ${stack}`\n );\n }\n\n return registerGlobal('diag', newLogger, self, true);\n };\n\n self.disable = () => {\n unregisterGlobal(API_NAME, self);\n };\n\n self.createComponentLogger = (options: ComponentLoggerOptions) => {\n return new DiagComponentLogger(options);\n };\n\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n\n /**\n * Set the global DiagLogger and DiagLogLevel.\n * If a global diag logger is already set, this will override it.\n *\n * @param logger - [Optional] The DiagLogger instance to set as the default logger.\n * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. 
If not provided it will default to INFO.\n * @returns true if the logger was successfully registered, else false\n */\n public setLogger!: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean;\n /**\n *\n */\n public createComponentLogger!: (\n options: ComponentLoggerOptions\n ) => DiagLogger;\n\n // DiagLogger implementation\n public verbose!: DiagLogFunction;\n public debug!: DiagLogFunction;\n public info!: DiagLogFunction;\n public warn!: DiagLogFunction;\n public error!: DiagLogFunction;\n\n /**\n * Unregister the global logger and return to Noop\n */\n public disable!: () => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts b/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts new file mode 100644 index 0000000000..cf372cf20f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.d.ts @@ -0,0 +1,48 @@ +import { Context } from '../context/types'; +import { TextMapGetter, TextMapPropagator, TextMapSetter } from '../propagation/TextMapPropagator'; +import { getBaggage, setBaggage, deleteBaggage } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export declare class PropagationAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Propagator API */ + static getInstance(): PropagationAPI; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator: TextMapPropagator): boolean; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject(context: Context, carrier: Carrier, setter?: TextMapSetter): void; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract(context: Context, carrier: Carrier, getter?: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; + /** Remove the global propagator */ + disable(): void; + createBaggage: typeof createBaggage; + getBaggage: typeof getBaggage; + setBaggage: typeof setBaggage; + deleteBaggage: typeof deleteBaggage; + private _getGlobalPropagator; +} +//# sourceMappingURL=propagation.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.js b/node_modules/@opentelemetry/api/build/esm/api/propagation.js new file mode 100644 index 0000000000..ef694cb791 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.js @@ -0,0 +1,88 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator'; +import { defaultTextMapGetter, defaultTextMapSetter, } from '../propagation/TextMapPropagator'; +import { getBaggage, setBaggage, deleteBaggage, } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'propagation'; +var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +var PropagationAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function PropagationAPI() { + this.createBaggage = createBaggage; + this.getBaggage = getBaggage; + this.setBaggage = setBaggage; + this.deleteBaggage = deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + PropagationAPI.getInstance = function () { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + }; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + PropagationAPI.prototype.setGlobalPropagator = function (propagator) { + return registerGlobal(API_NAME, propagator, DiagAPI.instance()); + }; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + PropagationAPI.prototype.inject = function (context, carrier, setter) { + if (setter === void 0) { setter = defaultTextMapSetter; } + return this._getGlobalPropagator().inject(context, carrier, setter); + }; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + PropagationAPI.prototype.extract = function (context, carrier, getter) { + if (getter === void 0) { getter = defaultTextMapGetter; } + return this._getGlobalPropagator().extract(context, carrier, getter); + }; + /** + * Return a list of all fields which may be used by the propagator. 
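+ *
+ * Editor's illustrative sketch (not part of the published build output): once a
+ * concrete propagator such as the `W3CTraceContextPropagator` from
+ * `@opentelemetry/core` (assumed here) is registered, `fields()` reports the carrier
+ * keys that propagator reads and writes, and `inject()` fills them in on an outgoing
+ * carrier such as a headers object:
+ *
+ * ```ts
+ * import { context, propagation } from "@opentelemetry/api";
+ * import { W3CTraceContextPropagator } from "@opentelemetry/core";
+ *
+ * propagation.setGlobalPropagator(new W3CTraceContextPropagator());
+ *
+ * const headers: Record<string, string> = {};
+ * // Uses the default TextMapSetter; writes nothing unless the active context carries a span context.
+ * propagation.inject(context.active(), headers);
+ *
+ * console.log(propagation.fields()); // typically ["traceparent", "tracestate"]
+ * ```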
+ */ + PropagationAPI.prototype.fields = function () { + return this._getGlobalPropagator().fields(); + }; + /** Remove the global propagator */ + PropagationAPI.prototype.disable = function () { + unregisterGlobal(API_NAME, DiagAPI.instance()); + }; + PropagationAPI.prototype._getGlobalPropagator = function () { + return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + }; + return PropagationAPI; +}()); +export { PropagationAPI }; +//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map b/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map new file mode 100644 index 0000000000..60ddbbdb79 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/propagation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation.js","sourceRoot":"","sources":["../../../src/api/propagation.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,qBAAqB,EAAE,MAAM,sCAAsC,CAAC;AAC7E,OAAO,EACL,oBAAoB,EACpB,oBAAoB,GAIrB,MAAM,kCAAkC,CAAC;AAC1C,OAAO,EACL,UAAU,EACV,UAAU,EACV,aAAa,GACd,MAAM,4BAA4B,CAAC;AACpC,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,aAAa,CAAC;AAC/B,IAAM,wBAAwB,GAAG,IAAI,qBAAqB,EAAE,CAAC;AAE7D;;GAEG;AACH;IAGE,+FAA+F;IAC/F;QA8DO,kBAAa,GAAG,aAAa,CAAC;QAE9B,eAAU,GAAG,UAAU,CAAC;QAExB,eAAU,GAAG,UAAU,CAAC;QAExB,kBAAa,GAAG,aAAa,CAAC;IApEd,CAAC;IAExB,uDAAuD;IACzC,0BAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,EAAE,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAmB,GAA1B,UAA2B,UAA6B;QACtD,OAAO,cAAc,CAAC,QAAQ,EAAE,UAAU,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACI,+BAAM,GAAb,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,6BAAqD;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;OAMG;IACI,gCAAO,GAAd,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,6BAAqD;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,+BAAM,GAAb;QACE,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC9C,CAAC;IAED,mCAAmC;IAC5B,gCAAO,GAAd;QACE,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IAUO,6CAAoB,GAA5B;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,wBAAwB,CAAC;IACzD,CAAC;IACH,qBAAC;AAAD,CAAC,AA7ED,IA6EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator';\nimport {\n defaultTextMapGetter,\n defaultTextMapSetter,\n TextMapGetter,\n TextMapPropagator,\n TextMapSetter,\n} from '../propagation/TextMapPropagator';\nimport {\n getBaggage,\n setBaggage,\n deleteBaggage,\n} from 
'../baggage/context-helpers';\nimport { createBaggage } from '../baggage/utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'propagation';\nconst NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nexport class PropagationAPI {\n private static _instance?: PropagationAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Propagator API */\n public static getInstance(): PropagationAPI {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current propagator.\n *\n * @returns true if the propagator was successfully registered, else false\n */\n public setGlobalPropagator(propagator: TextMapPropagator): boolean {\n return registerGlobal(API_NAME, propagator, DiagAPI.instance());\n }\n\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n public inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter = defaultTextMapSetter\n ): void {\n return this._getGlobalPropagator().inject(context, carrier, setter);\n }\n\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n public extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter = defaultTextMapGetter\n ): Context {\n return this._getGlobalPropagator().extract(context, carrier, getter);\n }\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n public fields(): string[] {\n return this._getGlobalPropagator().fields();\n }\n\n /** Remove the global propagator */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n\n public createBaggage = createBaggage;\n\n public getBaggage = getBaggage;\n\n public setBaggage = setBaggage;\n\n public deleteBaggage = deleteBaggage;\n\n private _getGlobalPropagator(): TextMapPropagator {\n return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts b/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts new file mode 100644 index 0000000000..9135b74b69 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.d.ts @@ -0,0 +1,39 @@ +import { isSpanContextValid, wrapSpanContext } from '../trace/spancontext-utils'; +import { Tracer } from '../trace/tracer'; +import { TracerProvider } from '../trace/tracer_provider'; +import { deleteSpan, getSpan, getSpanContext, setSpan, setSpanContext } from '../trace/context-utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export declare class TraceAPI { + private static _instance?; + private _proxyTracerProvider; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Trace API */ + static getInstance(): TraceAPI; + /** + * Set the current global tracer. 
+ * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider: TracerProvider): boolean; + /** + * Returns the global tracer provider. + */ + getTracerProvider(): TracerProvider; + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name: string, version?: string): Tracer; + /** Remove the global tracer provider */ + disable(): void; + wrapSpanContext: typeof wrapSpanContext; + isSpanContextValid: typeof isSpanContextValid; + deleteSpan: typeof deleteSpan; + getSpan: typeof getSpan; + getSpanContext: typeof getSpanContext; + setSpan: typeof setSpan; + setSpanContext: typeof setSpanContext; +} +//# sourceMappingURL=trace.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.js b/node_modules/@opentelemetry/api/build/esm/api/trace.js new file mode 100644 index 0000000000..1b40cad3ec --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.js @@ -0,0 +1,76 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; +import { ProxyTracerProvider } from '../trace/ProxyTracerProvider'; +import { isSpanContextValid, wrapSpanContext, } from '../trace/spancontext-utils'; +import { deleteSpan, getSpan, getSpanContext, setSpan, setSpanContext, } from '../trace/context-utils'; +import { DiagAPI } from './diag'; +var API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +var TraceAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function TraceAPI() { + this._proxyTracerProvider = new ProxyTracerProvider(); + this.wrapSpanContext = wrapSpanContext; + this.isSpanContextValid = isSpanContextValid; + this.deleteSpan = deleteSpan; + this.getSpan = getSpan; + this.getSpanContext = getSpanContext; + this.setSpan = setSpan; + this.setSpanContext = setSpanContext; + } + /** Get the singleton instance of the Trace API */ + TraceAPI.getInstance = function () { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + }; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + TraceAPI.prototype.setGlobalTracerProvider = function (provider) { + var success = registerGlobal(API_NAME, this._proxyTracerProvider, DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + }; + /** + * Returns the global tracer provider. + */ + TraceAPI.prototype.getTracerProvider = function () { + return getGlobal(API_NAME) || this._proxyTracerProvider; + }; + /** + * Returns a tracer from the global tracer provider. 
+ */ + TraceAPI.prototype.getTracer = function (name, version) { + return this.getTracerProvider().getTracer(name, version); + }; + /** Remove the global tracer provider */ + TraceAPI.prototype.disable = function () { + unregisterGlobal(API_NAME, DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider(); + }; + return TraceAPI; +}()); +export { TraceAPI }; +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.js.map b/node_modules/@opentelemetry/api/build/esm/api/trace.js.map new file mode 100644 index 0000000000..9bc9e0462c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/api/trace.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace.js","sourceRoot":"","sources":["../../../src/api/trace.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EACL,SAAS,EACT,cAAc,EACd,gBAAgB,GACjB,MAAM,0BAA0B,CAAC;AAClC,OAAO,EAAE,mBAAmB,EAAE,MAAM,8BAA8B,CAAC;AACnE,OAAO,EACL,kBAAkB,EAClB,eAAe,GAChB,MAAM,4BAA4B,CAAC;AAGpC,OAAO,EACL,UAAU,EACV,OAAO,EACP,cAAc,EACd,OAAO,EACP,cAAc,GACf,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AAEjC,IAAM,QAAQ,GAAG,OAAO,CAAC;AAEzB;;GAEG;AACH;IAKE,+FAA+F;IAC/F;QAHQ,yBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;QAmDlD,oBAAe,GAAG,eAAe,CAAC;QAElC,uBAAkB,GAAG,kBAAkB,CAAC;QAExC,eAAU,GAAG,UAAU,CAAC;QAExB,YAAO,GAAG,OAAO,CAAC;QAElB,mBAAc,GAAG,cAAc,CAAC;QAEhC,YAAO,GAAG,OAAO,CAAC;QAElB,mBAAc,GAAG,cAAc,CAAC;IA5DhB,CAAC;IAExB,kDAAkD;IACpC,oBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,EAAE,CAAC;SACjC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,0CAAuB,GAA9B,UAA+B,QAAwB;QACrD,IAAM,OAAO,GAAG,cAAc,CAC5B,QAAQ,EACR,IAAI,CAAC,oBAAoB,EACzB,OAAO,CAAC,QAAQ,EAAE,CACnB,CAAC;QACF,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;SACjD;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,oCAAiB,GAAxB;QACE,OAAO,SAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,CAAC;IAC1D,CAAC;IAED;;OAEG;IACI,4BAAS,GAAhB,UAAiB,IAAY,EAAE,OAAgB;QAC7C,OAAO,IAAI,CAAC,iBAAiB,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAED,wCAAwC;IACjC,0BAAO,GAAd;QACE,gBAAgB,CAAC,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,oBAAoB,GAAG,IAAI,mBAAmB,EAAE,CAAC;IACxD,CAAC;IAeH,eAAC;AAAD,CAAC,AAnED,IAmEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { ProxyTracerProvider } from '../trace/ProxyTracerProvider';\nimport {\n isSpanContextValid,\n wrapSpanContext,\n} from '../trace/spancontext-utils';\nimport { Tracer } from '../trace/tracer';\nimport { TracerProvider } from '../trace/tracer_provider';\nimport {\n deleteSpan,\n getSpan,\n getSpanContext,\n setSpan,\n setSpanContext,\n} from '../trace/context-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'trace';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nexport 
class TraceAPI {\n private static _instance?: TraceAPI;\n\n private _proxyTracerProvider = new ProxyTracerProvider();\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Trace API */\n public static getInstance(): TraceAPI {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global tracer.\n *\n * @returns true if the tracer provider was successfully registered, else false\n */\n public setGlobalTracerProvider(provider: TracerProvider): boolean {\n const success = registerGlobal(\n API_NAME,\n this._proxyTracerProvider,\n DiagAPI.instance()\n );\n if (success) {\n this._proxyTracerProvider.setDelegate(provider);\n }\n return success;\n }\n\n /**\n * Returns the global tracer provider.\n */\n public getTracerProvider(): TracerProvider {\n return getGlobal(API_NAME) || this._proxyTracerProvider;\n }\n\n /**\n * Returns a tracer from the global tracer provider.\n */\n public getTracer(name: string, version?: string): Tracer {\n return this.getTracerProvider().getTracer(name, version);\n }\n\n /** Remove the global tracer provider */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n this._proxyTracerProvider = new ProxyTracerProvider();\n }\n\n public wrapSpanContext = wrapSpanContext;\n\n public isSpanContextValid = isSpanContextValid;\n\n public deleteSpan = deleteSpan;\n\n public getSpan = getSpan;\n\n public getSpanContext = getSpanContext;\n\n public setSpan = setSpan;\n\n public setSpanContext = setSpanContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts new file mode 100644 index 0000000000..a387685b64 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.d.ts @@ -0,0 +1,23 @@ +import { Context } from '../context/types'; +import { Baggage } from './types'; +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getBaggage(context: Context): Baggage | undefined; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export declare function setBaggage(context: Context, baggage: Baggage): Context; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export declare function deleteBaggage(context: Context): Context; +//# sourceMappingURL=context-helpers.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js new file mode 100644 index 0000000000..9cd7bee22a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { createContextKey } from '../context/context'; +/** + * Baggage key + */ +var BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map new file mode 100644 index 0000000000..221f6d067b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-helpers.js","sourceRoot":"","sources":["../../../src/baggage/context-helpers.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD;;GAEG;AACH,IAAM,WAAW,GAAG,gBAAgB,CAAC,2BAA2B,CAAC,CAAC;AAElE;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAQ,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa,IAAI,SAAS,CAAC;AACjE,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB,EAAE,OAAgB;IAC3D,OAAO,OAAO,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;AAChD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAAC,OAAgB;IAC5C,OAAO,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAC1C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Baggage } from './types';\n\n/**\n * Baggage key\n */\nconst BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n\n/**\n * Retrieve the current baggage from the given context\n *\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getBaggage(context: Context): Baggage | undefined {\n return (context.getValue(BAGGAGE_KEY) as Baggage) || 
undefined;\n}\n\n/**\n * Store a baggage in the given context\n *\n * @param {Context} Context that manage all context values\n * @param {Baggage} baggage that will be set in the actual context\n */\nexport function setBaggage(context: Context, baggage: Baggage): Context {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\n\n/**\n * Delete the baggage stored in the given context\n *\n * @param {Context} Context that manage all context values\n */\nexport function deleteBaggage(context: Context): Context {\n return context.deleteValue(BAGGAGE_KEY);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts new file mode 100644 index 0000000000..e6b4554042 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.d.ts @@ -0,0 +1,12 @@ +import type { Baggage, BaggageEntry } from '../types'; +export declare class BaggageImpl implements Baggage { + private _entries; + constructor(entries?: Map); + getEntry(key: string): BaggageEntry | undefined; + getAllEntries(): [string, BaggageEntry][]; + setEntry(key: string, entry: BaggageEntry): BaggageImpl; + removeEntry(key: string): BaggageImpl; + removeEntries(...keys: string[]): BaggageImpl; + clear(): BaggageImpl; +} +//# sourceMappingURL=baggage-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js new file mode 100644 index 0000000000..30de6cbf7a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js @@ -0,0 +1,61 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var BaggageImpl = /** @class */ (function () { + function BaggageImpl(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + BaggageImpl.prototype.getEntry = function (key) { + var entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + }; + BaggageImpl.prototype.getAllEntries = function () { + return Array.from(this._entries.entries()).map(function (_a) { + var k = _a[0], v = _a[1]; + return [k, v]; + }); + }; + BaggageImpl.prototype.setEntry = function (key, entry) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + }; + BaggageImpl.prototype.removeEntry = function (key) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + }; + BaggageImpl.prototype.removeEntries = function () { + var keys = []; + for (var _i = 0; _i < arguments.length; _i++) { + keys[_i] = arguments[_i]; + } + var newBaggage = new BaggageImpl(this._entries); + for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { + var key = keys_1[_a]; + newBaggage._entries.delete(key); + } + return newBaggage; + }; + BaggageImpl.prototype.clear = function () { + return new BaggageImpl(); + }; + return BaggageImpl; +}()); +export { BaggageImpl }; +//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map new file mode 100644 index 0000000000..d67f90c0e0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js.map @@ -0,0 +1 @@ +{"version":3,"file":"baggage-impl.js","sourceRoot":"","sources":["../../../../src/baggage/internal/baggage-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH;IAGE,qBAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;IACzD,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW;QAClB,IAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IAED,mCAAa,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,UAAC,EAAM;gBAAL,CAAC,QAAA,EAAE,CAAC,QAAA;YAAM,OAAA,CAAC,CAAC,EAAE,CAAC,CAAC;QAAN,CAAM,CAAC,CAAC;IACrE,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW,EAAE,KAAmB;QACvC,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACpC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,iCAAW,GAAX,UAAY,GAAW;QACrB,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAChC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,mCAAa,GAAb;QAAc,cAAiB;aAAjB,UAAiB,EAAjB,qBAAiB,EAAjB,IAAiB;YAAjB,yBAAiB;;QAC7B,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,KAAkB,UAAI,EAAJ,aAAI,EAAJ,kBAAI,EAAJ,IAAI,EAAE;YAAnB,IAAM,GAAG,aAAA;YACZ,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACjC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,2BAAK,GAAL;QACE,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES 
OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport type { Baggage, BaggageEntry } from '../types';\n\nexport class BaggageImpl implements Baggage {\n private _entries: Map;\n\n constructor(entries?: Map) {\n this._entries = entries ? new Map(entries) : new Map();\n }\n\n getEntry(key: string): BaggageEntry | undefined {\n const entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n\n return Object.assign({}, entry);\n }\n\n getAllEntries(): [string, BaggageEntry][] {\n return Array.from(this._entries.entries()).map(([k, v]) => [k, v]);\n }\n\n setEntry(key: string, entry: BaggageEntry): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n }\n\n removeEntry(key: string): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n }\n\n removeEntries(...keys: string[]): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n for (const key of keys) {\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n }\n\n clear(): BaggageImpl {\n return new BaggageImpl();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts new file mode 100644 index 0000000000..9cd991c1cd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.d.ts @@ -0,0 +1,5 @@ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export declare const baggageEntryMetadataSymbol: unique symbol; +//# sourceMappingURL=symbol.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js new file mode 100644 index 0000000000..0e7dc36dd8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export var baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map new file mode 100644 index 0000000000..f0748665cd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js.map @@ -0,0 +1 @@ +{"version":3,"file":"symbol.js","sourceRoot":"","sources":["../../../../src/baggage/internal/symbol.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH;;GAEG;AACH,MAAM,CAAC,IAAM,0BAA0B,GAAG,MAAM,CAAC,sBAAsB,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexport const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts new file mode 100644 index 0000000000..32fa0ec6bd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.d.ts @@ -0,0 +1,60 @@ +import { baggageEntryMetadataSymbol } from './internal/symbol'; +export interface BaggageEntry { + /** `String` value of the `BaggageEntry`. */ + value: string; + /** + * Metadata is an optional string property defined by the W3C baggage specification. + * It currently has no special meaning defined by the specification. + */ + metadata?: BaggageEntryMetadata; +} +/** + * Serializable Metadata defined by the W3C baggage specification. + * It currently has no special meaning defined by the OpenTelemetry or W3C. + */ +export declare type BaggageEntryMetadata = { + toString(): string; +} & { + __TYPE__: typeof baggageEntryMetadataSymbol; +}; +/** + * Baggage represents collection of key-value pairs with optional metadata. + * Each key of Baggage is associated with exactly one value. + * Baggage may be used to annotate and enrich telemetry data. 
+ */ +export interface Baggage { + /** + * Get an entry from Baggage if it exists + * + * @param key The key which identifies the BaggageEntry + */ + getEntry(key: string): BaggageEntry | undefined; + /** + * Get a list of all entries in the Baggage + */ + getAllEntries(): [string, BaggageEntry][]; + /** + * Returns a new baggage with the entries from the current bag and the specified entry + * + * @param key string which identifies the baggage entry + * @param entry BaggageEntry for the given key + */ + setEntry(key: string, entry: BaggageEntry): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entry + * + * @param key key identifying the entry to be removed + */ + removeEntry(key: string): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entries + * + * @param key keys identifying the entries to be removed + */ + removeEntries(...key: string[]): Baggage; + /** + * Returns a new baggage with no entries + */ + clear(): Baggage; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.js b/node_modules/@opentelemetry/api/build/esm/baggage/types.js new file mode 100644 index 0000000000..928faad02d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map new file mode 100644 index 0000000000..ae80c19757 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/baggage/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\n\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface BaggageEntry {\n /** `String` value of the `BaggageEntry`. 
*/\n value: string;\n /**\n * Metadata is an optional string property defined by the W3C baggage specification.\n * It currently has no special meaning defined by the specification.\n */\n metadata?: BaggageEntryMetadata;\n}\n\n/**\n * Serializable Metadata defined by the W3C baggage specification.\n * It currently has no special meaning defined by the OpenTelemetry or W3C.\n */\nexport type BaggageEntryMetadata = { toString(): string } & {\n __TYPE__: typeof baggageEntryMetadataSymbol;\n};\n\n/**\n * Baggage represents collection of key-value pairs with optional metadata.\n * Each key of Baggage is associated with exactly one value.\n * Baggage may be used to annotate and enrich telemetry data.\n */\nexport interface Baggage {\n /**\n * Get an entry from Baggage if it exists\n *\n * @param key The key which identifies the BaggageEntry\n */\n getEntry(key: string): BaggageEntry | undefined;\n\n /**\n * Get a list of all entries in the Baggage\n */\n getAllEntries(): [string, BaggageEntry][];\n\n /**\n * Returns a new baggage with the entries from the current bag and the specified entry\n *\n * @param key string which identifies the baggage entry\n * @param entry BaggageEntry for the given key\n */\n setEntry(key: string, entry: BaggageEntry): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entry\n *\n * @param key key identifying the entry to be removed\n */\n removeEntry(key: string): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entries\n *\n * @param key keys identifying the entries to be removed\n */\n removeEntries(...key: string[]): Baggage;\n\n /**\n * Returns a new baggage with no entries\n */\n clear(): Baggage;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts b/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts new file mode 100644 index 0000000000..9955d9e285 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.d.ts @@ -0,0 +1,15 @@ +import { Baggage, BaggageEntry, BaggageEntryMetadata } from './types'; +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export declare function createBaggage(entries?: Record): Baggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export declare function baggageEntryMetadataFromString(str: string): BaggageEntryMetadata; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js new file mode 100644 index 0000000000..3cc27165b9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { DiagAPI } from '../api/diag'; +import { BaggageImpl } from './internal/baggage-impl'; +import { baggageEntryMetadataSymbol } from './internal/symbol'; +var diag = DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new BaggageImpl(new Map(Object.entries(entries))); +} +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error("Cannot create baggage metadata from unknown type: " + typeof str); + str = ''; + } + return { + __TYPE__: baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map new file mode 100644 index 0000000000..f5a00f5bb5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/baggage/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,aAAa,CAAC;AACtC,OAAO,EAAE,WAAW,EAAE,MAAM,yBAAyB,CAAC;AACtD,OAAO,EAAE,0BAA0B,EAAE,MAAM,mBAAmB,CAAC;AAG/D,IAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;AAEhC;;;;GAIG;AACH,MAAM,UAAU,aAAa,CAC3B,OAA0C;IAA1C,wBAAA,EAAA,YAA0C;IAE1C,OAAO,IAAI,WAAW,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,8BAA8B,CAC5C,GAAW;IAEX,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,KAAK,CACR,uDAAqD,OAAO,GAAK,CAClE,CAAC;QACF,GAAG,GAAG,EAAE,CAAC;KACV;IAED,OAAO;QACL,QAAQ,EAAE,0BAA0B;QACpC,QAAQ;YACN,OAAO,GAAG,CAAC;QACb,CAAC;KACF,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagAPI } from '../api/diag';\nimport { BaggageImpl } from './internal/baggage-impl';\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\nimport { Baggage, BaggageEntry, BaggageEntryMetadata } from './types';\n\nconst diag = DiagAPI.instance();\n\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nexport function createBaggage(\n entries: Record = {}\n): Baggage {\n return new BaggageImpl(new Map(Object.entries(entries)));\n}\n\n/**\n * Create a serializable BaggageEntryMetadata object from a string.\n *\n * @param str string metadata. 
Format is currently not defined by the spec and has no special meaning.\n *\n */\nexport function baggageEntryMetadataFromString(\n str: string\n): BaggageEntryMetadata {\n if (typeof str !== 'string') {\n diag.error(\n `Cannot create baggage metadata from unknown type: ${typeof str}`\n );\n str = '';\n }\n\n return {\n __TYPE__: baggageEntryMetadataSymbol,\n toString() {\n return str;\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts new file mode 100644 index 0000000000..19994fb2a8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.d.ts @@ -0,0 +1,15 @@ +/** + * Attributes is a map from string to attribute values. + * + * Note: only the own enumerable keys are counted as valid attribute keys. + */ +export interface Attributes { + [attributeKey: string]: AttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = string | number | boolean | Array | Array | Array; +//# sourceMappingURL=Attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js new file mode 100644 index 0000000000..dbb1e49772 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map new file mode 100644 index 0000000000..b590667432 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Attributes.js","sourceRoot":"","sources":["../../../src/common/Attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Attributes is a map from string to attribute values.\n *\n * Note: only the own enumerable keys are counted as valid attribute keys.\n */\nexport interface Attributes {\n [attributeKey: string]: AttributeValue | undefined;\n}\n\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport type AttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts new file mode 100644 index 0000000000..e175a7fdc8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.d.ts @@ -0,0 +1,26 @@ +interface ExceptionWithCode { + code: string | number; + name?: string; + message?: string; + stack?: string; +} +interface ExceptionWithMessage { + code?: string | number; + message: string; + name?: string; + stack?: string; +} +interface ExceptionWithName { + code?: string | number; + message?: string; + name: string; + stack?: string; +} +/** + * Defines Exception. + * + * string or an object with one of (message or name or code) and optional stack + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; +export {}; +//# sourceMappingURL=Exception.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.js b/node_modules/@opentelemetry/api/build/esm/common/Exception.js new file mode 100644 index 0000000000..6522a8e659 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map b/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map new file mode 100644 index 0000000000..989dd3d181 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Exception.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Exception.js","sourceRoot":"","sources":["../../../src/common/Exception.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\ninterface ExceptionWithCode {\n code: string | number;\n name?: string;\n message?: string;\n stack?: string;\n}\n\ninterface ExceptionWithMessage {\n code?: string | number;\n message: string;\n name?: string;\n stack?: string;\n}\n\ninterface ExceptionWithName {\n code?: string | number;\n message?: string;\n name: string;\n stack?: string;\n}\n\n/**\n * Defines Exception.\n *\n * string or an object with one of (message or name or code) and optional stack\n */\nexport type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts b/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts new file mode 100644 index 0000000000..cc3c502c12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.d.ts @@ -0,0 +1,20 @@ +/** + * Defines High-Resolution Time. + * + * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970. + * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds. + * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150. + * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds: + * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210. + * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds: + * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000. + * This is represented in HrTime format as [1609504210, 150000000]. + */ +export declare type HrTime = [number, number]; +/** + * Defines TimeInput. 
+ * + * hrtime, epoch milliseconds, performance.now() or Date + */ +export declare type TimeInput = HrTime | number | Date; +//# sourceMappingURL=Time.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.js b/node_modules/@opentelemetry/api/build/esm/common/Time.js new file mode 100644 index 0000000000..2abdf582fc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.js.map b/node_modules/@opentelemetry/api/build/esm/common/Time.js.map new file mode 100644 index 0000000000..ae124f0361 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/common/Time.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Time.js","sourceRoot":"","sources":["../../../src/common/Time.ts"],"names":[],"mappings":"","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Defines High-Resolution Time.\n *\n * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970.\n * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds.\n * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150.\n * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds:\n * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210.\n * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds:\n * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000.\n * This is represented in HrTime format as [1609504210, 150000000].\n */\nexport type HrTime = [number, number];\n\n/**\n * Defines TimeInput.\n *\n * hrtime, epoch milliseconds, performance.now() or Date\n */\nexport type TimeInput = HrTime | number | Date;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts new file mode 100644 index 0000000000..48a165970c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.d.ts @@ -0,0 +1,9 @@ +import * as types from './types'; +export declare class NoopContextManager implements types.ContextManager { + active(): types.Context; + with ReturnType>(_context: types.Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + bind(_context: types.Context, target: T): T; + enable(): this; + disable(): this; +} +//# sourceMappingURL=NoopContextManager.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js 
new file mode 100644 index 0000000000..755322dd2d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js @@ -0,0 +1,47 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +import { ROOT_CONTEXT } from './context'; +var NoopContextManager = /** @class */ (function () { + function NoopContextManager() { + } + NoopContextManager.prototype.active = function () { + return ROOT_CONTEXT; + }; + NoopContextManager.prototype.with = function (_context, fn, thisArg) { + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return fn.call.apply(fn, __spreadArray([thisArg], args)); + }; + NoopContextManager.prototype.bind = function (_context, target) { + return target; + }; + NoopContextManager.prototype.enable = function () { + return this; + }; + NoopContextManager.prototype.disable = function () { + return this; + }; + return NoopContextManager; +}()); +export { NoopContextManager }; +//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map new file mode 100644 index 0000000000..f6217afcee --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopContextManager.js","sourceRoot":"","sources":["../../../src/context/NoopContextManager.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;;;;;;AAEH,OAAO,EAAE,YAAY,EAAE,MAAM,WAAW,CAAC;AAGzC;IAAA;IAyBA,CAAC;IAxBC,mCAAM,GAAN;QACE,OAAO,YAAY,CAAC;IACtB,CAAC;IAED,iCAAI,GAAJ,UACE,QAAuB,EACvB,EAAK,EACL,OAA8B;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,EAAE,CAAC,IAAI,OAAP,EAAE,iBAAM,OAAO,GAAK,IAAI,GAAE;IACnC,CAAC;IAED,iCAAI,GAAJ,UAAQ,QAAuB,EAAE,MAAS;QACxC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,mCAAM,GAAN;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IAED,oCAAO,GAAP;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IACH,yBAAC;AAAD,CAAC,AAzBD,IAyBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ROOT_CONTEXT } from './context';\nimport * as types from './types';\n\nexport class NoopContextManager implements 
types.ContextManager {\n active(): types.Context {\n return ROOT_CONTEXT;\n }\n\n with ReturnType>(\n _context: types.Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return fn.call(thisArg, ...args);\n }\n\n bind(_context: types.Context, target: T): T {\n return target;\n }\n\n enable(): this {\n return this;\n }\n\n disable(): this {\n return this;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.d.ts b/node_modules/@opentelemetry/api/build/esm/context/context.d.ts new file mode 100644 index 0000000000..8be0259401 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.d.ts @@ -0,0 +1,6 @@ +import { Context } from './types'; +/** Get a key to uniquely identify a context value */ +export declare function createContextKey(description: string): symbol; +/** The root context is used as the default parent context when there is no active context */ +export declare const ROOT_CONTEXT: Context; +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.js b/node_modules/@opentelemetry/api/build/esm/context/context.js new file mode 100644 index 0000000000..f8909deeb5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.js @@ -0,0 +1,52 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** Get a key to uniquely identify a context value */ +export function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +var BaseContext = /** @class */ (function () { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + function BaseContext(parentContext) { + // for minification + var self = this; + self._currentContext = parentContext ? 
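A quick sketch of what the NoopContextManager compiled above actually does: active() always returns the root context, and with() ignores the supplied context and simply invokes the callback with the given receiver and arguments. The deep import path below is illustrative only; it mirrors the internal build location added in this hunk, and the class is not part of the public entrypoint.

    import { ROOT_CONTEXT } from '@opentelemetry/api';
    // Internal class; deep import shown only for illustration.
    import { NoopContextManager } from '@opentelemetry/api/build/esm/context/NoopContextManager';

    const manager = new NoopContextManager();

    console.log(manager.active() === ROOT_CONTEXT); // true: always the root context

    // `with` is a plain pass-through: fn is called with thisArg and ...args.
    const sum = manager.with(ROOT_CONTEXT, (a: number, b: number) => a + b, undefined, 1, 2);
    console.log(sum); // 3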
new Map(parentContext) : new Map(); + self.getValue = function (key) { return self._currentContext.get(key); }; + self.setValue = function (key, value) { + var context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = function (key) { + var context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } + return BaseContext; +}()); +/** The root context is used as the default parent context when there is no active context */ +export var ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.js.map b/node_modules/@opentelemetry/api/build/esm/context/context.js.map new file mode 100644 index 0000000000..7a7affd0da --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/context/context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,qDAAqD;AACrD,MAAM,UAAU,gBAAgB,CAAC,WAAmB;IAClD,0EAA0E;IAC1E,2EAA2E;IAC3E,wEAAwE;IACxE,mDAAmD;IACnD,EAAE;IACF,8EAA8E;IAC9E,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AACjC,CAAC;AAED;IAGE;;;;OAIG;IACH,qBAAY,aAAoC;QAC9C,mBAAmB;QACnB,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,IAAK,OAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,EAA7B,CAA6B,CAAC;QAE/D,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,EAAE,KAAc;YAC1C,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;YACxC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;QAEF,IAAI,CAAC,WAAW,GAAG,UAAC,GAAW;YAC7B,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACpC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;IAyBH,kBAAC;AAAD,CAAC,AApDD,IAoDC;AAED,6FAA6F;AAC7F,MAAM,CAAC,IAAM,YAAY,GAAY,IAAI,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from './types';\n\n/** Get a key to uniquely identify a context value */\nexport function createContextKey(description: string) {\n // The specification states that for the same input, multiple calls should\n // return different keys. 
Due to the nature of the JS dependency management\n // system, this creates problems where multiple versions of some package\n // could hold different keys for the same property.\n //\n // Therefore, we use Symbol.for which returns the same key for the same input.\n return Symbol.for(description);\n}\n\nclass BaseContext implements Context {\n private _currentContext!: Map;\n\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n constructor(parentContext?: Map) {\n // for minification\n const self = this;\n\n self._currentContext = parentContext ? new Map(parentContext) : new Map();\n\n self.getValue = (key: symbol) => self._currentContext.get(key);\n\n self.setValue = (key: symbol, value: unknown): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n\n self.deleteValue = (key: symbol): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n public getValue!: (key: symbol) => unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n public setValue!: (key: symbol, value: unknown) => Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n public deleteValue!: (key: symbol) => Context;\n}\n\n/** The root context is used as the default parent context when there is no active context */\nexport const ROOT_CONTEXT: Context = new BaseContext();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.d.ts b/node_modules/@opentelemetry/api/build/esm/context/types.d.ts new file mode 100644 index 0000000000..7e866320cf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.d.ts @@ -0,0 +1,52 @@ +export interface Context { + /** + * Get a value from the context. + * + * @param key key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key context key for which to set the value + * @param value value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +export interface ContextManager { + /** + * Get the current active context + */ + active(): Context; + /** + * Run the fn callback with object set as the current active context + * @param context Any object to set as the current active context + * @param fn A callback to be immediately run within a specific context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind an object as the current context (or a specific one) + * @param [context] Optionally specify the context which you want to assign + * @param target Any object to which a context need to be set + */ + bind(context: Context, target: T): T; + /** + * Enable context management + */ + enable(): this; + /** + * Disable context management + */ + disable(): this; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.js b/node_modules/@opentelemetry/api/build/esm/context/types.js new file mode 100644 index 0000000000..928faad02d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
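A minimal sketch of the immutable Context API defined in context.ts and types.ts above: setValue and deleteValue return new contexts and never mutate the receiver, and keys created with createContextKey are Symbol.for symbols, so the same description maps to the same key even across duplicated copies of the package.

    import { createContextKey, ROOT_CONTEXT } from '@opentelemetry/api';

    const KEY = createContextKey('example.request-id'); // Symbol.for('example.request-id')

    // setValue returns a new Context; ROOT_CONTEXT itself is untouched.
    const ctx = ROOT_CONTEXT.setValue(KEY, 'abc-123');
    console.log(ctx.getValue(KEY));          // 'abc-123'
    console.log(ROOT_CONTEXT.getValue(KEY)); // undefined

    // deleteValue likewise returns a new Context without the key.
    console.log(ctx.deleteValue(KEY).getValue(KEY)); // undefined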
+ */ +export {}; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.js.map b/node_modules/@opentelemetry/api/build/esm/context/types.js.map new file mode 100644 index 0000000000..d438aa3156 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/context/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/context/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n getValue(key: symbol): unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n\nexport interface ContextManager {\n /**\n * Get the current active context\n */\n active(): Context;\n\n /**\n * Run the fn callback with object set as the current active context\n * @param context Any object to set as the current active context\n * @param fn A callback to be immediately run within a specific context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType;\n\n /**\n * Bind an object as the current context (or a specific one)\n * @param [context] Optionally specify the context which you want to assign\n * @param target Any object to which a context need to be set\n */\n bind(context: Context, target: T): T;\n\n /**\n * Enable context management\n */\n enable(): this;\n\n /**\n * Disable context management\n */\n disable(): this;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts new file mode 100644 index 0000000000..f06095031a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.d.ts @@ -0,0 +1,20 @@ +import { ComponentLoggerOptions, DiagLogger } from './types'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export declare class DiagComponentLogger implements DiagLogger { + private _namespace; + constructor(props: ComponentLoggerOptions); + debug(...args: any[]): void; + error(...args: any[]): void; + info(...args: any[]): void; + warn(...args: any[]): void; + verbose(...args: any[]): void; +} +//# sourceMappingURL=ComponentLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js new file mode 100644 index 0000000000..47de4dbbad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js @@ -0,0 +1,77 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { getGlobal } from '../internal/global-utils'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. + * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +var DiagComponentLogger = /** @class */ (function () { + function DiagComponentLogger(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + DiagComponentLogger.prototype.debug = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('debug', this._namespace, args); + }; + DiagComponentLogger.prototype.error = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('error', this._namespace, args); + }; + DiagComponentLogger.prototype.info = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('info', this._namespace, args); + }; + DiagComponentLogger.prototype.warn = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('warn', this._namespace, args); + }; + DiagComponentLogger.prototype.verbose = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('verbose', this._namespace, args); + }; + return DiagComponentLogger; +}()); +export { DiagComponentLogger }; +function logProxy(funcName, namespace, args) { + var logger = getGlobal('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName].apply(logger, args); +} +//# sourceMappingURL=ComponentLogger.js.map \ No 
newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map new file mode 100644 index 0000000000..be524a74da --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ComponentLogger.js","sourceRoot":"","sources":["../../../src/diag/ComponentLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,SAAS,EAAE,MAAM,0BAA0B,CAAC;AAGrD;;;;;;;;GAQG;AACH;IAGE,6BAAY,KAA6B;QACvC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,SAAS,IAAI,qBAAqB,CAAC;IAC7D,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,qCAAO,GAAd;QAAe,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QAC3B,OAAO,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;IACH,0BAAC;AAAD,CAAC,AA1BD,IA0BC;;AAED,SAAS,QAAQ,CACf,QAA0B,EAC1B,SAAiB,EACjB,IAAS;IAET,IAAM,MAAM,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;IACjC,6BAA6B;IAC7B,IAAI,CAAC,MAAM,EAAE;QACX,OAAO;KACR;IAED,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAe,IAAoC,EAAE;AACpE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getGlobal } from '../internal/global-utils';\nimport { ComponentLoggerOptions, DiagLogger, DiagLogFunction } from './types';\n\n/**\n * Component Logger which is meant to be used as part of any component which\n * will add automatically additional namespace in front of the log message.\n * It will then forward all message to global diag logger\n * @example\n * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' });\n * cLogger.debug('test');\n * // @opentelemetry/instrumentation-http test\n */\nexport class DiagComponentLogger implements DiagLogger {\n private _namespace: string;\n\n constructor(props: ComponentLoggerOptions) {\n this._namespace = props.namespace || 'DiagComponentLogger';\n }\n\n public debug(...args: any[]): void {\n return logProxy('debug', this._namespace, args);\n }\n\n public error(...args: any[]): void {\n return logProxy('error', this._namespace, args);\n }\n\n public info(...args: any[]): void {\n return logProxy('info', this._namespace, args);\n }\n\n public warn(...args: any[]): void {\n return logProxy('warn', this._namespace, args);\n }\n\n public verbose(...args: any[]): void {\n return logProxy('verbose', this._namespace, args);\n }\n}\n\nfunction 
logProxy(\n funcName: keyof DiagLogger,\n namespace: string,\n args: any\n): void {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) {\n return;\n }\n\n args.unshift(namespace);\n return logger[funcName](...(args as Parameters));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts new file mode 100644 index 0000000000..fa3db1e318 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.d.ts @@ -0,0 +1,38 @@ +import { DiagLogger, DiagLogFunction } from './types'; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export declare class DiagConsoleLogger implements DiagLogger { + constructor(); + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. Useful scenarios would be to log the execution + * order of async operations + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +//# sourceMappingURL=consoleLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js new file mode 100644 index 0000000000..5965b8aafd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
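A usage sketch for the component logger added above, expanding on the @example in its doc comment: every call is forwarded to the globally registered diag logger with the namespace prepended, so a global logger has to be set first (otherwise logProxy is a no-op). diag.setLogger and diag.createComponentLogger are DiagAPI methods referenced by these files but defined elsewhere in the package.

    import { diag, DiagConsoleLogger } from '@opentelemetry/api';

    // Without a registered global logger, component loggers silently do nothing.
    diag.setLogger(new DiagConsoleLogger());

    const logger = diag.createComponentLogger({ namespace: 'my-component' });
    logger.info('started'); // forwarded as: my-component started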
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +var DiagConsoleLogger = /** @class */ (function () { + function DiagConsoleLogger() { + function _consoleFunc(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + var theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (var i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } + return DiagConsoleLogger; +}()); +export { DiagConsoleLogger }; +//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map new file mode 100644 index 0000000000..fbfd0cf764 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"consoleLogger.js","sourceRoot":"","sources":["../../../src/diag/consoleLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,IAAM,UAAU,GAAiD;IAC/D,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE;CAC7B,CAAC;AAEF;;;;GAIG;AACH;IACE;QACE,SAAS,YAAY,CAAC,QAAwB;YAC5C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAI,OAAO,EAAE;oBACX,mFAAmF;oBACnF,sCAAsC;oBACtC,IAAI,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAChC,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,6CAA6C;wBAC7C,sCAAsC;wBACtC,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC;qBACvB;oBAED,uBAAuB;oBACvB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,OAAO,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACrC;iBACF;YACH,CAAC,CAAC;QACJ,CAAC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACvD;IACH,CAAC;IAkCH,wBAAC;AAAD,CAAC,AA3DD,IA2DC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger, DiagLogFunction } from './types';\n\ntype ConsoleMapKeys = 'error' | 'warn' | 'info' | 'debug' | 'trace';\nconst consoleMap: { n: keyof DiagLogger; c: ConsoleMapKeys }[] = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n\n/**\n * A simple Immutable Console based 
diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nexport class DiagConsoleLogger implements DiagLogger {\n constructor() {\n function _consoleFunc(funcName: ConsoleMapKeys): DiagLogFunction {\n return function (...args) {\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n // eslint-disable-next-line no-console\n let theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n // eslint-disable-next-line no-console\n theFunc = console.log;\n }\n\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, args);\n }\n }\n };\n }\n\n for (let i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n public error!: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n public warn!: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n public info!: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario. Useful scenarios would be to log the execution\n * order of async operations\n */\n public debug!: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n public verbose!: DiagLogFunction;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/index.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/index.d.ts new file mode 100644 index 0000000000..11b35740b5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/index.d.ts @@ -0,0 +1,3 @@ +export * from './consoleLogger'; +export * from './types'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/index.js b/node_modules/@opentelemetry/api/build/esm/diag/index.js new file mode 100644 index 0000000000..61581955d2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/index.js @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export * from './consoleLogger'; +export * from './types'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/index.js.map b/node_modules/@opentelemetry/api/build/esm/diag/index.js.map new file mode 100644 index 0000000000..a859e32174 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/diag/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,iBAAiB,CAAC;AAChC,cAAc,SAAS,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './consoleLogger';\nexport * from './types';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts new file mode 100644 index 0000000000..890b9f1ef4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.d.ts @@ -0,0 +1,3 @@ +import { DiagLogger, DiagLogLevel } from '../types'; +export declare function createLogLevelDiagLogger(maxLevel: DiagLogLevel, logger: DiagLogger): DiagLogger; +//# sourceMappingURL=logLevelLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js new file mode 100644 index 0000000000..aedab38dc3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { DiagLogLevel } from '../types'; +export function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < DiagLogLevel.NONE) { + maxLevel = DiagLogLevel.NONE; + } + else if (maxLevel > DiagLogLevel.ALL) { + maxLevel = DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + var theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', DiagLogLevel.ERROR), + warn: _filterFunc('warn', DiagLogLevel.WARN), + info: _filterFunc('info', DiagLogLevel.INFO), + debug: _filterFunc('debug', DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE), + }; +} +//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map new file mode 100644 index 0000000000..7f9fafde04 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logLevelLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/logLevelLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAA+B,YAAY,EAAE,MAAM,UAAU,CAAC;AAErE,MAAM,UAAU,wBAAwB,CACtC,QAAsB,EACtB,MAAkB;IAElB,IAAI,QAAQ,GAAG,YAAY,CAAC,IAAI,EAAE;QAChC,QAAQ,GAAG,YAAY,CAAC,IAAI,CAAC;KAC9B;SAAM,IAAI,QAAQ,GAAG,YAAY,CAAC,GAAG,EAAE;QACtC,QAAQ,GAAG,YAAY,CAAC,GAAG,CAAC;KAC7B;IAED,0CAA0C;IAC1C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAEtB,SAAS,WAAW,CAClB,QAA0B,EAC1B,QAAsB;QAEtB,IAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,OAAO,OAAO,KAAK,UAAU,IAAI,QAAQ,IAAI,QAAQ,EAAE;YACzD,OAAO,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;QACD,OAAO,cAAa,CAAC,CAAC;IACxB,CAAC;IAED,OAAO;QACL,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,YAAY,CAAC,IAAI,CAAC;QAC5C,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,YAAY,CAAC,KAAK,CAAC;QAC/C,OAAO,EAAE,WAAW,CAAC,SAAS,EAAE,YAAY,CAAC,OAAO,CAAC;KACtD,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogFunction, DiagLogger, DiagLogLevel } from '../types';\n\nexport function createLogLevelDiagLogger(\n maxLevel: DiagLogLevel,\n logger: DiagLogger\n): DiagLogger {\n if (maxLevel < DiagLogLevel.NONE) {\n maxLevel = DiagLogLevel.NONE;\n } else if (maxLevel > DiagLogLevel.ALL) {\n maxLevel = DiagLogLevel.ALL;\n }\n\n // In case the logger is null or undefined\n logger = logger || {};\n\n function _filterFunc(\n funcName: keyof DiagLogger,\n theLevel: DiagLogLevel\n ): DiagLogFunction {\n const theFunc = logger[funcName];\n\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () {};\n }\n\n return {\n error: 
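A sketch of the level-filtering wrapper compiled above: it keeps a forwarding function only when the entry's level is numerically less than or equal to maxLevel and substitutes a no-op otherwise. The module lives under diag/internal and is not re-exported from the package entrypoint, so the deep import below is illustrative only.

    import { DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';
    // Internal helper; deep import shown only for illustration.
    import { createLogLevelDiagLogger } from '@opentelemetry/api/build/esm/diag/internal/logLevelLogger';

    // With WARN (50) as maxLevel: error (30) and warn (50) pass through, while
    // info (60), debug (70) and verbose (80) are replaced by no-ops.
    const filtered = createLogLevelDiagLogger(DiagLogLevel.WARN, new DiagConsoleLogger());

    filtered.error('kept');   // reaches console.error
    filtered.warn('kept');    // reaches console.warn
    filtered.info('dropped'); // no-op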
_filterFunc('error', DiagLogLevel.ERROR),\n warn: _filterFunc('warn', DiagLogLevel.WARN),\n info: _filterFunc('info', DiagLogLevel.INFO),\n debug: _filterFunc('debug', DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts new file mode 100644 index 0000000000..ac71ee3b58 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.d.ts @@ -0,0 +1,8 @@ +import { DiagLogger } from '../types'; +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. + * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export declare function createNoopDiagLogger(): DiagLogger; +//# sourceMappingURL=noopLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js new file mode 100644 index 0000000000..7d5ba63d89 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js @@ -0,0 +1,31 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +function noopLogFunction() { } +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. 
+ * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export function createNoopDiagLogger() { + return { + verbose: noopLogFunction, + debug: noopLogFunction, + info: noopLogFunction, + warn: noopLogFunction, + error: noopLogFunction, + }; +} +//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map new file mode 100644 index 0000000000..bf20aeaa68 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"noopLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/noopLogger.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,SAAS,eAAe,KAAI,CAAC;AAE7B;;;;GAIG;AACH,MAAM,UAAU,oBAAoB;IAClC,OAAO;QACL,OAAO,EAAE,eAAe;QACxB,KAAK,EAAE,eAAe;QACtB,IAAI,EAAE,eAAe;QACrB,IAAI,EAAE,eAAe;QACrB,KAAK,EAAE,eAAe;KACvB,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger } from '../types';\n\nfunction noopLogFunction() {}\n\n/**\n * Returns a No-Op Diagnostic logger where all messages do nothing.\n * @implements {@link DiagLogger}\n * @returns {DiagLogger}\n */\nexport function createNoopDiagLogger(): DiagLogger {\n return {\n verbose: noopLogFunction,\n debug: noopLogFunction,\n info: noopLogFunction,\n warn: noopLogFunction,\n error: noopLogFunction,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts b/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts new file mode 100644 index 0000000000..92b0c57868 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.d.ts @@ -0,0 +1,70 @@ +export declare type DiagLogFunction = (message: string, ...args: unknown[]) => void; +/** + * Defines an internal diagnostic logger interface which is used to log internal diagnostic + * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function. + * API provided implementations include :- + * - a No-Op {@link createNoopDiagLogger} + * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger} + * - a general Console {@link DiagConsoleLogger} version. + */ +export interface DiagLogger { + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. 
+ * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. + * For example: Logging the order of execution of async operations. + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export declare enum DiagLogLevel { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + NONE = 0, + /** Identifies an error scenario */ + ERROR = 30, + /** Identifies a warning scenario */ + WARN = 50, + /** General informational log message */ + INFO = 60, + /** General debug log message */ + DEBUG = 70, + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + VERBOSE = 80, + /** Used to set the logging level to include all logging */ + ALL = 9999 +} +/** + * Defines options for ComponentLogger + */ +export interface ComponentLoggerOptions { + namespace: string; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.js b/node_modules/@opentelemetry/api/build/esm/diag/types.js new file mode 100644 index 0000000000..306585e830 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. 
+ */ +export var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel || (DiagLogLevel = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.js.map b/node_modules/@opentelemetry/api/build/esm/diag/types.js.map new file mode 100644 index 0000000000..aa3a7f1ca4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/diag/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/diag/types.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AA+CH;;;;GAIG;AACH,MAAM,CAAN,IAAY,YAwBX;AAxBD,WAAY,YAAY;IACtB,uFAAuF;IACvF,+CAAQ,CAAA;IAER,mCAAmC;IACnC,kDAAU,CAAA;IAEV,oCAAoC;IACpC,gDAAS,CAAA;IAET,wCAAwC;IACxC,gDAAS,CAAA;IAET,gCAAgC;IAChC,kDAAU,CAAA;IAEV;;;OAGG;IACH,sDAAY,CAAA;IAEZ,2DAA2D;IAC3D,gDAAU,CAAA;AACZ,CAAC,EAxBW,YAAY,KAAZ,YAAY,QAwBvB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type DiagLogFunction = (message: string, ...args: unknown[]) => void;\n\n/**\n * Defines an internal diagnostic logger interface which is used to log internal diagnostic\n * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function.\n * API provided implementations include :-\n * - a No-Op {@link createNoopDiagLogger}\n * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger}\n * - a general Console {@link DiagConsoleLogger} version.\n */\nexport interface DiagLogger {\n /** Log an error scenario that was not expected and caused the requested operation to fail. 
*/\n error: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n warn: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n info: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario.\n * For example: Logging the order of execution of async operations.\n */\n debug: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n verbose: DiagLogFunction;\n}\n\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nexport enum DiagLogLevel {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n NONE = 0,\n\n /** Identifies an error scenario */\n ERROR = 30,\n\n /** Identifies a warning scenario */\n WARN = 50,\n\n /** General informational log message */\n INFO = 60,\n\n /** General debug log message */\n DEBUG = 70,\n\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n VERBOSE = 80,\n\n /** Used to set the logging level to include all logging */\n ALL = 9999,\n}\n\n/**\n * Defines options for ComponentLogger\n */\nexport interface ComponentLoggerOptions {\n namespace: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.d.ts b/node_modules/@opentelemetry/api/build/esm/index.d.ts new file mode 100644 index 0000000000..33ff2300ad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.d.ts @@ -0,0 +1,57 @@ +export * from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export * from './common/Exception'; +export * from './common/Time'; +export * from './common/Attributes'; +export * from './diag'; +export * from './propagation/TextMapPropagator'; +export * from './trace/attributes'; +export * from './trace/link'; +export * from './trace/ProxyTracer'; +export * from './trace/ProxyTracerProvider'; +export * from './trace/Sampler'; +export * from './trace/SamplingResult'; +export * from './trace/span_context'; +export * from './trace/span_kind'; +export * from './trace/span'; +export * from './trace/SpanOptions'; +export * from './trace/status'; +export * from './trace/trace_flags'; +export * from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export * from './trace/tracer_provider'; +export * from './trace/tracer'; +export * from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, 
INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export * from './context/context'; +export * from './context/types'; +import { ContextAPI } from './api/context'; +export type { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export declare const context: ContextAPI; +import { TraceAPI } from './api/trace'; +export type { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export declare const trace: TraceAPI; +import { PropagationAPI } from './api/propagation'; +export type { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export declare const propagation: PropagationAPI; +import { DiagAPI } from './api/diag'; +export type { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +export declare const diag: DiagAPI; +declare const _default: { + trace: TraceAPI; + context: ContextAPI; + propagation: PropagationAPI; + diag: DiagAPI; +}; +export default _default; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.js b/node_modules/@opentelemetry/api/build/esm/index.js new file mode 100644 index 0000000000..97ce346424 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.js @@ -0,0 +1,67 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export * from './common/Exception'; +export * from './common/Time'; +export * from './common/Attributes'; +export * from './diag'; +export * from './propagation/TextMapPropagator'; +export * from './trace/attributes'; +export * from './trace/link'; +export * from './trace/ProxyTracer'; +export * from './trace/ProxyTracerProvider'; +export * from './trace/Sampler'; +export * from './trace/SamplingResult'; +export * from './trace/span_context'; +export * from './trace/span_kind'; +export * from './trace/span'; +export * from './trace/SpanOptions'; +export * from './trace/status'; +export * from './trace/trace_flags'; +export * from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export * from './trace/tracer_provider'; +export * from './trace/tracer'; +export * from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export * from './context/context'; +export * from './context/types'; +import { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export var context = ContextAPI.getInstance(); +import { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export var trace = TraceAPI.getInstance(); +import { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export var propagation = PropagationAPI.getInstance(); +import { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. 
+ */ +export var diag = DiagAPI.instance(); +export default { + trace: trace, + context: context, + propagation: propagation, + diag: diag, +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.js.map b/node_modules/@opentelemetry/api/build/esm/index.js.map new file mode 100644 index 0000000000..4a82f0b657 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,iBAAiB,CAAC;AAChC,OAAO,EAAE,8BAA8B,EAAE,MAAM,iBAAiB,CAAC;AACjE,cAAc,oBAAoB,CAAC;AACnC,cAAc,eAAe,CAAC;AAC9B,cAAc,qBAAqB,CAAC;AACpC,cAAc,QAAQ,CAAC;AACvB,cAAc,iCAAiC,CAAC;AAChD,cAAc,oBAAoB,CAAC;AACnC,cAAc,cAAc,CAAC;AAC7B,cAAc,qBAAqB,CAAC;AACpC,cAAc,6BAA6B,CAAC;AAC5C,cAAc,iBAAiB,CAAC;AAChC,cAAc,wBAAwB,CAAC;AACvC,cAAc,sBAAsB,CAAC;AACrC,cAAc,mBAAmB,CAAC;AAClC,cAAc,cAAc,CAAC;AAC7B,cAAc,qBAAqB,CAAC;AACpC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,qBAAqB,CAAC;AACpC,cAAc,qBAAqB,CAAC;AACpC,OAAO,EAAE,gBAAgB,EAAE,MAAM,wBAAwB,CAAC;AAC1D,cAAc,yBAAyB,CAAC;AACxC,cAAc,gBAAgB,CAAC;AAC/B,cAAc,wBAAwB,CAAC;AAEvC,OAAO,EACL,kBAAkB,EAClB,cAAc,EACd,aAAa,GACd,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EACL,cAAc,EACd,eAAe,EACf,oBAAoB,GACrB,MAAM,gCAAgC,CAAC;AAExC,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAEhC,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,iCAAiC;AACjC,MAAM,CAAC,IAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;AAEhD,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAC;AAEvC,+BAA+B;AAC/B,MAAM,CAAC,IAAM,KAAK,GAAG,QAAQ,CAAC,WAAW,EAAE,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAEnD,qCAAqC;AACrC,MAAM,CAAC,IAAM,WAAW,GAAG,cAAc,CAAC,WAAW,EAAE,CAAC;AAExD,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAGrC;;;;;GAKG;AACH,MAAM,CAAC,IAAM,IAAI,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;AAEvC,eAAe;IACb,KAAK,OAAA;IACL,OAAO,SAAA;IACP,WAAW,aAAA;IACX,IAAI,MAAA;CACL,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './baggage/types';\nexport { baggageEntryMetadataFromString } from './baggage/utils';\nexport * from './common/Exception';\nexport * from './common/Time';\nexport * from './common/Attributes';\nexport * from './diag';\nexport * from './propagation/TextMapPropagator';\nexport * from './trace/attributes';\nexport * from './trace/link';\nexport * from './trace/ProxyTracer';\nexport * from './trace/ProxyTracerProvider';\nexport * from './trace/Sampler';\nexport * from './trace/SamplingResult';\nexport * from './trace/span_context';\nexport * from './trace/span_kind';\nexport * from './trace/span';\nexport * from './trace/SpanOptions';\nexport * from './trace/status';\nexport * from './trace/trace_flags';\nexport * from './trace/trace_state';\nexport { createTraceState } from './trace/internal/utils';\nexport * from './trace/tracer_provider';\nexport * from './trace/tracer';\nexport * from './trace/tracer_options';\n\nexport {\n isSpanContextValid,\n isValidTraceId,\n isValidSpanId,\n} from 
'./trace/spancontext-utils';\n\nexport {\n INVALID_SPANID,\n INVALID_TRACEID,\n INVALID_SPAN_CONTEXT,\n} from './trace/invalid-span-constants';\n\nexport * from './context/context';\nexport * from './context/types';\n\nimport { ContextAPI } from './api/context';\nexport type { ContextAPI } from './api/context';\n/** Entrypoint for context API */\nexport const context = ContextAPI.getInstance();\n\nimport { TraceAPI } from './api/trace';\nexport type { TraceAPI } from './api/trace';\n/** Entrypoint for trace API */\nexport const trace = TraceAPI.getInstance();\n\nimport { PropagationAPI } from './api/propagation';\nexport type { PropagationAPI } from './api/propagation';\n/** Entrypoint for propagation API */\nexport const propagation = PropagationAPI.getInstance();\n\nimport { DiagAPI } from './api/diag';\nexport type { DiagAPI } from './api/diag';\n\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexport const diag = DiagAPI.instance();\n\nexport default {\n trace,\n context,\n propagation,\n diag,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts new file mode 100644 index 0000000000..bb90097f98 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.d.ts @@ -0,0 +1,16 @@ +import { ContextManager } from '../context/types'; +import { DiagLogger } from '../diag'; +import { TextMapPropagator } from '../propagation/TextMapPropagator'; +import type { TracerProvider } from '../trace/tracer_provider'; +export declare function registerGlobal(type: Type, instance: OTelGlobalAPI[Type], diag: DiagLogger, allowOverride?: boolean): boolean; +export declare function getGlobal(type: Type): OTelGlobalAPI[Type] | undefined; +export declare function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger): void; +declare type OTelGlobalAPI = { + version: string; + diag?: DiagLogger; + trace?: TracerProvider; + context?: ContextManager; + propagation?: TextMapPropagator; +}; +export {}; +//# sourceMappingURL=global-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js new file mode 100644 index 0000000000..2fddde35df --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js @@ -0,0 +1,59 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { _globalThis } from '../platform'; +import { VERSION } from '../version'; +import { isCompatible } from './semver'; +var major = VERSION.split('.')[0]; +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." 
+ major); +var _global = _globalThis; +export function registerGlobal(type, instance, diag, allowOverride) { + var _a; + if (allowOverride === void 0) { allowOverride = false; } + var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== VERSION) { + // All registered APIs must be of the same version exactly + var err = new Error('@opentelemetry/api: All API registration versions must match'); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + VERSION + "."); + return true; +} +export function getGlobal(type) { + var _a, _b; + var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !isCompatible(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; +} +export function unregisterGlobal(type, diag) { + diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + VERSION + "."); + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map new file mode 100644 index 0000000000..b8ea29e2b1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js.map @@ -0,0 +1 @@ 
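A minimal usage sketch of the global registration helpers compiled above (registerGlobal, getGlobal, unregisterGlobal). The deep build/esm import paths are assumptions taken from the file locations added in this diff; the published package may treat these modules as internal, and DiagAPI is used as the logger only because it is available in the same patch.

// Illustrative only: exercises the compiled global-utils module shown above.
import { DiagAPI } from '@opentelemetry/api/build/esm/api/diag';
import {
  registerGlobal,
  getGlobal,
  unregisterGlobal,
} from '@opentelemetry/api/build/esm/internal/global-utils';

const diag = DiagAPI.instance();

// The first registration for a slot stores the instance on globalThis under
// Symbol.for('opentelemetry.js.api.<major>').
registerGlobal('diag', diag, diag);       // true
// A second registration of the same slot is rejected unless allowOverride is set.
registerGlobal('diag', diag, diag);       // false (duplicate)
registerGlobal('diag', diag, diag, true); // true  (override allowed)

// getGlobal returns the stored instance only if the recorded API version is
// semver-compatible with this copy of the API (see internal/semver below).
const globalDiag = getGlobal('diag');

// unregisterGlobal simply deletes the slot again.
unregisterGlobal('diag', diag);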
+{"version":3,"file":"global-utils.js","sourceRoot":"","sources":["../../../src/internal/global-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAC;AAG1C,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AACrC,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAExC,IAAM,KAAK,GAAG,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,IAAM,4BAA4B,GAAG,MAAM,CAAC,GAAG,CAC7C,0BAAwB,KAAO,CAChC,CAAC;AAEF,IAAM,OAAO,GAAG,WAAyB,CAAC;AAE1C,MAAM,UAAU,cAAc,CAC5B,IAAU,EACV,QAA6B,EAC7B,IAAgB,EAChB,aAAqB;;IAArB,8BAAA,EAAA,qBAAqB;IAErB,IAAM,GAAG,GAAG,CAAC,OAAO,CAAC,4BAA4B,CAAC,GAAG,MAAA,OAAO,CAC1D,4BAA4B,CAC7B,mCAAI;QACH,OAAO,EAAE,OAAO;KACjB,CAAC,CAAC;IAEH,IAAI,CAAC,aAAa,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE;QAC/B,yCAAyC;QACzC,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,kEAAgE,IAAM,CACvE,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,IAAI,GAAG,CAAC,OAAO,KAAK,OAAO,EAAE;QAC3B,0DAA0D;QAC1D,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,8DAA8D,CAC/D,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,GAAG,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;IACrB,IAAI,CAAC,KAAK,CACR,iDAA+C,IAAI,UAAK,OAAO,MAAG,CACnE,CAAC;IAEF,OAAO,IAAI,CAAC;AACd,CAAC;AAED,MAAM,UAAU,SAAS,CACvB,IAAU;;IAEV,IAAM,aAAa,GAAG,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAE,OAAO,CAAC;IACrE,IAAI,CAAC,aAAa,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,EAAE;QAClD,OAAO;KACR;IACD,OAAO,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAG,IAAI,CAAC,CAAC;AACvD,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,IAAyB,EAAE,IAAgB;IAC1E,IAAI,CAAC,KAAK,CACR,oDAAkD,IAAI,UAAK,OAAO,MAAG,CACtE,CAAC;IACF,IAAM,GAAG,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;IAElD,IAAI,GAAG,EAAE;QACP,OAAO,GAAG,CAAC,IAAI,CAAC,CAAC;KAClB;AACH,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextManager } from '../context/types';\nimport { DiagLogger } from '../diag';\nimport { _globalThis } from '../platform';\nimport { TextMapPropagator } from '../propagation/TextMapPropagator';\nimport type { TracerProvider } from '../trace/tracer_provider';\nimport { VERSION } from '../version';\nimport { isCompatible } from './semver';\n\nconst major = VERSION.split('.')[0];\nconst GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\n `opentelemetry.js.api.${major}`\n);\n\nconst _global = _globalThis as OTelGlobal;\n\nexport function registerGlobal(\n type: Type,\n instance: OTelGlobalAPI[Type],\n diag: DiagLogger,\n allowOverride = false\n): boolean {\n const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = _global[\n GLOBAL_OPENTELEMETRY_API_KEY\n ] ?? 
{\n version: VERSION,\n });\n\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n const err = new Error(\n `@opentelemetry/api: Attempted duplicate registration of API: ${type}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n if (api.version !== VERSION) {\n // All registered APIs must be of the same version exactly\n const err = new Error(\n '@opentelemetry/api: All API registration versions must match'\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n api[type] = instance;\n diag.debug(\n `@opentelemetry/api: Registered a global for ${type} v${VERSION}.`\n );\n\n return true;\n}\n\nexport function getGlobal(\n type: Type\n): OTelGlobalAPI[Type] | undefined {\n const globalVersion = _global[GLOBAL_OPENTELEMETRY_API_KEY]?.version;\n if (!globalVersion || !isCompatible(globalVersion)) {\n return;\n }\n return _global[GLOBAL_OPENTELEMETRY_API_KEY]?.[type];\n}\n\nexport function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger) {\n diag.debug(\n `@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`\n );\n const api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n\n if (api) {\n delete api[type];\n }\n}\n\ntype OTelGlobal = {\n [GLOBAL_OPENTELEMETRY_API_KEY]?: OTelGlobalAPI;\n};\n\ntype OTelGlobalAPI = {\n version: string;\n\n diag?: DiagLogger;\n trace?: TracerProvider;\n context?: ContextManager;\n propagation?: TextMapPropagator;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts b/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts new file mode 100644 index 0000000000..d9f4259e45 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.d.ts @@ -0,0 +1,34 @@ +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. + * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export declare function _makeCompatibilityCheck(ownVersion: string): (globalVersion: string) => boolean; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export declare const isCompatible: (globalVersion: string) => boolean; +//# sourceMappingURL=semver.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.js b/node_modules/@opentelemetry/api/build/esm/internal/semver.js new file mode 100644 index 0000000000..2a788a0b81 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.js @@ -0,0 +1,118 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { VERSION } from '../version'; +var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export function _makeCompatibilityCheck(ownVersion) { + var acceptedVersions = new Set([ownVersion]); + var rejectedVersions = new Set(); + var myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return function () { return false; }; + } + var ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + var globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + var globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +/** + * Test an API version to see if it is compatible with this API. 
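A minimal sketch of the compatibility semantics implemented by _makeCompatibilityCheck above; the version strings and the deep build/esm import path are illustrative assumptions only.

// Illustrative only: exercises the compiled semver helper shown above.
import { _makeCompatibilityCheck } from '@opentelemetry/api/build/esm/internal/semver';

const check = _makeCompatibilityCheck('1.3.0');
check('1.3.0'); // true  - exact match is always compatible
check('1.4.2'); // true  - the global may be newer in minor/patch
check('1.2.9'); // false - a 1.3 caller may need functions missing from 1.2
check('2.0.0'); // false - major versions must match exactly

// For 0.x versions the minor is treated as the major and the patch as the minor.
const check0 = _makeCompatibilityCheck('0.20.1');
check0('0.20.5'); // true
check0('0.21.0'); // false

// A prerelease tag forces exact matching.
const pre = _makeCompatibilityCheck('1.0.0-rc.1');
pre('1.0.0-rc.1'); // true
pre('1.0.0');      // false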
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export var isCompatible = _makeCompatibilityCheck(VERSION); +//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map b/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map new file mode 100644 index 0000000000..75579a4a0c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/internal/semver.js.map @@ -0,0 +1 @@ +{"version":3,"file":"semver.js","sourceRoot":"","sources":["../../../src/internal/semver.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,OAAO,EAAE,MAAM,YAAY,CAAC;AAErC,IAAM,EAAE,GAAG,+BAA+B,CAAC;AAE3C;;;;;;;;;;;;;;;GAeG;AACH,MAAM,UAAU,uBAAuB,CACrC,UAAkB;IAElB,IAAM,gBAAgB,GAAG,IAAI,GAAG,CAAS,CAAC,UAAU,CAAC,CAAC,CAAC;IACvD,IAAM,gBAAgB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE3C,IAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC5C,IAAI,CAAC,cAAc,EAAE;QACnB,6DAA6D;QAC7D,OAAO,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;KACpB;IAED,IAAM,gBAAgB,GAAG;QACvB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,UAAU,EAAE,cAAc,CAAC,CAAC,CAAC;KAC9B,CAAC;IAEF,kEAAkE;IAClE,IAAI,gBAAgB,CAAC,UAAU,IAAI,IAAI,EAAE;QACvC,OAAO,SAAS,YAAY,CAAC,aAAqB;YAChD,OAAO,aAAa,KAAK,UAAU,CAAC;QACtC,CAAC,CAAC;KACH;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,SAAS,YAAY,CAAC,aAAqB;QAChD,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,IAAI,CAAC;SACb;QAED,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;QAED,IAAM,kBAAkB,GAAG,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACnD,IAAI,CAAC,kBAAkB,EAAE;YACvB,6BAA6B;YAC7B,sDAAsD;YACtD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAM,mBAAmB,GAAG;YAC1B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,UAAU,EAAE,kBAAkB,CAAC,CAAC,CAAC;SAClC,CAAC;QAEF,qEAAqE;QACrE,IAAI,mBAAmB,CAAC,UAAU,IAAI,IAAI,EAAE;YAC1C,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,4BAA4B;QAC5B,IAAI,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK,EAAE;YACxD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,KAAK,CAAC,EAAE;YAChC,IACE,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK;gBACpD,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EACnD;gBACA,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EAAE;YACvD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC,CAAC;AACJ,CAAC;AAED;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,IAAM,YAAY,GAAG,uBAAuB,CAAC,OAAO,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache 
License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { VERSION } from '../version';\n\nconst re = /^(\\d+)\\.(\\d+)\\.(\\d+)(-(.+))?$/;\n\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nexport function _makeCompatibilityCheck(\n ownVersion: string\n): (globalVersion: string) => boolean {\n const acceptedVersions = new Set([ownVersion]);\n const rejectedVersions = new Set();\n\n const myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return () => false;\n }\n\n const ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n prerelease: myVersionMatch[4],\n };\n\n // if ownVersion has a prerelease tag, versions must match exactly\n if (ownVersionParsed.prerelease != null) {\n return function isExactmatch(globalVersion: string): boolean {\n return globalVersion === ownVersion;\n };\n }\n\n function _reject(v: string) {\n rejectedVersions.add(v);\n return false;\n }\n\n function _accept(v: string) {\n acceptedVersions.add(v);\n return true;\n }\n\n return function isCompatible(globalVersion: string): boolean {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n\n const globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n\n const globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n prerelease: globalVersionMatch[4],\n };\n\n // if globalVersion has a prerelease tag, versions must match exactly\n if (globalVersionParsed.prerelease != null) {\n return _reject(globalVersion);\n }\n\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n\n if (ownVersionParsed.major === 0) {\n if (\n ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch\n ) {\n return _accept(globalVersion);\n }\n\n 
return _reject(globalVersion);\n }\n\n if (ownVersionParsed.minor <= globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n\n return _reject(globalVersion);\n };\n}\n\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexport const isCompatible = _makeCompatibilityCheck(VERSION);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts new file mode 100644 index 0000000000..e73fd73e55 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.d.ts @@ -0,0 +1,10 @@ +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js new file mode 100644 index 0000000000..1d20f5d958 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// Updates to this file should also be replicated to @opentelemetry/api-metrics and +// @opentelemetry/core too. +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef +export var _globalThis = typeof globalThis === 'object' ? globalThis : + typeof self === 'object' ? self : + typeof window === 'object' ? window : + typeof global === 'object' ? 
global : + {}; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map new file mode 100644 index 0000000000..5936f5fb2b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/browser/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,mFAAmF;AACnF,2BAA2B;AAE3B;;;;;;GAMG;AAEH,gEAAgE;AAChE,8EAA8E;AAC9E,MAAM,CAAC,IAAM,WAAW,GACtB,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC;IAC7C,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACjC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;YACrC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;gBACrC,EAAuB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Updates to this file should also be replicated to @opentelemetry/api-metrics and\n// @opentelemetry/core too.\n\n/**\n * - globalThis (New standard)\n * - self (Will return the current window instance for supported browsers)\n * - window (fallback for older browser implementations)\n * - global (NodeJS implementation)\n * - (When all else fails)\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef\nexport const _globalThis: typeof globalThis =\n typeof globalThis === 'object' ? globalThis :\n typeof self === 'object' ? self :\n typeof window === 'object' ? window :\n typeof global === 'object' ? global :\n {} as typeof globalThis;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts new file mode 100644 index 0000000000..ba20e1232c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js new file mode 100644 index 0000000000..efcad2e9c3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
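A small sketch of what the browser _globalThis shim above resolves to; the deep build/esm import path is an assumption based on the file layout in this diff.

// In any runtime that already provides globalThis, the first branch wins;
// self/window/global are only consulted as fallbacks for older environments.
import { _globalThis } from '@opentelemetry/api/build/esm/platform/browser/globalThis';

console.log(_globalThis === globalThis); // true wherever globalThis exists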
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map new file mode 100644 index 0000000000..07adcd9de6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/browser/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts new file mode 100644 index 0000000000..90595da96f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.d.ts @@ -0,0 +1,2 @@ +export * from './node'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.js b/node_modules/@opentelemetry/api/build/esm/platform/index.js new file mode 100644 index 0000000000..c0df125ca9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './node'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/index.js.map new file mode 100644 index 0000000000..9494c5314b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/platform/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,QAAQ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './node';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts new file mode 100644 index 0000000000..e3c83e5d29 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.d.ts @@ -0,0 +1,3 @@ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js new file mode 100644 index 0000000000..feb9700075 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js @@ -0,0 +1,19 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +export var _globalThis = typeof globalThis === 'object' ? 
globalThis : global; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map new file mode 100644 index 0000000000..5911136a33 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/node/globalThis.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,gEAAgE;AAChE,oEAAoE;AACpE,MAAM,CAAC,IAAM,WAAW,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexport const _globalThis = typeof globalThis === 'object' ? globalThis : global;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts b/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts new file mode 100644 index 0000000000..ba20e1232c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js new file mode 100644 index 0000000000..efcad2e9c3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export * from './globalThis'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map new file mode 100644 index 0000000000..f27971869f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/node/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,cAAc,cAAc,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts new file mode 100644 index 0000000000..398021f3b2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.d.ts @@ -0,0 +1,13 @@ +import { Context } from '../context/types'; +import { TextMapPropagator } from './TextMapPropagator'; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export declare class NoopTextMapPropagator implements TextMapPropagator { + /** Noop inject function does nothing */ + inject(_context: Context, _carrier: unknown): void; + /** Noop extract function does nothing and returns the input context */ + extract(context: Context, _carrier: unknown): Context; + fields(): string[]; +} +//# sourceMappingURL=NoopTextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js new file mode 100644 index 0000000000..8e62901201 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js @@ -0,0 +1,34 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * No-op implementations of {@link TextMapPropagator}. 
+ */ +var NoopTextMapPropagator = /** @class */ (function () { + function NoopTextMapPropagator() { + } + /** Noop inject function does nothing */ + NoopTextMapPropagator.prototype.inject = function (_context, _carrier) { }; + /** Noop extract function does nothing and returns the input context */ + NoopTextMapPropagator.prototype.extract = function (context, _carrier) { + return context; + }; + NoopTextMapPropagator.prototype.fields = function () { + return []; + }; + return NoopTextMapPropagator; +}()); +export { NoopTextMapPropagator }; +//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map new file mode 100644 index 0000000000..40be5661aa --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/NoopTextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH;;GAEG;AACH;IAAA;IAUA,CAAC;IATC,wCAAwC;IACxC,sCAAM,GAAN,UAAO,QAAiB,EAAE,QAAiB,IAAS,CAAC;IACrD,uEAAuE;IACvE,uCAAO,GAAP,UAAQ,OAAgB,EAAE,QAAiB;QACzC,OAAO,OAAO,CAAC;IACjB,CAAC;IACD,sCAAM,GAAN;QACE,OAAO,EAAE,CAAC;IACZ,CAAC;IACH,4BAAC;AAAD,CAAC,AAVD,IAUC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { TextMapPropagator } from './TextMapPropagator';\n\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nexport class NoopTextMapPropagator implements TextMapPropagator {\n /** Noop inject function does nothing */\n inject(_context: Context, _carrier: unknown): void {}\n /** Noop extract function does nothing and returns the input context */\n extract(context: Context, _carrier: unknown): Context {\n return context;\n }\n fields(): string[] {\n return [];\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts new file mode 100644 index 0000000000..dc39367bfb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.d.ts @@ -0,0 +1,84 @@ +import { Context } from '../context/types'; +/** + * Injects `Context` into and extracts it from carriers that travel + * in-band across process boundaries. Encoding is expected to conform to the + * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request + * headers. + * + * The carrier of propagated data on both the client (injector) and server + * (extractor) side is usually an object such as http headers. Propagation is + * usually implemented via library-specific request interceptors, where the + * client-side injects values and the server-side extracts them. 
+ */ +export interface TextMapPropagator { + /** + * Injects values from a given `Context` into a carrier. + * + * OpenTelemetry defines a common set of format values (TextMapPropagator), + * and each has an expected `carrier` type. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param setter an optional {@link TextMapSetter}. If undefined, values will be + * set by direct object assignment. + */ + inject(context: Context, carrier: Carrier, setter: TextMapSetter): void; + /** + * Given a `Context` and a carrier, extract context values from a + * carrier and return a new context, created from the old context, with the + * extracted values. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all + * own properties, and keys will be accessed by direct object access. + */ + extract(context: Context, carrier: Carrier, getter: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; +} +/** + * A setter is specified by the caller to define a specific method + * to set key/value pairs on the carrier within a propagator. + */ +export interface TextMapSetter { + /** + * Callback used to set a key/value pair on an object. + * + * Should be called by the propagator each time a key/value pair + * should be set, and should set that key/value pair on the propagator. + * + * @param carrier object or class which carries key/value pairs + * @param key string key to modify + * @param value value to be set to the key on the carrier + */ + set(carrier: Carrier, key: string, value: string): void; +} +/** + * A getter is specified by the caller to define a specific method + * to get the value of a key from a carrier. + */ +export interface TextMapGetter { + /** + * Get a list of all keys available on the carrier. + * + * @param carrier + */ + keys(carrier: Carrier): string[]; + /** + * Get the value of a specific key from the carrier. + * + * @param carrier + * @param key + */ + get(carrier: Carrier, key: string): undefined | string | string[]; +} +export declare const defaultTextMapGetter: TextMapGetter; +export declare const defaultTextMapSetter: TextMapSetter; +//# sourceMappingURL=TextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js new file mode 100644 index 0000000000..c5f5311fb1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export var defaultTextMapGetter = { + get: function (carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys: function (carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +export var defaultTextMapSetter = { + set: function (carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map new file mode 100644 index 0000000000..9de7d06ab5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/TextMapPropagator.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAkGH,MAAM,CAAC,IAAM,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG;QACd,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;IACtB,CAAC;IAED,IAAI,YAAC,OAAO;QACV,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,EAAE,CAAC;SACX;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF,CAAC;AAEF,MAAM,CAAC,IAAM,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG,EAAE,KAAK;QACrB,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO;SACR;QAED,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\n\n/**\n * Injects `Context` into and extracts it from carriers that travel\n * in-band across process boundaries. Encoding is expected to conform to the\n * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request\n * headers.\n *\n * The carrier of propagated data on both the client (injector) and server\n * (extractor) side is usually an object such as http headers. Propagation is\n * usually implemented via library-specific request interceptors, where the\n * client-side injects values and the server-side extracts them.\n */\nexport interface TextMapPropagator {\n /**\n * Injects values from a given `Context` into a carrier.\n *\n * OpenTelemetry defines a common set of format values (TextMapPropagator),\n * and each has an expected `carrier` type.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param setter an optional {@link TextMapSetter}. 
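A minimal sketch of the default carrier accessors compiled above. defaultTextMapGetter and defaultTextMapSetter are re-exported from the package entry point via the propagation exports added in this diff; the header name and value used here are placeholders for illustration.

// Treats a plain object as the carrier, the way HTTP header maps are usually handled.
import { defaultTextMapGetter, defaultTextMapSetter } from '@opentelemetry/api';

const carrier = {};
defaultTextMapSetter.set(carrier, 'traceparent', 'placeholder-value');
defaultTextMapGetter.keys(carrier);               // ['traceparent']
defaultTextMapGetter.get(carrier, 'traceparent'); // 'placeholder-value'
defaultTextMapGetter.get(null, 'traceparent');    // undefined (null carriers are tolerated)
defaultTextMapGetter.keys(null);                  // []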
If undefined, values will be\n * set by direct object assignment.\n */\n inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter\n ): void;\n\n /**\n * Given a `Context` and a carrier, extract context values from a\n * carrier and return a new context, created from the old context, with the\n * extracted values.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all\n * own properties, and keys will be accessed by direct object access.\n */\n extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter\n ): Context;\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n fields(): string[];\n}\n\n/**\n * A setter is specified by the caller to define a specific method\n * to set key/value pairs on the carrier within a propagator.\n */\nexport interface TextMapSetter {\n /**\n * Callback used to set a key/value pair on an object.\n *\n * Should be called by the propagator each time a key/value pair\n * should be set, and should set that key/value pair on the propagator.\n *\n * @param carrier object or class which carries key/value pairs\n * @param key string key to modify\n * @param value value to be set to the key on the carrier\n */\n set(carrier: Carrier, key: string, value: string): void;\n}\n\n/**\n * A getter is specified by the caller to define a specific method\n * to get the value of a key from a carrier.\n */\nexport interface TextMapGetter {\n /**\n * Get a list of all keys available on the carrier.\n *\n * @param carrier\n */\n keys(carrier: Carrier): string[];\n\n /**\n * Get the value of a specific key from the carrier.\n *\n * @param carrier\n * @param key\n */\n get(carrier: Carrier, key: string): undefined | string | string[];\n}\n\nexport const defaultTextMapGetter: TextMapGetter = {\n get(carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n\n keys(carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\n\nexport const defaultTextMapSetter: TextMapSetter = {\n set(carrier, key, value) {\n if (carrier == null) {\n return;\n }\n\n carrier[key] = value;\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts new file mode 100644 index 0000000000..883cf91445 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.d.ts @@ -0,0 +1,25 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. 
+ */ +export declare class NonRecordingSpan implements Span { + private readonly _spanContext; + constructor(_spanContext?: SpanContext); + spanContext(): SpanContext; + setAttribute(_key: string, _value: unknown): this; + setAttributes(_attributes: SpanAttributes): this; + addEvent(_name: string, _attributes?: SpanAttributes): this; + setStatus(_status: SpanStatus): this; + updateName(_name: string): this; + end(_endTime?: TimeInput): void; + isRecording(): boolean; + recordException(_exception: Exception, _time?: TimeInput): void; +} +//# sourceMappingURL=NonRecordingSpan.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js new file mode 100644 index 0000000000..ba2e0ff5cc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js @@ -0,0 +1,62 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPAN_CONTEXT } from './invalid-span-constants'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NonRecordingSpan = /** @class */ (function () { + function NonRecordingSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. + NonRecordingSpan.prototype.spanContext = function () { + return this._spanContext; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for NonRecordingSpan. 
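As a rough usage sketch of the class declared above: every mutator returns the span (or nothing) without recording anything, and the default span context is the invalid one. The deep import reaches into the ESM build files added by this diff; the class is internal, so application code would normally only meet it indirectly through the no-op tracer.

import { INVALID_SPAN_CONTEXT } from '@opentelemetry/api';
// Internal class, deep-imported from the build output added in this diff.
import { NonRecordingSpan } from '@opentelemetry/api/build/esm/trace/NonRecordingSpan';

const span = new NonRecordingSpan();

// Without an explicit SpanContext the span carries the invalid one.
console.log(span.spanContext() === INVALID_SPAN_CONTEXT); // true
console.log(span.isRecording());                          // false

// All mutators are no-ops but still chain, so instrumented code runs unchanged.
span.setAttribute('http.method', 'GET')
  .setAttributes({ 'http.status_code': 200 })
  .addEvent('cache miss')
  .updateName('ignored')
  .end();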
+ NonRecordingSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; + return NonRecordingSpan; +}()); +export { NonRecordingSpan }; +//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map new file mode 100644 index 0000000000..6af5c8d463 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NonRecordingSpan.js","sourceRoot":"","sources":["../../../src/trace/NonRecordingSpan.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAKH,OAAO,EAAE,oBAAoB,EAAE,MAAM,0BAA0B,CAAC;AAKhE;;;;GAIG;AACH;IACE,0BACmB,YAAgD;QAAhD,6BAAA,EAAA,mCAAgD;QAAhD,iBAAY,GAAZ,YAAY,CAAoC;IAChE,CAAC;IAEJ,yBAAyB;IACzB,sCAAW,GAAX;QACE,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,uCAAY,GAAZ,UAAa,IAAY,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,wCAAa,GAAb,UAAc,WAA2B;QACvC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,mCAAQ,GAAR,UAAS,KAAa,EAAE,WAA4B;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,oCAAS,GAAT,UAAU,OAAmB;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,qCAAU,GAAV,UAAW,KAAa;QACtB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,8BAAG,GAAH,UAAI,QAAoB,IAAS,CAAC;IAElC,yDAAyD;IACzD,sCAAW,GAAX;QACE,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0BAA0B;IAC1B,0CAAe,GAAf,UAAgB,UAAqB,EAAE,KAAiB,IAAS,CAAC;IACpE,uBAAC;AAAD,CAAC,AA7CD,IA6CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { INVALID_SPAN_CONTEXT } from './invalid-span-constants';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * The NonRecordingSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nexport class NonRecordingSpan implements Span {\n constructor(\n private readonly _spanContext: SpanContext = INVALID_SPAN_CONTEXT\n ) {}\n\n // Returns a SpanContext.\n spanContext(): SpanContext {\n return this._spanContext;\n }\n\n // By default does nothing\n setAttribute(_key: string, _value: unknown): this {\n return this;\n }\n\n // By default does nothing\n setAttributes(_attributes: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n addEvent(_name: string, _attributes?: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n setStatus(_status: SpanStatus): this {\n return this;\n }\n\n // By default does nothing\n updateName(_name: string): this {\n return this;\n }\n\n // By default does nothing\n end(_endTime?: TimeInput): void {}\n\n // isRecording always returns false for NonRecordingSpan.\n isRecording(): boolean {\n return false;\n }\n\n // By default does nothing\n recordException(_exception: Exception, _time?: TimeInput): void {}\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts new file mode 100644 index 0000000000..0e059c990c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.d.ts @@ -0,0 +1,14 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +/** + * No-op implementations of {@link Tracer}. + */ +export declare class NoopTracer implements Tracer { + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan ReturnType>(name: string, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, ctx: Context | undefined, fn: F): ReturnType; +} +//# sourceMappingURL=NoopTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js new file mode 100644 index 0000000000..7ad14a2d78 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js @@ -0,0 +1,75 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ContextAPI } from '../api/context'; +import { getSpanContext, setSpan } from '../trace/context-utils'; +import { NonRecordingSpan } from './NonRecordingSpan'; +import { isSpanContextValid } from './spancontext-utils'; +var context = ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +var NoopTracer = /** @class */ (function () { + function NoopTracer() { + } + // startSpan starts a noop span. 
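A sketch of what startSpan above resolves to in practice: with no usable parent it hands back a span carrying the invalid context, a valid span context found in the supplied Context is passed through unrecorded, and root: true discards the parent. (In the published typings the startActiveSpan overloads in the .d.ts above are generic over the callback type; the capture here has flattened those type parameters.) The deep import again targets the internal build file added by this diff.

import { context, trace, INVALID_SPAN_CONTEXT, TraceFlags } from '@opentelemetry/api';
// Internal class, deep-imported from the build output added in this diff.
import { NoopTracer } from '@opentelemetry/api/build/esm/trace/NoopTracer';

const tracer = new NoopTracer();

// No parent anywhere: the invalid span context is used.
console.log(tracer.startSpan('orphan').spanContext() === INVALID_SPAN_CONTEXT); // true

// A valid span context stored on the context is propagated, still unrecorded.
const parentCtx = trace.setSpanContext(context.active(), {
  traceId: '0af7651916cd43dd8448eb211c80319c', // example W3C trace id
  spanId: 'b7ad6b7169203331',                  // example W3C span id
  traceFlags: TraceFlags.SAMPLED,
});
const child = tracer.startSpan('child', undefined, parentCtx);
console.log(child.spanContext().traceId); // '0af7651916cd43dd8448eb211c80319c'
console.log(child.isRecording());         // false

// root: true ignores the parent from the supplied context.
console.log(tracer.startSpan('root', { root: true }, parentCtx).spanContext() === INVALID_SPAN_CONTEXT); // true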
+ NoopTracer.prototype.startSpan = function (name, options, context) { + var root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan(); + } + var parentFromContext = context && getSpanContext(context); + if (isSpanContext(parentFromContext) && + isSpanContextValid(parentFromContext)) { + return new NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan(); + } + }; + NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { + var opts; + var ctx; + var fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + var parentContext = ctx !== null && ctx !== void 0 ? ctx : context.active(); + var span = this.startSpan(name, opts, parentContext); + var contextWithSpanSet = setSpan(parentContext, span); + return context.with(contextWithSpanSet, fn, undefined, span); + }; + return NoopTracer; +}()); +export { NoopTracer }; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map new file mode 100644 index 0000000000..71cfd1b607 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracer.js","sourceRoot":"","sources":["../../../src/trace/NoopTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,gBAAgB,CAAC;AAE5C,OAAO,EAAE,cAAc,EAAE,OAAO,EAAE,MAAM,wBAAwB,CAAC;AACjE,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD,OAAO,EAAE,kBAAkB,EAAE,MAAM,qBAAqB,CAAC;AAKzD,IAAM,OAAO,GAAG,UAAU,CAAC,WAAW,EAAE,CAAC;AAEzC;;GAEG;AACH;IAAA;IAgEA,CAAC;IA/DC,gCAAgC;IAChC,8BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,IAAM,IAAI,GAAG,OAAO,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,CAAC;QACpC,IAAI,IAAI,EAAE;YACR,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;QAED,IAAM,iBAAiB,GAAG,OAAO,IAAI,cAAc,CAAC,OAAO,CAAC,CAAC;QAE7D,IACE,aAAa,CAAC,iBAAiB,CAAC;YAChC,kBAAkB,CAAC,iBAAiB,CAAC,EACrC;YACA,OAAO,IAAI,gBAAgB,CAAC,iBAAiB,CAAC,CAAC;SAChD;aAAM;YACL,OAAO,IAAI,gBAAgB,EAAE,CAAC;SAC/B;IACH,CAAC;IAiBD,oCAAe,GAAf,UACE,IAAY,EACZ,IAAsB,EACtB,IAAkB,EAClB,IAAQ;QAER,IAAI,IAA6B,CAAC;QAClC,IAAI,GAAwB,CAAC;QAC7B,IAAI,EAAK,CAAC;QAEV,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YACxB,OAAO;SACR;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,IAAI,GAAG,IAA+B,CAAC;YACvC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM;YACL,IAAI,GAAG,IAA+B,CAAC;YACvC,GAAG,GAAG,IAA2B,CAAC;YAClC,EAAE,GAAG,IAAS,CAAC;SAChB;QAED,IAAM,aAAa,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QAC9C,IAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QACvD,IAAM,kBAAkB,GAAG,OAAO,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QAExD,OAAO,OAAO,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;IAC/D,CAAC;IACH,iBAAC;AAAD,CAAC,AAhED,IAgEC;;AAED,SAAS,aAAa,CAAC,WAAgB;IACrC,OAAO,CACL,OAAO,WAAW,KAAK,QAAQ;QAC/B,OAAO,WAAW,CAAC,QAAQ,CAAC,KAAK,QAAQ;QACzC,OAAO,WAAW,CAAC,SAAS,CAAC,KAAK,QAAQ;QAC1C,OAAO,WAAW,CAAC,YAAY,CAAC,KAAK,QAAQ,CAC9C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * 
Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { Context } from '../context/types';\nimport { getSpanContext, setSpan } from '../trace/context-utils';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { isSpanContextValid } from './spancontext-utils';\nimport { SpanOptions } from './SpanOptions';\nimport { SpanContext } from './span_context';\nimport { Tracer } from './tracer';\n\nconst context = ContextAPI.getInstance();\n\n/**\n * No-op implementations of {@link Tracer}.\n */\nexport class NoopTracer implements Tracer {\n // startSpan starts a noop span.\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n const root = Boolean(options?.root);\n if (root) {\n return new NonRecordingSpan();\n }\n\n const parentFromContext = context && getSpanContext(context);\n\n if (\n isSpanContext(parentFromContext) &&\n isSpanContextValid(parentFromContext)\n ) {\n return new NonRecordingSpan(parentFromContext);\n } else {\n return new NonRecordingSpan();\n }\n }\n\n startActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n ctx: Context | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n arg2?: F | SpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType | undefined {\n let opts: SpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (arguments.length < 2) {\n return;\n } else if (arguments.length === 2) {\n fn = arg2 as F;\n } else if (arguments.length === 3) {\n opts = arg2 as SpanOptions | undefined;\n fn = arg3 as F;\n } else {\n opts = arg2 as SpanOptions | undefined;\n ctx = arg3 as Context | undefined;\n fn = arg4 as F;\n }\n\n const parentContext = ctx ?? 
context.active();\n const span = this.startSpan(name, opts, parentContext);\n const contextWithSpanSet = setSpan(parentContext, span);\n\n return context.with(contextWithSpanSet, fn, undefined, span);\n }\n}\n\nfunction isSpanContext(spanContext: any): spanContext is SpanContext {\n return (\n typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number'\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts new file mode 100644 index 0000000000..ec0fe79201 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.d.ts @@ -0,0 +1,13 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +import { TracerProvider } from './tracer_provider'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +export declare class NoopTracerProvider implements TracerProvider { + getTracer(_name?: string, _version?: string, _options?: TracerOptions): Tracer; +} +//# sourceMappingURL=NoopTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js new file mode 100644 index 0000000000..14d44c22bc --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js @@ -0,0 +1,32 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
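Putting the argument shuffling above into caller terms, startActiveSpan accepts three call shapes; the callback always receives the started span and is responsible for ending it. The sketch below goes through the public trace entry point, which resolves to the no-op tracer shown here until an SDK registers a delegate; the instrumentation name is illustrative.

import { context, trace, SpanKind } from '@opentelemetry/api';

const tracer = trace.getTracer('example-instrumentation');

// 1. name + callback
const sum = tracer.startActiveSpan('add', span => {
  try {
    return 2 + 2;
  } finally {
    span.end(); // the caller ends the span
  }
});

// 2. name + options + callback
tracer.startActiveSpan('fetch users', { kind: SpanKind.CLIENT }, span => span.end());

// 3. name + options + explicit parent context + callback
tracer.startActiveSpan('job', {}, context.active(), span => span.end());

console.log(sum); // 4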
+ */ +var NoopTracerProvider = /** @class */ (function () { + function NoopTracerProvider() { + } + NoopTracerProvider.prototype.getTracer = function (_name, _version, _options) { + return new NoopTracer(); + }; + return NoopTracerProvider; +}()); +export { NoopTracerProvider }; +//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map new file mode 100644 index 0000000000..2705ab4994 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/NoopTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAK1C;;;;;GAKG;AACH;IAAA;IAQA,CAAC;IAPC,sCAAS,GAAT,UACE,KAAc,EACd,QAAiB,EACjB,QAAwB;QAExB,OAAO,IAAI,UAAU,EAAE,CAAC;IAC1B,CAAC;IACH,yBAAC;AAAD,CAAC,AARD,IAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopTracer } from './NoopTracer';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\nimport { TracerProvider } from './tracer_provider';\n\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nexport class NoopTracerProvider implements TracerProvider {\n getTracer(\n _name?: string,\n _version?: string,\n _options?: TracerOptions\n ): Tracer {\n return new NoopTracer();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts new file mode 100644 index 0000000000..116cc5c1f6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.d.ts @@ -0,0 +1,27 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * Proxy tracer provided by the proxy tracer provider + */ +export declare class ProxyTracer implements Tracer { + private _provider; + readonly name: string; + readonly version?: string | undefined; + readonly options?: TracerOptions | undefined; + private _delegate?; + constructor(_provider: TracerDelegator, name: string, version?: string | undefined, options?: TracerOptions | undefined); + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan unknown>(_name: string, _options: F | SpanOptions, _context?: F | Context, _fn?: F): ReturnType; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + private _getTracer; +} +export interface TracerDelegator { + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js new file mode 100644 index 0000000000..341991b9e6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js @@ -0,0 +1,53 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { NoopTracer } from './NoopTracer'; +var NOOP_TRACER = new NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +var ProxyTracer = /** @class */ (function () { + function ProxyTracer(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + ProxyTracer.prototype.startSpan = function (name, options, context) { + return this._getTracer().startSpan(name, options, context); + }; + ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { + var tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + }; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + ProxyTracer.prototype._getTracer = function () { + if (this._delegate) { + return this._delegate; + } + var tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + }; + return ProxyTracer; +}()); +export { ProxyTracer }; +//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map new file mode 100644 index 0000000000..178a519ddb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracer.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,cAAc,CAAC;AAM1C,IAAM,WAAW,GAAG,IAAI,UAAU,EAAE,CAAC;AAErC;;GAEG;AACH;IAIE,qBACU,SAA0B,EAClB,IAAY,EACZ,OAAgB,EAChB,OAAuB;QAH/B,cAAS,GAAT,SAAS,CAAiB;QAClB,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAS;QAChB,YAAO,GAAP,OAAO,CAAgB;IACtC,CAAC;IAEJ,+BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;IAED,qCAAe,GAAf,UACE,KAAa,EACb,QAAyB,EACzB,QAAsB,EACtB,GAAO;QAEP,IAAM,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAClE,CAAC;IAED;;;OAGG;IACK,gCAAU,GAAlB;QACE,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,OAAO,IAAI,CAAC,SAAS,CAAC;SACvB;QAED,IAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEvF,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,WAAW,CAAC;SACpB;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC;QACxB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { NoopTracer } from './NoopTracer';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER = new NoopTracer();\n\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nexport class ProxyTracer implements Tracer {\n // When a real implementation is provided, this will be it\n private _delegate?: Tracer;\n\n constructor(\n private _provider: TracerDelegator,\n public readonly name: string,\n public readonly version?: string,\n public readonly options?: TracerOptions\n ) {}\n\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n return this._getTracer().startSpan(name, options, context);\n }\n\n startActiveSpan unknown>(\n _name: string,\n _options: F | SpanOptions,\n _context?: F | Context,\n _fn?: F\n ): ReturnType {\n const tracer = this._getTracer();\n return Reflect.apply(tracer.startActiveSpan, tracer, arguments);\n }\n\n /**\n * Try to 
get a tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a noop tracer.\n */\n private _getTracer() {\n if (this._delegate) {\n return this._delegate;\n }\n\n const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options);\n\n if (!tracer) {\n return NOOP_TRACER;\n }\n\n this._delegate = tracer;\n return this._delegate;\n }\n}\n\nexport interface TracerDelegator {\n getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts new file mode 100644 index 0000000000..ee7eafa94d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.d.ts @@ -0,0 +1,25 @@ +import { Tracer } from './tracer'; +import { TracerProvider } from './tracer_provider'; +import { TracerOptions } from './tracer_options'; +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export declare class ProxyTracerProvider implements TracerProvider { + private _delegate?; + /** + * Get a {@link ProxyTracer} + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; + getDelegate(): TracerProvider; + /** + * Set the delegate tracer provider + */ + setDelegate(delegate: TracerProvider): void; + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js new file mode 100644 index 0000000000..3cc735c902 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js @@ -0,0 +1,54 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { ProxyTracer } from './ProxyTracer'; +import { NoopTracerProvider } from './NoopTracerProvider'; +var NOOP_TRACER_PROVIDER = new NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. 
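A short sketch of that delegation lifecycle, using only the public exports: tracers handed out early silently fall back to the no-op tracer, and once setDelegate is called they resolve the real tracer lazily on their next use. The stand-in delegate below is purely illustrative; in practice an SDK's TracerProvider takes that role.

import { ProxyTracerProvider, TracerProvider, trace } from '@opentelemetry/api';

const provider = new ProxyTracerProvider();

// Handed out before any delegate exists: calls fall through to the no-op tracer.
const early = provider.getTracer('my-lib', '1.0.0');
early.startSpan('noop').end();

// Minimal stand-in delegate; a real SDK TracerProvider would be set here.
const delegate: TracerProvider = {
  getTracer(name, version) {
    console.log(`delegate asked for tracer ${name}@${version ?? 'unversioned'}`);
    return trace.getTracer(name, version); // illustrative only
  },
};
provider.setDelegate(delegate);

// The ProxyTracer created earlier now fetches its delegate tracer on first use.
early.startSpan('delegated').end(); // logs: delegate asked for tracer my-lib@1.0.0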
+ */ +var ProxyTracerProvider = /** @class */ (function () { + function ProxyTracerProvider() { + } + /** + * Get a {@link ProxyTracer} + */ + ProxyTracerProvider.prototype.getTracer = function (name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer(this, name, version, options)); + }; + ProxyTracerProvider.prototype.getDelegate = function () { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + }; + /** + * Set the delegate tracer provider + */ + ProxyTracerProvider.prototype.setDelegate = function (delegate) { + this._delegate = delegate; + }; + ProxyTracerProvider.prototype.getDelegateTracer = function (name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + }; + return ProxyTracerProvider; +}()); +export { ProxyTracerProvider }; +//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map new file mode 100644 index 0000000000..fc9179b177 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracerProvider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH,OAAO,EAAE,WAAW,EAAE,MAAM,eAAe,CAAC;AAC5C,OAAO,EAAE,kBAAkB,EAAE,MAAM,sBAAsB,CAAC;AAG1D,IAAM,oBAAoB,GAAG,IAAI,kBAAkB,EAAE,CAAC;AAEtD;;;;;;;GAOG;AACH;IAAA;IA+BA,CAAC;IA5BC;;OAEG;IACH,uCAAS,GAAT,UAAU,IAAY,EAAE,OAAgB,EAAE,OAAuB;;QAC/D,OAAO,CACL,MAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,mCAC9C,IAAI,WAAW,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAC9C,CAAC;IACJ,CAAC;IAED,yCAAW,GAAX;;QACE,OAAO,MAAA,IAAI,CAAC,SAAS,mCAAI,oBAAoB,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,yCAAW,GAAX,UAAY,QAAwB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,+CAAiB,GAAjB,UACE,IAAY,EACZ,OAAgB,EAChB,OAAuB;;QAEvB,OAAO,MAAA,IAAI,CAAC,SAAS,0CAAE,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IACH,0BAAC;AAAD,CAAC,AA/BD,IA+BC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerProvider } from './tracer_provider';\nimport { ProxyTracer } from './ProxyTracer';\nimport { NoopTracerProvider } from './NoopTracerProvider';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n 
*/\nexport class ProxyTracerProvider implements TracerProvider {\n private _delegate?: TracerProvider;\n\n /**\n * Get a {@link ProxyTracer}\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n return (\n this.getDelegateTracer(name, version, options) ??\n new ProxyTracer(this, name, version, options)\n );\n }\n\n getDelegate(): TracerProvider {\n return this._delegate ?? NOOP_TRACER_PROVIDER;\n }\n\n /**\n * Set the delegate tracer provider\n */\n setDelegate(delegate: TracerProvider) {\n this._delegate = delegate;\n }\n\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined {\n return this._delegate?.getTracer(name, version, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts new file mode 100644 index 0000000000..dd8dc811e6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.d.ts @@ -0,0 +1,30 @@ +import { Context } from '../context/types'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SamplingResult } from './SamplingResult'; +import { SpanKind } from './span_kind'; +/** + * This interface represent a sampler. Sampling is a mechanism to control the + * noise and overhead introduced by OpenTelemetry by reducing the number of + * samples of traces collected and sent to the backend. + */ +export interface Sampler { + /** + * Checks whether span needs to be created and tracked. + * + * @param context Parent Context which may contain a span. + * @param traceId of the span to be created. It can be different from the + * traceId in the {@link SpanContext}. Typically in situations when the + * span to be created starts a new trace. + * @param spanName of the span to be created. + * @param spanKind of the span to be created. + * @param attributes Initial set of SpanAttributes for the Span being constructed. + * @param links Collection of links that will be associated with the Span to + * be created. Typically useful for batch operations. + * @returns a {@link SamplingResult}. + */ + shouldSample(context: Context, traceId: string, spanName: string, spanKind: SpanKind, attributes: SpanAttributes, links: Link[]): SamplingResult; + /** Returns the sampler name or short description with the configuration. */ + toString(): string; +} +//# sourceMappingURL=Sampler.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js new file mode 100644 index 0000000000..22a60a1273 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map new file mode 100644 index 0000000000..c88d802fa1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Sampler.js","sourceRoot":"","sources":["../../../src/trace/Sampler.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SamplingResult } from './SamplingResult';\nimport { SpanKind } from './span_kind';\n\n/**\n * This interface represent a sampler. Sampling is a mechanism to control the\n * noise and overhead introduced by OpenTelemetry by reducing the number of\n * samples of traces collected and sent to the backend.\n */\nexport interface Sampler {\n /**\n * Checks whether span needs to be created and tracked.\n *\n * @param context Parent Context which may contain a span.\n * @param traceId of the span to be created. It can be different from the\n * traceId in the {@link SpanContext}. Typically in situations when the\n * span to be created starts a new trace.\n * @param spanName of the span to be created.\n * @param spanKind of the span to be created.\n * @param attributes Initial set of SpanAttributes for the Span being constructed.\n * @param links Collection of links that will be associated with the Span to\n * be created. Typically useful for batch operations.\n * @returns a {@link SamplingResult}.\n */\n shouldSample(\n context: Context,\n traceId: string,\n spanName: string,\n spanKind: SpanKind,\n attributes: SpanAttributes,\n links: Link[]\n ): SamplingResult;\n\n /** Returns the sampler name or short description with the configuration. */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts new file mode 100644 index 0000000000..9394846575 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.d.ts @@ -0,0 +1,39 @@ +import { SpanAttributes } from './attributes'; +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export declare enum SamplingDecision { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + NOT_RECORD = 0, + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + RECORD = 1, + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + RECORD_AND_SAMPLED = 2 +} +/** + * A sampling result contains a decision for a {@link Span} and additional + * attributes the sampler would like to added to the Span. + */ +export interface SamplingResult { + /** + * A sampling decision, refer to {@link SamplingDecision} for details. + */ + decision: SamplingDecision; + /** + * The list of attributes returned by SamplingResult MUST be immutable. + * Caller may call {@link Sampler}.shouldSample any number of times and + * can safely cache the returned value. + */ + attributes?: Readonly; +} +//# sourceMappingURL=SamplingResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js new file mode 100644 index 0000000000..3587532d58 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js @@ -0,0 +1,38 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
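A minimal custom sampler sketch against the interface above, assuming (as in this version of the API) that Sampler, SamplingDecision, and SamplingResult are re-exported from the package root; a real deployment would hand such a sampler to an SDK TracerProvider rather than call shouldSample directly.

import {
  Context,
  Link,
  ROOT_CONTEXT,
  Sampler,
  SamplingDecision,
  SamplingResult,
  SpanAttributes,
  SpanKind,
} from '@opentelemetry/api';

// Toy rule: drop health-check spans, record and sample everything else.
class NameFilterSampler implements Sampler {
  shouldSample(
    _context: Context,
    _traceId: string,
    spanName: string,
    _spanKind: SpanKind,
    _attributes: SpanAttributes,
    _links: Link[]
  ): SamplingResult {
    if (spanName.startsWith('health')) {
      return { decision: SamplingDecision.NOT_RECORD };
    }
    return {
      decision: SamplingDecision.RECORD_AND_SAMPLED,
      attributes: { 'sampler.rule': 'default' }, // returned attributes must stay immutable
    };
  }

  toString(): string {
    return 'NameFilterSampler';
  }
}

const sampler = new NameFilterSampler();
console.log(sampler.shouldSample(ROOT_CONTEXT, 'trace-id', 'healthcheck', SpanKind.SERVER, {}, []).decision);
// -> 0 (SamplingDecision.NOT_RECORD)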
+ */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision || (SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map new file mode 100644 index 0000000000..9c73981d2e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SamplingResult.js","sourceRoot":"","sources":["../../../src/trace/SamplingResult.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAIH;;;GAGG;AACH,MAAM,CAAN,IAAY,gBAgBX;AAhBD,WAAY,gBAAgB;IAC1B;;;OAGG;IACH,mEAAU,CAAA;IACV;;;OAGG;IACH,2DAAM,CAAA;IACN;;;OAGG;IACH,mFAAkB,CAAA;AACpB,CAAC,EAhBW,gBAAgB,KAAhB,gBAAgB,QAgB3B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\n\n/**\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nexport enum SamplingDecision {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n NOT_RECORD,\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n RECORD,\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n RECORD_AND_SAMPLED,\n}\n\n/**\n * A sampling result contains a decision for a {@link Span} and additional\n * attributes the sampler would like to added to the Span.\n */\nexport interface SamplingResult {\n /**\n * A sampling decision, refer to {@link SamplingDecision} for details.\n */\n decision: SamplingDecision;\n /**\n * The list of attributes returned by SamplingResult MUST be immutable.\n * Caller may call {@link Sampler}.shouldSample any number of times and\n * can safely cache the returned value.\n */\n attributes?: Readonly;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts new file mode 100644 index 0000000000..c8045689b7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.d.ts @@ -0,0 +1,23 @@ +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SpanKind } from './span_kind'; +/** + * Options needed for span creation + */ +export interface SpanOptions { + /** + * The SpanKind of a span + * @default {@link SpanKind.INTERNAL} + */ + kind?: SpanKind; + /** A span's attributes */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** A manually specified start time for the created `Span` object. */ + startTime?: TimeInput; + /** The new span should be a root span. 
(Ignore parent from context). */ + root?: boolean; +} +//# sourceMappingURL=SpanOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js new file mode 100644 index 0000000000..06b42b1513 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map new file mode 100644 index 0000000000..9132a33ec1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SpanOptions.js","sourceRoot":"","sources":["../../../src/trace/SpanOptions.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SpanKind } from './span_kind';\n\n/**\n * Options needed for span creation\n */\nexport interface SpanOptions {\n /**\n * The SpanKind of a span\n * @default {@link SpanKind.INTERNAL}\n */\n kind?: SpanKind;\n\n /** A span's attributes */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /** A manually specified start time for the created `Span` object. */\n startTime?: TimeInput;\n\n /** The new span should be a root span. (Ignore parent from context). 
*/\n root?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts new file mode 100644 index 0000000000..a2a5d2a2f9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.d.ts @@ -0,0 +1,10 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +/** + * @deprecated please use {@link Attributes} + */ +export declare type SpanAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type SpanAttributeValue = AttributeValue; +//# sourceMappingURL=attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js new file mode 100644 index 0000000000..6f1b9a3f8d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map new file mode 100644 index 0000000000..2b02be7851 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"attributes.js","sourceRoot":"","sources":["../../../src/trace/attributes.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type SpanAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type SpanAttributeValue = AttributeValue;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts new file mode 100644 index 0000000000..1d46619bea --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.d.ts @@ -0,0 +1,37 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +/** + * Return the span if one 
exists + * + * @param context context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export declare function deleteSpan(context: Context): Context; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; +//# sourceMappingURL=context-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js new file mode 100644 index 0000000000..1f7525a92a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js @@ -0,0 +1,66 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { createContextKey } from '../context/context'; +import { NonRecordingSpan } from './NonRecordingSpan'; +/** + * span key + */ +var SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan(spanContext)); +} +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? 
void 0 : _a.spanContext(); +} +//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map new file mode 100644 index 0000000000..81b361642b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-utils.js","sourceRoot":"","sources":["../../../src/trace/context-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD;;GAEG;AACH,IAAM,QAAQ,GAAG,gBAAgB,CAAC,gCAAgC,CAAC,CAAC;AAEpE;;;;GAIG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB;IACtC,OAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAU,IAAI,SAAS,CAAC;AAC3D,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC1C,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,UAAU,CAAC,OAAgB;IACzC,OAAO,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;AACvC,CAAC;AAED;;;;;;GAMG;AACH,MAAM,UAAU,cAAc,CAC5B,OAAgB,EAChB,WAAwB;IAExB,OAAO,OAAO,CAAC,OAAO,EAAE,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC7D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,cAAc,CAAC,OAAgB;;IAC7C,OAAO,MAAA,OAAO,CAAC,OAAO,CAAC,0CAAE,WAAW,EAAE,CAAC;AACzC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { NonRecordingSpan } from './NonRecordingSpan';\n\n/**\n * span key\n */\nconst SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return (context.getValue(SPAN_KEY) as Span) || undefined;\n}\n\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return context.setValue(SPAN_KEY, span);\n}\n\n/**\n * Remove current span stored in the context\n *\n * @param context context to delete span from\n */\nexport function deleteSpan(context: Context): Context {\n return context.deleteValue(SPAN_KEY);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nexport function setSpanContext(\n context: Context,\n spanContext: SpanContext\n): Context {\n return setSpan(context, new NonRecordingSpan(spanContext));\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return getSpan(context)?.spanContext();\n}\n"]} \ No newline at end of file diff 
--git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts new file mode 100644 index 0000000000..9ed5ecb710 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.d.ts @@ -0,0 +1,22 @@ +import { TraceState } from '../trace_state'; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export declare class TraceStateImpl implements TraceState { + private _internalState; + constructor(rawTraceState?: string); + set(key: string, value: string): TraceStateImpl; + unset(key: string): TraceStateImpl; + get(key: string): string | undefined; + serialize(): string; + private _parse; + private _keys; + private _clone; +} +//# sourceMappingURL=tracestate-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js new file mode 100644 index 0000000000..7514069350 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js @@ -0,0 +1,102 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { validateKey, validateValue } from './tracestate-validators'; +var MAX_TRACE_STATE_ITEMS = 32; +var MAX_TRACE_STATE_LEN = 512; +var LIST_MEMBERS_SEPARATOR = ','; +var LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +var TraceStateImpl = /** @class */ (function () { + function TraceStateImpl(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + TraceStateImpl.prototype.set = function (key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ var traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + }; + TraceStateImpl.prototype.unset = function (key) { + var traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + }; + TraceStateImpl.prototype.get = function (key) { + return this._internalState.get(key); + }; + TraceStateImpl.prototype.serialize = function () { + var _this = this; + return this._keys() + .reduce(function (agg, key) { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + _this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + }; + TraceStateImpl.prototype._parse = function (rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce(function (agg, part) { + var listMember = part.trim(); // Optional Whitespace (OWS) handling + var i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + var key = listMember.slice(0, i); + var value = listMember.slice(i + 1, part.length); + if (validateKey(key) && validateValue(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + }; + TraceStateImpl.prototype._keys = function () { + return Array.from(this._internalState.keys()).reverse(); + }; + TraceStateImpl.prototype._clone = function () { + var traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + }; + return TraceStateImpl; +}()); +export { TraceStateImpl }; +//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map new file mode 100644 index 0000000000..102cb4f3e4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracestate-impl.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-impl.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,WAAW,EAAE,aAAa,EAAE,MAAM,yBAAyB,CAAC;AAErE,IAAM,qBAAqB,GAAG,EAAE,CAAC;AACjC,IAAM,mBAAmB,GAAG,GAAG,CAAC;AAChC,IAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,IAAM,8BAA8B,GAAG,GAAG,CAAC;AAE3C;;;;;;;;GAQG;AACH;IAGE,wBAAY,aAAsB;QAF1B,mBAAc,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGtD,IAAI,aAAa;YAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChD,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW,EAAE,KAAa;QAC5B,4DAA4D;QAC5D,sBAAsB;QACtB,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,IAAI,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YACtC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACvC;QACD,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,8BAAK,GAAL,UAAM,GAAW;QACf,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW;QACb,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;IAED,kCAAS,GAAT;QAAA,iBAOC;QANC,OAAO,IAAI,CAAC,KAAK,EAAE;aAChB,MAAM,CAAC,UAAC,GAAa,EAAE,GAAG;YACzB,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,8BAA8B,GAAG,KAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC/D,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;aACL,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAClC,CAAC;IAEO,+BAAM,GAAd,UAAe,aAAqB;QAClC,IAAI,aAAa,CAAC,MAAM,GAAG,mBAAmB;YAAE,OAAO;QACvD,IAAI,CAAC,cAAc,GAAG,aAAa;aAChC,KAAK,CAAC,sBAAsB,CAAC;aAC7B,OAAO,EAAE,CAAC,2EAA2E;aACrF,MAAM,CAAC,UAAC,GAAwB,EAAE,IAAY;YAC7C,IAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,qCAAqC;YACrE,IAAM,CAAC,GAAG,UAAU,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;YAC7D,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;gBACZ,IAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACnC,IAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBACnD,IAAI,WAAW,CAAC,GAAG,CAAC,IAAI,aAAa,CAAC,KAAK,CAAC,EAAE;oBAC5C,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBACrB;qBAAM;oBACL,oCAAoC;iBACrC;aACF;YACD,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QAEhB,gFAAgF;QAChF,IAAI,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,qBAAqB,EAAE;YACpD,IAAI,CAAC,cAAc,GAAG,IAAI,GAAG,CAC3B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC;iBACtC,OAAO,EAAE,CAAC,sDAAsD;iBAChE,KAAK,CAAC,CAAC,EAAE,qBAAqB,CAAC,CACnC,CAAC;SACH;IACH,CAAC;IAEO,8BAAK,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;IAC1D,CAAC;IAEO,+BAAM,GAAd;QACE,IAAM,UAAU,GAAG,IAAI,cAAc,EAAE,CAAC;QACxC,UAAU,CAAC,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACzD,OAAO,UAAU,CAAC;IACpB,CAAC;IACH,qBAAC;AAAD,CAAC,AA5ED,IA4EC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { validateKey, validateValue } from './tracestate-validators';\n\nconst MAX_TRACE_STATE_ITEMS = 32;\nconst MAX_TRACE_STATE_LEN = 512;\nconst LIST_MEMBERS_SEPARATOR = ',';\nconst LIST_MEMBER_KEY_VALUE_SPLITTER = '=';\n\n/**\n * TraceState must be a class and not 
a simple object type because of the spec\n * requirement (https://www.w3.org/TR/trace-context/#tracestate-field).\n *\n * Here is the list of allowed mutations:\n * - New key-value pair should be added into the beginning of the list\n * - The value of any key can be updated. Modified keys MUST be moved to the\n * beginning of the list.\n */\nexport class TraceStateImpl implements TraceState {\n private _internalState: Map = new Map();\n\n constructor(rawTraceState?: string) {\n if (rawTraceState) this._parse(rawTraceState);\n }\n\n set(key: string, value: string): TraceStateImpl {\n // TODO: Benchmark the different approaches(map vs list) and\n // use the faster one.\n const traceState = this._clone();\n if (traceState._internalState.has(key)) {\n traceState._internalState.delete(key);\n }\n traceState._internalState.set(key, value);\n return traceState;\n }\n\n unset(key: string): TraceStateImpl {\n const traceState = this._clone();\n traceState._internalState.delete(key);\n return traceState;\n }\n\n get(key: string): string | undefined {\n return this._internalState.get(key);\n }\n\n serialize(): string {\n return this._keys()\n .reduce((agg: string[], key) => {\n agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));\n return agg;\n }, [])\n .join(LIST_MEMBERS_SEPARATOR);\n }\n\n private _parse(rawTraceState: string) {\n if (rawTraceState.length > MAX_TRACE_STATE_LEN) return;\n this._internalState = rawTraceState\n .split(LIST_MEMBERS_SEPARATOR)\n .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning\n .reduce((agg: Map, part: string) => {\n const listMember = part.trim(); // Optional Whitespace (OWS) handling\n const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);\n if (i !== -1) {\n const key = listMember.slice(0, i);\n const value = listMember.slice(i + 1, part.length);\n if (validateKey(key) && validateValue(value)) {\n agg.set(key, value);\n } else {\n // TODO: Consider to add warning log\n }\n }\n return agg;\n }, new Map());\n\n // Because of the reverse() requirement, trunc must be done after map is created\n if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {\n this._internalState = new Map(\n Array.from(this._internalState.entries())\n .reverse() // Use reverse same as original tracestate parse chain\n .slice(0, MAX_TRACE_STATE_ITEMS)\n );\n }\n }\n\n private _keys(): string[] {\n return Array.from(this._internalState.keys()).reverse();\n }\n\n private _clone(): TraceStateImpl {\n const traceState = new TraceStateImpl();\n traceState._internalState = new Map(this._internalState);\n return traceState;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts new file mode 100644 index 0000000000..4917f99d1e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.d.ts @@ -0,0 +1,15 @@ +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +export declare function validateKey(key: string): boolean; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +export declare function validateValue(value: string): boolean; +//# sourceMappingURL=tracestate-validators.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js new file mode 100644 index 0000000000..1d3f14bc52 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js @@ -0,0 +1,41 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +var VALID_KEY = "[a-z]" + VALID_KEY_CHAR_RANGE + "{0,255}"; +var VALID_VENDOR_KEY = "[a-z0-9]" + VALID_KEY_CHAR_RANGE + "{0,240}@[a-z]" + VALID_KEY_CHAR_RANGE + "{0,13}"; +var VALID_KEY_REGEX = new RegExp("^(?:" + VALID_KEY + "|" + VALID_VENDOR_KEY + ")$"); +var VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +var INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +export function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. 
+ */ +export function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map new file mode 100644 index 0000000000..72899fc60c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracestate-validators.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-validators.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,IAAM,oBAAoB,GAAG,cAAc,CAAC;AAC5C,IAAM,SAAS,GAAG,UAAQ,oBAAoB,YAAS,CAAC;AACxD,IAAM,gBAAgB,GAAG,aAAW,oBAAoB,qBAAgB,oBAAoB,WAAQ,CAAC;AACrG,IAAM,eAAe,GAAG,IAAI,MAAM,CAAC,SAAO,SAAS,SAAI,gBAAgB,OAAI,CAAC,CAAC;AAC7E,IAAM,sBAAsB,GAAG,qBAAqB,CAAC;AACrD,IAAM,+BAA+B,GAAG,KAAK,CAAC;AAE9C;;;;;;;GAOG;AACH,MAAM,UAAU,WAAW,CAAC,GAAW;IACrC,OAAO,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,aAAa,CAAC,KAAa;IACzC,OAAO,CACL,sBAAsB,CAAC,IAAI,CAAC,KAAK,CAAC;QAClC,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAC7C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';\nconst VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;\nconst VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;\nconst VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);\nconst VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;\nconst INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;\n\n/**\n * Key is opaque string up to 256 characters printable. It MUST begin with a\n * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,\n * underscores _, dashes -, asterisks *, and forward slashes /.\n * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the\n * vendor name. 
Vendors SHOULD set the tenant ID at the beginning of the key.\n * see https://www.w3.org/TR/trace-context/#key\n */\nexport function validateKey(key: string): boolean {\n return VALID_KEY_REGEX.test(key);\n}\n\n/**\n * Value is opaque string up to 256 characters printable ASCII RFC0020\n * characters (i.e., the range 0x20 to 0x7E) except comma , and =.\n */\nexport function validateValue(value: string): boolean {\n return (\n VALID_VALUE_BASE_REGEX.test(value) &&\n !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts new file mode 100644 index 0000000000..e3b51fe45b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.d.ts @@ -0,0 +1,3 @@ +import { TraceState } from '../trace_state'; +export declare function createTraceState(rawTraceState?: string): TraceState; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js new file mode 100644 index 0000000000..feea469141 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js @@ -0,0 +1,20 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { TraceStateImpl } from './tracestate-impl'; +export function createTraceState(rawTraceState) { + return new TraceStateImpl(rawTraceState); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map new file mode 100644 index 0000000000..f1b6a146ec --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../../src/trace/internal/utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,cAAc,EAAE,MAAM,mBAAmB,CAAC;AAGnD,MAAM,UAAU,gBAAgB,CAAC,aAAsB;IACrD,OAAO,IAAI,cAAc,CAAC,aAAa,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { TraceStateImpl } from './tracestate-impl';\n\n\nexport function createTraceState(rawTraceState?: string): TraceState {\n return new TraceStateImpl(rawTraceState);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts new file mode 100644 index 0000000000..e32dab9d60 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.d.ts @@ -0,0 +1,5 @@ +import { SpanContext } from './span_context'; +export declare const INVALID_SPANID = "0000000000000000"; +export declare const INVALID_TRACEID = "00000000000000000000000000000000"; +export declare const INVALID_SPAN_CONTEXT: SpanContext; +//# sourceMappingURL=invalid-span-constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js new file mode 100644 index 0000000000..36dc1d62c9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js @@ -0,0 +1,24 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +import { TraceFlags } from './trace_flags'; +export var INVALID_SPANID = '0000000000000000'; +export var INVALID_TRACEID = '00000000000000000000000000000000'; +export var INVALID_SPAN_CONTEXT = { + traceId: INVALID_TRACEID, + spanId: INVALID_SPANID, + traceFlags: TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map new file mode 100644 index 0000000000..970a3f412c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"invalid-span-constants.js","sourceRoot":"","sources":["../../../src/trace/invalid-span-constants.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAGH,OAAO,EAAE,UAAU,EAAE,MAAM,eAAe,CAAC;AAE3C,MAAM,CAAC,IAAM,cAAc,GAAG,kBAAkB,CAAC;AACjD,MAAM,CAAC,IAAM,eAAe,GAAG,kCAAkC,CAAC;AAClE,MAAM,CAAC,IAAM,oBAAoB,GAAgB;IAC/C,OAAO,EAAE,eAAe;IACxB,MAAM,EAAE,cAAc;IACtB,UAAU,EAAE,UAAU,CAAC,IAAI;CAC5B,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanContext } from './span_context';\nimport { TraceFlags } from './trace_flags';\n\nexport const INVALID_SPANID = '0000000000000000';\nexport const INVALID_TRACEID = '00000000000000000000000000000000';\nexport const INVALID_SPAN_CONTEXT: SpanContext = {\n traceId: INVALID_TRACEID,\n spanId: INVALID_SPANID,\n traceFlags: TraceFlags.NONE,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts new file mode 100644 index 0000000000..d1326f5156 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.d.ts @@ -0,0 +1,24 @@ +import { SpanAttributes } from './attributes'; +import { SpanContext } from './span_context'; +/** + * A pointer from the current {@link Span} to another span in the same trace or + * in a different trace. + * Few examples of Link usage. + * 1. Batch Processing: A batch of elements may contain elements associated + * with one or more traces/spans. Since there can only be one parent + * SpanContext, Link is used to keep reference to SpanContext of all + * elements in the batch. + * 2. Public Endpoint: A SpanContext in incoming client request on a public + * endpoint is untrusted from service provider perspective. In such case it + * is advisable to start a new trace with appropriate sampling decision. + * However, it is desirable to associate incoming SpanContext to new trace + * initiated on service provider side so two traces (from Client and from + * Service Provider) can be correlated. + */ +export interface Link { + /** The {@link SpanContext} of a linked span. */ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. 
*/ + attributes?: SpanAttributes; +} +//# sourceMappingURL=link.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.js b/node_modules/@opentelemetry/api/build/esm/trace/link.js new file mode 100644 index 0000000000..7c8accbe1b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.js.map b/node_modules/@opentelemetry/api/build/esm/trace/link.js.map new file mode 100644 index 0000000000..9fb530897f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/link.js.map @@ -0,0 +1 @@ +{"version":3,"file":"link.js","sourceRoot":"","sources":["../../../src/trace/link.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { SpanContext } from './span_context';\n\n/**\n * A pointer from the current {@link Span} to another span in the same trace or\n * in a different trace.\n * Few examples of Link usage.\n * 1. Batch Processing: A batch of elements may contain elements associated\n * with one or more traces/spans. Since there can only be one parent\n * SpanContext, Link is used to keep reference to SpanContext of all\n * elements in the batch.\n * 2. Public Endpoint: A SpanContext in incoming client request on a public\n * endpoint is untrusted from service provider perspective. In such case it\n * is advisable to start a new trace with appropriate sampling decision.\n * However, it is desirable to associate incoming SpanContext to new trace\n * initiated on service provider side so two traces (from Client and from\n * Service Provider) can be correlated.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n /** A set of {@link SpanAttributes} on the link. 
*/\n attributes?: SpanAttributes;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts new file mode 100644 index 0000000000..c5f28141c0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.d.ts @@ -0,0 +1,101 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes, SpanAttributeValue } from './attributes'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key the key for this attribute. + * @param value the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name the name of the event. + * @param [attributesOrStartTime] the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is {@type TimeInput} and 3rd param is undefined + * @param [startTime] start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name the Span name. + */ + updateName(name: string): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param [endTime] the time to set as Span's end time. If not provided, + * use the current time as the span's end time. 
+ */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception the exception the only accepted values are string or Error + * @param [time] the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; +} +//# sourceMappingURL=span.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.js b/node_modules/@opentelemetry/api/build/esm/trace/span.js new file mode 100644 index 0000000000..f41c7f6f52 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span.js.map new file mode 100644 index 0000000000..0a4f10247c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span.js","sourceRoot":"","sources":["../../../src/trace/span.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes, SpanAttributeValue } from './attributes';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. 
Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key the key for this attribute.\n * @param value the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n\n /**\n * Sets attributes to the span.\n *\n * @param attributes the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n\n /**\n * Adds an event to the Span.\n *\n * @param name the name of the event.\n * @param [attributesOrStartTime] the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is {@type TimeInput} and 3rd param is undefined\n * @param [startTime] start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name the Span name.\n */\n updateName(name: string): this;\n\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param [endTime] the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception the exception the only accepted values are string or Error\n * @param [time] the time to set as Span's event time. If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts new file mode 100644 index 0000000000..f30933a1fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.d.ts @@ -0,0 +1,53 @@ +import { TraceState } from './trace_state'; +/** + * A SpanContext represents the portion of a {@link Span} which must be + * serialized and propagated along side of a {@link Baggage}. + */ +export interface SpanContext { + /** + * The ID of the trace that this span belongs to. 
It is worldwide unique + * with practically sufficient probability by being made as 16 randomly + * generated bytes, encoded as a 32 lowercase hex characters corresponding to + * 128 bits. + */ + traceId: string; + /** + * The ID of the Span. It is globally unique with practically sufficient + * probability by being made as 8 randomly generated bytes, encoded as a 16 + * lowercase hex characters corresponding to 64 bits. + */ + spanId: string; + /** + * Only true if the SpanContext was propagated from a remote parent. + */ + isRemote?: boolean; + /** + * Trace flags to propagate. + * + * It is represented as 1 byte (bitmap). Bit to represent whether trace is + * sampled or not. When set, the least significant bit documents that the + * caller may have recorded trace data. A caller who does not record trace + * data out-of-band leaves this flag unset. + * + * see {@link TraceFlags} for valid flag values. + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} +//# sourceMappingURL=span_context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js new file mode 100644 index 0000000000..1bb88b0d7b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +export {}; +//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map new file mode 100644 index 0000000000..dbf0bfe523 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_context.js","sourceRoot":"","sources":["../../../src/trace/span_context.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from './trace_state';\n\n/**\n * A SpanContext represents the portion of a {@link Span} which must be\n * serialized and propagated along side of a {@link Baggage}.\n */\nexport interface SpanContext {\n /**\n * The ID of the trace that this span belongs to. It is worldwide unique\n * with practically sufficient probability by being made as 16 randomly\n * generated bytes, encoded as a 32 lowercase hex characters corresponding to\n * 128 bits.\n */\n traceId: string;\n /**\n * The ID of the Span. It is globally unique with practically sufficient\n * probability by being made as 8 randomly generated bytes, encoded as a 16\n * lowercase hex characters corresponding to 64 bits.\n */\n spanId: string;\n /**\n * Only true if the SpanContext was propagated from a remote parent.\n */\n isRemote?: boolean;\n /**\n * Trace flags to propagate.\n *\n * It is represented as 1 byte (bitmap). Bit to represent whether trace is\n * sampled or not. When set, the least significant bit documents that the\n * caller may have recorded trace data. A caller who does not record trace\n * data out-of-band leaves this flag unset.\n *\n * see {@link TraceFlags} for valid flag values.\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts new file mode 100644 index 0000000000..a89846f656 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.d.ts @@ -0,0 +1,27 @@ +export declare enum SpanKind { + /** Default value. Indicates that the span is used internally. 
*/ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} +//# sourceMappingURL=span_kind.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js new file mode 100644 index 0000000000..1119df92f7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js @@ -0,0 +1,43 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind || (SpanKind = {})); +//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map new file mode 100644 index 0000000000..deb6be7321 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_kind.js","sourceRoot":"","sources":["../../../src/trace/span_kind.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,QA6BX;AA7BD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IAEZ;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;;OAIG;IACH,+CAAY,CAAA;IAEZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EA7BW,QAAQ,KAAR,QAAQ,QA6BnB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts new file mode 100644 index 0000000000..f191111487 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.d.ts @@ -0,0 +1,17 @@ +import { Span } from './span'; +import { SpanContext } from './span_context'; +export declare function isValidTraceId(traceId: string): boolean; +export declare function isValidSpanId(spanId: string): boolean; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export declare function isSpanContextValid(spanContext: SpanContext): boolean; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export declare function wrapSpanContext(spanContext: SpanContext): Span; +//# sourceMappingURL=spancontext-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js new file mode 100644 index 0000000000..88545bb587 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js @@ -0,0 +1,42 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants'; +import { NonRecordingSpan } from './NonRecordingSpan'; +var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +export function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID; +} +export function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID; +} +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export function wrapSpanContext(spanContext) { + return new NonRecordingSpan(spanContext); +} +//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map new file mode 100644 index 0000000000..a4dc6c2c32 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"spancontext-utils.js","sourceRoot":"","sources":["../../../src/trace/spancontext-utils.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,OAAO,EAAE,cAAc,EAAE,eAAe,EAAE,MAAM,0BAA0B,CAAC;AAC3E,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAItD,IAAM,mBAAmB,GAAG,mBAAmB,CAAC;AAChD,IAAM,kBAAkB,GAAG,iBAAiB,CAAC;AAE7C,MAAM,UAAU,cAAc,CAAC,OAAe;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,OAAO,KAAK,eAAe,CAAC;AAC1E,CAAC;AAED,MAAM,UAAU,aAAa,CAAC,MAAc;IAC1C,OAAO,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,KAAK,cAAc,CAAC;AACtE,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,kBAAkB,CAAC,WAAwB;IACzD,OAAO,CACL,cAAc,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CACzE,CAAC;AACJ,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,eAAe,CAAC,WAAwB;IACtD,OAAO,IAAI,gBAAgB,CAAC,WAAW,CAAC,CAAC;AAC3C,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\n\nconst VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nconst VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\n\nexport function isValidTraceId(traceId: string): boolean {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID;\n}\n\nexport function isValidSpanId(spanId: string): boolean {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID;\n}\n\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nexport function isSpanContextValid(spanContext: SpanContext): boolean {\n return (\n isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)\n );\n}\n\n/**\n * Wrap the given {@link SpanContext} in a new non-recording {@link Span}\n *\n * @param spanContext span context to be wrapped\n * @returns a new non-recording {@link Span} with the provided context\n */\nexport function wrapSpanContext(spanContext: SpanContext): Span {\n return new NonRecordingSpan(spanContext);\n}\n"]} \ No newline at end of file diff --git 
a/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts new file mode 100644 index 0000000000..ab19a68f72 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.d.ts @@ -0,0 +1,25 @@ +export interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} +/** + * An enumeration of status codes. + */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} +//# sourceMappingURL=status.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/status.js b/node_modules/@opentelemetry/api/build/esm/trace/status.js new file mode 100644 index 0000000000..5ee55e42f0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.js @@ -0,0 +1,20 @@ +/** + * An enumeration of status codes. + */ +export var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode || (SpanStatusCode = {})); +//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/status.js.map b/node_modules/@opentelemetry/api/build/esm/trace/status.js.map new file mode 100644 index 0000000000..af7e7d7f40 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/status.js.map @@ -0,0 +1 @@ +{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/trace/status.ts"],"names":[],"mappings":"AAsBA;;GAEG;AACH,MAAM,CAAN,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,KAAd,cAAc,QAczB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. 
*/\n message?: string;\n}\n\n/**\n * An enumeration of status codes.\n */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts new file mode 100644 index 0000000000..11288ba95a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.d.ts @@ -0,0 +1,7 @@ +export declare enum TraceFlags { + /** Represents no flag set. */ + NONE = 0, + /** Bit to represent whether trace is sampled in trace flags. */ + SAMPLED = 1 +} +//# sourceMappingURL=trace_flags.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js new file mode 100644 index 0000000000..8a7b000722 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js @@ -0,0 +1,23 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. */ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags || (TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map new file mode 100644 index 0000000000..2ea8680d04 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_flags.js","sourceRoot":"","sources":["../../../src/trace/trace_flags.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAN,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,8BAA8B;IAC9B,2CAAU,CAAA;IACV,gEAAgE;IAChE,iDAAkB,CAAA;AACpB,CAAC,EALW,UAAU,KAAV,UAAU,QAKrB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum TraceFlags {\n /** Represents no flag set. */\n NONE = 0x0,\n /** Bit to represent whether trace is sampled in trace flags. 
*/\n SAMPLED = 0x1 << 0,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts new file mode 100644 index 0000000000..f275b8bed6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.d.ts @@ -0,0 +1,38 @@ +export interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key key of the TraceState entry. + * @param value value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. + */ + serialize(): string; +} +//# sourceMappingURL=trace_state.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js new file mode 100644 index 0000000000..a6c368f88d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
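Since `SpanStatusCode` and `TraceFlags` above are plain numeric enums, they compose with ordinary bit tests and status objects. A minimal sketch, assuming the package is used without an SDK registered (every span below is then a harmless no-op):

```ts
import { trace, SpanStatusCode, TraceFlags } from '@opentelemetry/api';

const span = trace.getTracer('example').startSpan('work');

// SAMPLED is bit 0 of traceFlags, so a bitwise AND tests the sampling decision.
const sampled = (span.spanContext().traceFlags & TraceFlags.SAMPLED) !== 0;
console.log('sampled?', sampled);

try {
  // ... do the actual work here ...
  span.setStatus({ code: SpanStatusCode.OK });
} catch (err) {
  span.setStatus({ code: SpanStatusCode.ERROR, message: (err as Error).message });
} finally {
  span.end();
}
```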
+ */ +export {}; +//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map new file mode 100644 index 0000000000..64a3d7a2ea --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_state.js","sourceRoot":"","sources":["../../../src/trace/trace_state.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key key of the TraceState entry.\n * @param value value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n\n // TODO: Consider to add support for merging an object as well by also\n // accepting a single internalTraceState argument similar to the constructor.\n\n /**\n * Serializes the TraceState to a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts new file mode 100644 index 0000000000..25090899e8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.d.ts @@ -0,0 +1,71 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +/** + * Tracer provides an interface for creating {@link Span}s. + */ +export interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method do NOT modify the current Context. 
+ * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @returns Span The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally the new span gets set in context and this context is activated + * for the duration of the function call. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.startActiveSpan('op', span => { + * try { + * do some work + * span.setStatus({code: SpanStatusCode.OK}); + * return something; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } finally { + * span.end(); + * } + * }); + * + * @example + * const span = tracer.startActiveSpan('op', span => { + * try { + * do some work + * return span; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } + * }); + * do some more work + * span.end(); + */ + startActiveSpan<F extends (span: Span) => unknown>(name: string, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, fn: F): ReturnType<F>; + startActiveSpan<F extends (span: Span) => unknown>(name: string, options: SpanOptions, context: Context, fn: F): ReturnType<F>; +} +//# sourceMappingURL=tracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js new file mode 100644 index 0000000000..ad066dc3f8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
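The `Tracer` interface above distinguishes `startSpan`, which leaves the active context untouched, from `startActiveSpan`, which activates the new span for the duration of the callback. A hedged usage sketch adapted from the JSDoc examples in this hunk; the tracer name and version are illustrative, and without an SDK registered these calls are no-ops.

```ts
import { trace, SpanStatusCode } from '@opentelemetry/api';

const tracer = trace.getTracer('example-lib', '0.1.0');

// startSpan: the span is NOT set on the active context; end it yourself.
const manual = tracer.startSpan('manual-op');
manual.setAttribute('key', 'value');
manual.end();

// startActiveSpan: the span is active while the callback runs, so nested
// startSpan calls pick it up as their parent (once an SDK provides a manager).
const result = tracer.startActiveSpan('active-op', span => {
  try {
    const child = tracer.startSpan('child'); // parented to 'active-op'
    child.end();
    return 42;
  } catch (err) {
    span.setStatus({ code: SpanStatusCode.ERROR, message: (err as Error).message });
    throw err;
  } finally {
    span.end();
  }
});
console.log(result);
```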
+ */ +export {}; +//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map new file mode 100644 index 0000000000..77f6ae9390 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer.js","sourceRoot":"","sources":["../../../src/trace/tracer.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\n\n/**\n * Tracer provides an interface for creating {@link Span}s.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. Start the span without setting it on context.\n *\n * This method do NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @returns Span The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally the new span gets set in context and this context is activated\n * for the duration of the function call.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * span.setStatus({code: SpanStatusCode.OK});\n * return something;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * } finally {\n * span.end();\n * }\n * });\n *\n * @example\n * const span = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * return span;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * }\n * });\n * do some more work\n * span.end();\n */\n startActiveSpan unknown>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts new file mode 100644 index 
0000000000..f3bbccfc25 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.d.ts @@ -0,0 +1,10 @@ +/** + * An interface describes additional metadata of a tracer. + */ +export interface TracerOptions { + /** + * The schemaUrl of the tracer or instrumentation library + */ + schemaUrl?: string; +} +//# sourceMappingURL=tracer_options.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js new file mode 100644 index 0000000000..470a3a732b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map new file mode 100644 index 0000000000..70365afc69 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_options.js","sourceRoot":"","sources":["../../../src/trace/tracer_options.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * An interface describes additional metadata of a tracer.\n */\nexport interface TracerOptions {\n /**\n * The schemaUrl of the tracer or instrumentation library\n */\n schemaUrl?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts new file mode 100644 index 0000000000..9b2f7a954c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.d.ts @@ -0,0 +1,21 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * A registry for creating named {@link Tracer}s. + */ +export interface TracerProvider { + /** + * Returns a Tracer, creating one if one with the given name and version is + * not already created. + * + * This function may return different Tracer types (e.g. + * {@link NoopTracerProvider} vs. a functional tracer). + * + * @param name The name of the tracer or instrumentation library. 
+ * @param version The version of the tracer or instrumentation library. + * @param options The options of the tracer or instrumentation library. + * @returns Tracer A Tracer with the given name and version + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; +} +//# sourceMappingURL=tracer_provider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js new file mode 100644 index 0000000000..adf432a659 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js @@ -0,0 +1,17 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export {}; +//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map new file mode 100644 index 0000000000..bfc1cbd109 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_provider.js","sourceRoot":"","sources":["../../../src/trace/tracer_provider.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\n/**\n * A registry for creating named {@link Tracer}s.\n */\nexport interface TracerProvider {\n /**\n * Returns a Tracer, creating one if one with the given name and version is\n * not already created.\n *\n * This function may return different Tracer types (e.g.\n * {@link NoopTracerProvider} vs. 
a functional tracer).\n *\n * @param name The name of the tracer or instrumentation library.\n * @param version The version of the tracer or instrumentation library.\n * @param options The options of the tracer or instrumentation library.\n * @returns Tracer A Tracer with the given name and version\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.d.ts b/node_modules/@opentelemetry/api/build/esm/version.d.ts new file mode 100644 index 0000000000..28a7146cad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.d.ts @@ -0,0 +1,2 @@ +export declare const VERSION = "1.1.0"; +//# sourceMappingURL=version.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.js b/node_modules/@opentelemetry/api/build/esm/version.js new file mode 100644 index 0000000000..385005d374 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.js @@ -0,0 +1,18 @@ +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// this is autogenerated file, see scripts/version-update.js +export var VERSION = '1.1.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.js.map b/node_modules/@opentelemetry/api/build/esm/version.js.map new file mode 100644 index 0000000000..fdccd0e2a8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/esm/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;GAcG;AAEH,4DAA4D;AAC5D,MAAM,CAAC,IAAM,OAAO,GAAG,OAAO,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// this is autogenerated file, see scripts/version-update.js\nexport const VERSION = '1.1.0';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.d.ts b/node_modules/@opentelemetry/api/build/src/api/context.d.ts new file mode 100644 index 0000000000..61caee8dab --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.d.ts @@ -0,0 +1,41 @@ +import { Context, ContextManager } from '../context/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +export declare class ContextAPI { + private static _instance?; + 
/** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Context API */ + static getInstance(): ContextAPI; + /** + * Set the current context manager. + * + * @returns true if the context manager was successfully registered, else false + */ + setGlobalContextManager(contextManager: ContextManager): boolean; + /** + * Get the currently active context + */ + active(): Context; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with<A extends unknown[], F extends (...args: A) => ReturnType<F>>(context: Context, fn: F, thisArg?: ThisParameterType<F>, ...args: A): ReturnType<F>; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + bind<T>(context: Context, target: T): T; + private _getContextManager; + /** Disable and remove the global context manager */ + disable(): void; +} +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.js b/node_modules/@opentelemetry/api/build/src/api/context.js new file mode 100644 index 0000000000..f6580a7a65 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.js @@ -0,0 +1,93 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ContextAPI = void 0; +var NoopContextManager_1 = require("../context/NoopContextManager"); +var global_utils_1 = require("../internal/global-utils"); +var diag_1 = require("./diag"); +var API_NAME = 'context'; +var NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Context API + */ +var ContextAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function ContextAPI() { + } + /** Get the singleton instance of the Context API */ + ContextAPI.getInstance = function () { + if (!this._instance) { + this._instance = new ContextAPI(); + } + return this._instance; + }; + /** + * Set the current context manager.
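For the `ContextAPI` declared above, the same package exports `createContextKey` and `ROOT_CONTEXT`, which is all the plumbing a usage sketch needs. This is a hedged example; note that the bare API ships a no-op context manager, so the value only becomes visible through `context.active()` once an SDK registers a real manager.

```ts
import { context, createContextKey, ROOT_CONTEXT } from '@opentelemetry/api';

// Context keys are opaque; the description string is only for debugging.
const TENANT_KEY = createContextKey('example.tenant');

const ctx = ROOT_CONTEXT.setValue(TENANT_KEY, 'acme');

// With a real ContextManager registered (e.g. by an SDK), the value set above
// is visible via context.active() inside the callback. The no-op manager that
// ships with the bare API simply invokes the callback, so this logs undefined.
context.with(ctx, () => {
  console.log(context.active().getValue(TENANT_KEY));
});

// bind() returns a function that always runs with the given context active.
const bound = context.bind(ctx, () => context.active().getValue(TENANT_KEY));
console.log(bound());
```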
+ * + * @returns true if the context manager was successfully registered, else false + */ + ContextAPI.prototype.setGlobalContextManager = function (contextManager) { + return global_utils_1.registerGlobal(API_NAME, contextManager, diag_1.DiagAPI.instance()); + }; + /** + * Get the currently active context + */ + ContextAPI.prototype.active = function () { + return this._getContextManager().active(); + }; + /** + * Execute a function with an active context + * + * @param context context to be active during function execution + * @param fn function to execute in a context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + ContextAPI.prototype.with = function (context, fn, thisArg) { + var _a; + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], args)); + }; + /** + * Bind a context to a target function or event emitter + * + * @param context context to bind to the event emitter or function. Defaults to the currently active context + * @param target function or event emitter to bind + */ + ContextAPI.prototype.bind = function (context, target) { + return this._getContextManager().bind(context, target); + }; + ContextAPI.prototype._getContextManager = function () { + return global_utils_1.getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; + }; + /** Disable and remove the global context manager */ + ContextAPI.prototype.disable = function () { + this._getContextManager().disable(); + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + }; + return ContextAPI; +}()); +exports.ContextAPI = ContextAPI; +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.js.map b/node_modules/@opentelemetry/api/build/src/api/context.js.map new file mode 100644 index 0000000000..c6616145fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/api/context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;AAEH,oEAAmE;AAEnE,yDAIkC;AAClC,+BAAiC;AAEjC,IAAM,QAAQ,GAAG,SAAS,CAAC;AAC3B,IAAM,oBAAoB,GAAG,IAAI,uCAAkB,EAAE,CAAC;AAEtD;;GAEG;AACH;IAGE,+FAA+F;IAC/F;IAAuB,CAAC;IAExB,oDAAoD;IACtC,sBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,UAAU,EAAE,CAAC;SACnC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAuB,GAA9B,UAA+B,cAA8B;QAC3D,OAAO,6BAAc,CAAC,QAAQ,EAAE,cAAc,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACI,2BAAM,GAAb;QACE,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC5C,CAAC;IAED;;;;;;;OAOG;IACI,yBAAI,GAAX,UACE,OAAgB,EAChB,EAAK,EACL,OAA8B;;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,CAAA,KAAA,IAAI,CAAC,kBAAkB,EAAE,CAAA,CAAC,IAAI,0BAAC,OAAO,EAAE,EAAE,EAAE,OAAO,GAAK,IAAI,GAAE;IACvE,CAAC;IAED;;;;;OAKG;IACI,yBAAI,GAAX,UAAe,OAAgB,EAAE,MAAS;QACxC,OAAO,IAAI,CAAC,kBAAkB,EAAE,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACzD,CAAC;IAEO,uCAAkB,GAA1B;QACE,OAAO,wBAAS,CAAC,QAAQ,CAAC,IAAI,oBAAoB,CAAC;IACrD,CAAC;IAED,oDAAoD;IAC7C,4BAAO,GAAd;QACE,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,EAAE,CAAC;QACpC,+BAAgB,CAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IACH,iBAAC;AAAD,CAAC,AAnED,IAmEC;AAnEY,gCAAU","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * 
you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopContextManager } from '../context/NoopContextManager';\nimport { Context, ContextManager } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'context';\nconst NOOP_CONTEXT_MANAGER = new NoopContextManager();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Context API\n */\nexport class ContextAPI {\n private static _instance?: ContextAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Context API */\n public static getInstance(): ContextAPI {\n if (!this._instance) {\n this._instance = new ContextAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current context manager.\n *\n * @returns true if the context manager was successfully registered, else false\n */\n public setGlobalContextManager(contextManager: ContextManager): boolean {\n return registerGlobal(API_NAME, contextManager, DiagAPI.instance());\n }\n\n /**\n * Get the currently active context\n */\n public active(): Context {\n return this._getContextManager().active();\n }\n\n /**\n * Execute a function with an active context\n *\n * @param context context to be active during function execution\n * @param fn function to execute in a context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n public with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return this._getContextManager().with(context, fn, thisArg, ...args);\n }\n\n /**\n * Bind a context to a target function or event emitter\n *\n * @param context context to bind to the event emitter or function. 
Defaults to the currently active context\n * @param target function or event emitter to bind\n */\n public bind(context: Context, target: T): T {\n return this._getContextManager().bind(context, target);\n }\n\n private _getContextManager(): ContextManager {\n return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER;\n }\n\n /** Disable and remove the global context manager */\n public disable() {\n this._getContextManager().disable();\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.d.ts b/node_modules/@opentelemetry/api/build/src/api/diag.d.ts new file mode 100644 index 0000000000..5c8c0befe5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.d.ts @@ -0,0 +1,38 @@ +import { ComponentLoggerOptions, DiagLogFunction, DiagLogger, DiagLogLevel } from '../diag/types'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +export declare class DiagAPI implements DiagLogger { + private static _instance?; + /** Get the singleton instance of the DiagAPI API */ + static instance(): DiagAPI; + /** + * Private internal constructor + * @private + */ + private constructor(); + /** + * Set the global DiagLogger and DiagLogLevel. + * If a global diag logger is already set, this will override it. + * + * @param logger - [Optional] The DiagLogger instance to set as the default logger. + * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. If not provided it will default to INFO. + * @returns true if the logger was successfully registered, else false + */ + setLogger: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean; + /** + * + */ + createComponentLogger: (options: ComponentLoggerOptions) => DiagLogger; + verbose: DiagLogFunction; + debug: DiagLogFunction; + info: DiagLogFunction; + warn: DiagLogFunction; + error: DiagLogFunction; + /** + * Unregister the global logger and return to Noop + */ + disable: () => void; +} +//# sourceMappingURL=diag.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.js b/node_modules/@opentelemetry/api/build/src/api/diag.js new file mode 100644 index 0000000000..54c3d3cd47 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.js @@ -0,0 +1,93 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
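The `DiagAPI` declared above is the package's internal diagnostics channel. A small hedged sketch of wiring it up with the `DiagConsoleLogger` the same package exports; the component namespace string is an arbitrary example.

```ts
import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

// Register a global diag logger; returns false if registration is refused.
const ok = diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);
console.log('diag logger registered?', ok);

// Component loggers prefix their output with a namespace of your choosing.
const logger = diag.createComponentLogger({ namespace: 'example-component' });
logger.debug('component initialised');

// Messages above the configured level are filtered before reaching the logger.
diag.warn('this is forwarded at DEBUG level');
diag.verbose('this is dropped at DEBUG level');
```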
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagAPI = void 0; +var ComponentLogger_1 = require("../diag/ComponentLogger"); +var logLevelLogger_1 = require("../diag/internal/logLevelLogger"); +var types_1 = require("../diag/types"); +var global_utils_1 = require("../internal/global-utils"); +var API_NAME = 'diag'; +/** + * Singleton object which represents the entry point to the OpenTelemetry internal + * diagnostic API + */ +var DiagAPI = /** @class */ (function () { + /** + * Private internal constructor + * @private + */ + function DiagAPI() { + function _logProxy(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + var logger = global_utils_1.getGlobal('diag'); + // shortcut if logger not set + if (!logger) + return; + return logger[funcName].apply(logger, args); + }; + } + // Using self local variable for minification purposes as 'this' cannot be minified + var self = this; + // DiagAPI specific functions + self.setLogger = function (logger, logLevel) { + var _a, _b; + if (logLevel === void 0) { logLevel = types_1.DiagLogLevel.INFO; } + if (logger === self) { + // There isn't much we can do here. + // Logging to the console might break the user application. + // Try to log to self. If a logger was previously registered it will receive the log. + var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); + self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); + return false; + } + var oldLogger = global_utils_1.getGlobal('diag'); + var newLogger = logLevelLogger_1.createLogLevelDiagLogger(logLevel, logger); + // There already is an logger registered. We'll let it know before overwriting it. + if (oldLogger) { + var stack = (_b = new Error().stack) !== null && _b !== void 0 ? 
_b : ''; + oldLogger.warn("Current logger will be overwritten from " + stack); + newLogger.warn("Current logger will overwrite one already registered from " + stack); + } + return global_utils_1.registerGlobal('diag', newLogger, self, true); + }; + self.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, self); + }; + self.createComponentLogger = function (options) { + return new ComponentLogger_1.DiagComponentLogger(options); + }; + self.verbose = _logProxy('verbose'); + self.debug = _logProxy('debug'); + self.info = _logProxy('info'); + self.warn = _logProxy('warn'); + self.error = _logProxy('error'); + } + /** Get the singleton instance of the DiagAPI API */ + DiagAPI.instance = function () { + if (!this._instance) { + this._instance = new DiagAPI(); + } + return this._instance; + }; + return DiagAPI; +}()); +exports.DiagAPI = DiagAPI; +//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.js.map b/node_modules/@opentelemetry/api/build/src/api/diag.js.map new file mode 100644 index 0000000000..8c9eebd8a7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/diag.js.map @@ -0,0 +1 @@ +{"version":3,"file":"diag.js","sourceRoot":"","sources":["../../../src/api/diag.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2DAA8D;AAC9D,kEAA2E;AAC3E,uCAKuB;AACvB,yDAIkC;AAElC,IAAM,QAAQ,GAAG,MAAM,CAAC;AAExB;;;GAGG;AACH;IAYE;;;OAGG;IACH;QACE,SAAS,SAAS,CAAC,QAA0B;YAC3C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAM,MAAM,GAAG,wBAAS,CAAC,MAAM,CAAC,CAAC;gBACjC,6BAA6B;gBAC7B,IAAI,CAAC,MAAM;oBAAE,OAAO;gBACpB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAc,IAAI,EAAE;YACnC,CAAC,CAAC;QACJ,CAAC;QAED,mFAAmF;QACnF,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,6BAA6B;QAE7B,IAAI,CAAC,SAAS,GAAG,UACf,MAAkB,EAClB,QAA0C;;YAA1C,yBAAA,EAAA,WAAyB,oBAAY,CAAC,IAAI;YAE1C,IAAI,MAAM,KAAK,IAAI,EAAE;gBACnB,mCAAmC;gBACnC,2DAA2D;gBAC3D,qFAAqF;gBACrF,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,oIAAoI,CACrI,CAAC;gBACF,IAAI,CAAC,KAAK,CAAC,MAAA,GAAG,CAAC,KAAK,mCAAI,GAAG,CAAC,OAAO,CAAC,CAAC;gBACrC,OAAO,KAAK,CAAC;aACd;YAED,IAAM,SAAS,GAAG,wBAAS,CAAC,MAAM,CAAC,CAAC;YACpC,IAAM,SAAS,GAAG,yCAAwB,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;YAC7D,kFAAkF;YAClF,IAAI,SAAS,EAAE;gBACb,IAAM,KAAK,GAAG,MAAA,IAAI,KAAK,EAAE,CAAC,KAAK,mCAAI,iCAAiC,CAAC;gBACrE,SAAS,CAAC,IAAI,CAAC,6CAA2C,KAAO,CAAC,CAAC;gBACnE,SAAS,CAAC,IAAI,CACZ,+DAA6D,KAAO,CACrE,CAAC;aACH;YAED,OAAO,6BAAc,CAAC,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC;QACvD,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG;YACb,+BAAgB,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;QACnC,CAAC,CAAC;QAEF,IAAI,CAAC,qBAAqB,GAAG,UAAC,OAA+B;YAC3D,OAAO,IAAI,qCAAmB,CAAC,OAAO,CAAC,CAAC;QAC1C,CAAC,CAAC;QAEF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAC,SAAS,CAAC,CAAC;QACpC,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;QAChC,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,IAAI,GAAG,SAAS,CAAC,MAAM,CAAC,CAAC;QAC9B,IAAI,CAAC,KAAK,GAAG,SAAS,CAAC,OAAO,CAAC,CAAC;IAClC,CAAC;IAtED,oDAAoD;IACtC,gBAAQ,GAAtB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,OAAO,EAAE,CAAC;SAChC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IA4FH,cAAC;AAAD,CAAC,AAtGD,IAsGC;AAtGY,0BAAO","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the 
License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagComponentLogger } from '../diag/ComponentLogger';\nimport { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger';\nimport {\n ComponentLoggerOptions,\n DiagLogFunction,\n DiagLogger,\n DiagLogLevel,\n} from '../diag/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\n\nconst API_NAME = 'diag';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry internal\n * diagnostic API\n */\nexport class DiagAPI implements DiagLogger {\n private static _instance?: DiagAPI;\n\n /** Get the singleton instance of the DiagAPI API */\n public static instance(): DiagAPI {\n if (!this._instance) {\n this._instance = new DiagAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Private internal constructor\n * @private\n */\n private constructor() {\n function _logProxy(funcName: keyof DiagLogger): DiagLogFunction {\n return function (...args) {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) return;\n return logger[funcName](...args);\n };\n }\n\n // Using self local variable for minification purposes as 'this' cannot be minified\n const self = this;\n\n // DiagAPI specific functions\n\n self.setLogger = (\n logger: DiagLogger,\n logLevel: DiagLogLevel = DiagLogLevel.INFO\n ) => {\n if (logger === self) {\n // There isn't much we can do here.\n // Logging to the console might break the user application.\n // Try to log to self. If a logger was previously registered it will receive the log.\n const err = new Error(\n 'Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'\n );\n self.error(err.stack ?? err.message);\n return false;\n }\n\n const oldLogger = getGlobal('diag');\n const newLogger = createLogLevelDiagLogger(logLevel, logger);\n // There already is an logger registered. We'll let it know before overwriting it.\n if (oldLogger) {\n const stack = new Error().stack ?? '';\n oldLogger.warn(`Current logger will be overwritten from ${stack}`);\n newLogger.warn(\n `Current logger will overwrite one already registered from ${stack}`\n );\n }\n\n return registerGlobal('diag', newLogger, self, true);\n };\n\n self.disable = () => {\n unregisterGlobal(API_NAME, self);\n };\n\n self.createComponentLogger = (options: ComponentLoggerOptions) => {\n return new DiagComponentLogger(options);\n };\n\n self.verbose = _logProxy('verbose');\n self.debug = _logProxy('debug');\n self.info = _logProxy('info');\n self.warn = _logProxy('warn');\n self.error = _logProxy('error');\n }\n\n /**\n * Set the global DiagLogger and DiagLogLevel.\n * If a global diag logger is already set, this will override it.\n *\n * @param logger - [Optional] The DiagLogger instance to set as the default logger.\n * @param logLevel - [Optional] The DiagLogLevel used to filter logs sent to the logger. 
If not provided it will default to INFO.\n * @returns true if the logger was successfully registered, else false\n */\n public setLogger!: (logger: DiagLogger, logLevel?: DiagLogLevel) => boolean;\n /**\n *\n */\n public createComponentLogger!: (\n options: ComponentLoggerOptions\n ) => DiagLogger;\n\n // DiagLogger implementation\n public verbose!: DiagLogFunction;\n public debug!: DiagLogFunction;\n public info!: DiagLogFunction;\n public warn!: DiagLogFunction;\n public error!: DiagLogFunction;\n\n /**\n * Unregister the global logger and return to Noop\n */\n public disable!: () => void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts b/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts new file mode 100644 index 0000000000..cf372cf20f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.d.ts @@ -0,0 +1,48 @@ +import { Context } from '../context/types'; +import { TextMapGetter, TextMapPropagator, TextMapSetter } from '../propagation/TextMapPropagator'; +import { getBaggage, setBaggage, deleteBaggage } from '../baggage/context-helpers'; +import { createBaggage } from '../baggage/utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +export declare class PropagationAPI { + private static _instance?; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Propagator API */ + static getInstance(): PropagationAPI; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + setGlobalPropagator(propagator: TextMapPropagator): boolean; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + inject<Carrier>(context: Context, carrier: Carrier, setter?: TextMapSetter<Carrier>): void; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + extract<Carrier>(context: Context, carrier: Carrier, getter?: TextMapGetter<Carrier>): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; + /** Remove the global propagator */ + disable(): void; + createBaggage: typeof createBaggage; + getBaggage: typeof getBaggage; + setBaggage: typeof setBaggage; + deleteBaggage: typeof deleteBaggage; + private _getGlobalPropagator; +} +//# sourceMappingURL=propagation.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.js b/node_modules/@opentelemetry/api/build/src/api/propagation.js new file mode 100644 index 0000000000..f8ef934379 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.js @@ -0,0 +1,91 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
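The `PropagationAPI` above moves context in and out of plain carrier objects such as header maps. A hedged sketch follows: the bare API installs a no-op propagator, so `inject` writes nothing until a concrete one (for example `W3CTraceContextPropagator` from `@opentelemetry/core`) is registered via `setGlobalPropagator`, and the `traceparent` value shown is only an illustration.

```ts
import { context, propagation } from '@opentelemetry/api';

// Outgoing request: copy the active context into a header map.
const outgoing: Record<string, string> = {};
propagation.inject(context.active(), outgoing);
console.log('injected headers:', outgoing, 'fields:', propagation.fields());

// Incoming request: rebuild a context from received headers (illustrative value).
const incomingHeaders = {
  traceparent: '00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01',
};
const extracted = propagation.extract(context.active(), incomingHeaders);

// The baggage helpers re-exported on the same API object.
const baggage = propagation.createBaggage({ 'example.tenant': { value: 'acme' } });
const withBaggage = propagation.setBaggage(extracted, baggage);
console.log(propagation.getBaggage(withBaggage)?.getEntry('example.tenant'));
```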
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PropagationAPI = void 0; +var global_utils_1 = require("../internal/global-utils"); +var NoopTextMapPropagator_1 = require("../propagation/NoopTextMapPropagator"); +var TextMapPropagator_1 = require("../propagation/TextMapPropagator"); +var context_helpers_1 = require("../baggage/context-helpers"); +var utils_1 = require("../baggage/utils"); +var diag_1 = require("./diag"); +var API_NAME = 'propagation'; +var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); +/** + * Singleton object which represents the entry point to the OpenTelemetry Propagation API + */ +var PropagationAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function PropagationAPI() { + this.createBaggage = utils_1.createBaggage; + this.getBaggage = context_helpers_1.getBaggage; + this.setBaggage = context_helpers_1.setBaggage; + this.deleteBaggage = context_helpers_1.deleteBaggage; + } + /** Get the singleton instance of the Propagator API */ + PropagationAPI.getInstance = function () { + if (!this._instance) { + this._instance = new PropagationAPI(); + } + return this._instance; + }; + /** + * Set the current propagator. + * + * @returns true if the propagator was successfully registered, else false + */ + PropagationAPI.prototype.setGlobalPropagator = function (propagator) { + return global_utils_1.registerGlobal(API_NAME, propagator, diag_1.DiagAPI.instance()); + }; + /** + * Inject context into a carrier to be propagated inter-process + * + * @param context Context carrying tracing data to inject + * @param carrier carrier to inject context into + * @param setter Function used to set values on the carrier + */ + PropagationAPI.prototype.inject = function (context, carrier, setter) { + if (setter === void 0) { setter = TextMapPropagator_1.defaultTextMapSetter; } + return this._getGlobalPropagator().inject(context, carrier, setter); + }; + /** + * Extract context from a carrier + * + * @param context Context which the newly created context will inherit from + * @param carrier Carrier to extract context from + * @param getter Function used to extract keys from a carrier + */ + PropagationAPI.prototype.extract = function (context, carrier, getter) { + if (getter === void 0) { getter = TextMapPropagator_1.defaultTextMapGetter; } + return this._getGlobalPropagator().extract(context, carrier, getter); + }; + /** + * Return a list of all fields which may be used by the propagator. 
+ */ + PropagationAPI.prototype.fields = function () { + return this._getGlobalPropagator().fields(); + }; + /** Remove the global propagator */ + PropagationAPI.prototype.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + }; + PropagationAPI.prototype._getGlobalPropagator = function () { + return global_utils_1.getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; + }; + return PropagationAPI; +}()); +exports.PropagationAPI = PropagationAPI; +//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.js.map b/node_modules/@opentelemetry/api/build/src/api/propagation.js.map new file mode 100644 index 0000000000..31abb05f05 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/propagation.js.map @@ -0,0 +1 @@ +{"version":3,"file":"propagation.js","sourceRoot":"","sources":["../../../src/api/propagation.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,yDAIkC;AAClC,8EAA6E;AAC7E,sEAM0C;AAC1C,8DAIoC;AACpC,0CAAiD;AACjD,+BAAiC;AAEjC,IAAM,QAAQ,GAAG,aAAa,CAAC;AAC/B,IAAM,wBAAwB,GAAG,IAAI,6CAAqB,EAAE,CAAC;AAE7D;;GAEG;AACH;IAGE,+FAA+F;IAC/F;QA8DO,kBAAa,GAAG,qBAAa,CAAC;QAE9B,eAAU,GAAG,4BAAU,CAAC;QAExB,eAAU,GAAG,4BAAU,CAAC;QAExB,kBAAa,GAAG,+BAAa,CAAC;IApEd,CAAC;IAExB,uDAAuD;IACzC,0BAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,cAAc,EAAE,CAAC;SACvC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,4CAAmB,GAA1B,UAA2B,UAA6B;QACtD,OAAO,6BAAc,CAAC,QAAQ,EAAE,UAAU,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IAClE,CAAC;IAED;;;;;;OAMG;IACI,+BAAM,GAAb,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,SAAiC,wCAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACtE,CAAC;IAED;;;;;;OAMG;IACI,gCAAO,GAAd,UACE,OAAgB,EAChB,OAAgB,EAChB,MAAqD;QAArD,uBAAA,EAAA,SAAiC,wCAAoB;QAErD,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,OAAO,CAAC,OAAO,EAAE,OAAO,EAAE,MAAM,CAAC,CAAC;IACvE,CAAC;IAED;;OAEG;IACI,+BAAM,GAAb;QACE,OAAO,IAAI,CAAC,oBAAoB,EAAE,CAAC,MAAM,EAAE,CAAC;IAC9C,CAAC;IAED,mCAAmC;IAC5B,gCAAO,GAAd;QACE,+BAAgB,CAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;IACjD,CAAC;IAUO,6CAAoB,GAA5B;QACE,OAAO,wBAAS,CAAC,QAAQ,CAAC,IAAI,wBAAwB,CAAC;IACzD,CAAC;IACH,qBAAC;AAAD,CAAC,AA7ED,IA6EC;AA7EY,wCAAc","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator';\nimport {\n defaultTextMapGetter,\n defaultTextMapSetter,\n TextMapGetter,\n TextMapPropagator,\n TextMapSetter,\n} from '../propagation/TextMapPropagator';\nimport {\n getBaggage,\n setBaggage,\n deleteBaggage,\n} from '../baggage/context-helpers';\nimport { createBaggage } from '../baggage/utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'propagation';\nconst NOOP_TEXT_MAP_PROPAGATOR = new 
NoopTextMapPropagator();\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Propagation API\n */\nexport class PropagationAPI {\n private static _instance?: PropagationAPI;\n\n /** Empty private constructor prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Propagator API */\n public static getInstance(): PropagationAPI {\n if (!this._instance) {\n this._instance = new PropagationAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current propagator.\n *\n * @returns true if the propagator was successfully registered, else false\n */\n public setGlobalPropagator(propagator: TextMapPropagator): boolean {\n return registerGlobal(API_NAME, propagator, DiagAPI.instance());\n }\n\n /**\n * Inject context into a carrier to be propagated inter-process\n *\n * @param context Context carrying tracing data to inject\n * @param carrier carrier to inject context into\n * @param setter Function used to set values on the carrier\n */\n public inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter = defaultTextMapSetter\n ): void {\n return this._getGlobalPropagator().inject(context, carrier, setter);\n }\n\n /**\n * Extract context from a carrier\n *\n * @param context Context which the newly created context will inherit from\n * @param carrier Carrier to extract context from\n * @param getter Function used to extract keys from a carrier\n */\n public extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter = defaultTextMapGetter\n ): Context {\n return this._getGlobalPropagator().extract(context, carrier, getter);\n }\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n public fields(): string[] {\n return this._getGlobalPropagator().fields();\n }\n\n /** Remove the global propagator */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n }\n\n public createBaggage = createBaggage;\n\n public getBaggage = getBaggage;\n\n public setBaggage = setBaggage;\n\n public deleteBaggage = deleteBaggage;\n\n private _getGlobalPropagator(): TextMapPropagator {\n return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.d.ts b/node_modules/@opentelemetry/api/build/src/api/trace.d.ts new file mode 100644 index 0000000000..9135b74b69 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.d.ts @@ -0,0 +1,39 @@ +import { isSpanContextValid, wrapSpanContext } from '../trace/spancontext-utils'; +import { Tracer } from '../trace/tracer'; +import { TracerProvider } from '../trace/tracer_provider'; +import { deleteSpan, getSpan, getSpanContext, setSpan, setSpanContext } from '../trace/context-utils'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +export declare class TraceAPI { + private static _instance?; + private _proxyTracerProvider; + /** Empty private constructor prevents end users from constructing a new instance of the API */ + private constructor(); + /** Get the singleton instance of the Trace API */ + static getInstance(): TraceAPI; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + setGlobalTracerProvider(provider: TracerProvider): boolean; + /** + * Returns the global tracer provider. 
+ */ + getTracerProvider(): TracerProvider; + /** + * Returns a tracer from the global tracer provider. + */ + getTracer(name: string, version?: string): Tracer; + /** Remove the global tracer provider */ + disable(): void; + wrapSpanContext: typeof wrapSpanContext; + isSpanContextValid: typeof isSpanContextValid; + deleteSpan: typeof deleteSpan; + getSpan: typeof getSpan; + getSpanContext: typeof getSpanContext; + setSpan: typeof setSpan; + setSpanContext: typeof setSpanContext; +} +//# sourceMappingURL=trace.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.js b/node_modules/@opentelemetry/api/build/src/api/trace.js new file mode 100644 index 0000000000..6adb3bd701 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.js @@ -0,0 +1,79 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceAPI = void 0; +var global_utils_1 = require("../internal/global-utils"); +var ProxyTracerProvider_1 = require("../trace/ProxyTracerProvider"); +var spancontext_utils_1 = require("../trace/spancontext-utils"); +var context_utils_1 = require("../trace/context-utils"); +var diag_1 = require("./diag"); +var API_NAME = 'trace'; +/** + * Singleton object which represents the entry point to the OpenTelemetry Tracing API + */ +var TraceAPI = /** @class */ (function () { + /** Empty private constructor prevents end users from constructing a new instance of the API */ + function TraceAPI() { + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; + this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; + this.deleteSpan = context_utils_1.deleteSpan; + this.getSpan = context_utils_1.getSpan; + this.getSpanContext = context_utils_1.getSpanContext; + this.setSpan = context_utils_1.setSpan; + this.setSpanContext = context_utils_1.setSpanContext; + } + /** Get the singleton instance of the Trace API */ + TraceAPI.getInstance = function () { + if (!this._instance) { + this._instance = new TraceAPI(); + } + return this._instance; + }; + /** + * Set the current global tracer. + * + * @returns true if the tracer provider was successfully registered, else false + */ + TraceAPI.prototype.setGlobalTracerProvider = function (provider) { + var success = global_utils_1.registerGlobal(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); + if (success) { + this._proxyTracerProvider.setDelegate(provider); + } + return success; + }; + /** + * Returns the global tracer provider. + */ + TraceAPI.prototype.getTracerProvider = function () { + return global_utils_1.getGlobal(API_NAME) || this._proxyTracerProvider; + }; + /** + * Returns a tracer from the global tracer provider. 
+ */ + TraceAPI.prototype.getTracer = function (name, version) { + return this.getTracerProvider().getTracer(name, version); + }; + /** Remove the global tracer provider */ + TraceAPI.prototype.disable = function () { + global_utils_1.unregisterGlobal(API_NAME, diag_1.DiagAPI.instance()); + this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); + }; + return TraceAPI; +}()); +exports.TraceAPI = TraceAPI; +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.js.map b/node_modules/@opentelemetry/api/build/src/api/trace.js.map new file mode 100644 index 0000000000..2a49947a78 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/api/trace.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace.js","sourceRoot":"","sources":["../../../src/api/trace.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,yDAIkC;AAClC,oEAAmE;AACnE,gEAGoC;AAGpC,wDAMgC;AAChC,+BAAiC;AAEjC,IAAM,QAAQ,GAAG,OAAO,CAAC;AAEzB;;GAEG;AACH;IAKE,+FAA+F;IAC/F;QAHQ,yBAAoB,GAAG,IAAI,yCAAmB,EAAE,CAAC;QAmDlD,oBAAe,GAAG,mCAAe,CAAC;QAElC,uBAAkB,GAAG,sCAAkB,CAAC;QAExC,eAAU,GAAG,0BAAU,CAAC;QAExB,YAAO,GAAG,uBAAO,CAAC;QAElB,mBAAc,GAAG,8BAAc,CAAC;QAEhC,YAAO,GAAG,uBAAO,CAAC;QAElB,mBAAc,GAAG,8BAAc,CAAC;IA5DhB,CAAC;IAExB,kDAAkD;IACpC,oBAAW,GAAzB;QACE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE;YACnB,IAAI,CAAC,SAAS,GAAG,IAAI,QAAQ,EAAE,CAAC;SACjC;QAED,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IAED;;;;OAIG;IACI,0CAAuB,GAA9B,UAA+B,QAAwB;QACrD,IAAM,OAAO,GAAG,6BAAc,CAC5B,QAAQ,EACR,IAAI,CAAC,oBAAoB,EACzB,cAAO,CAAC,QAAQ,EAAE,CACnB,CAAC;QACF,IAAI,OAAO,EAAE;YACX,IAAI,CAAC,oBAAoB,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;SACjD;QACD,OAAO,OAAO,CAAC;IACjB,CAAC;IAED;;OAEG;IACI,oCAAiB,GAAxB;QACE,OAAO,wBAAS,CAAC,QAAQ,CAAC,IAAI,IAAI,CAAC,oBAAoB,CAAC;IAC1D,CAAC;IAED;;OAEG;IACI,4BAAS,GAAhB,UAAiB,IAAY,EAAE,OAAgB;QAC7C,OAAO,IAAI,CAAC,iBAAiB,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IAED,wCAAwC;IACjC,0BAAO,GAAd;QACE,+BAAgB,CAAC,QAAQ,EAAE,cAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;QAC/C,IAAI,CAAC,oBAAoB,GAAG,IAAI,yCAAmB,EAAE,CAAC;IACxD,CAAC;IAeH,eAAC;AAAD,CAAC,AAnED,IAmEC;AAnEY,4BAAQ","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n getGlobal,\n registerGlobal,\n unregisterGlobal,\n} from '../internal/global-utils';\nimport { ProxyTracerProvider } from '../trace/ProxyTracerProvider';\nimport {\n isSpanContextValid,\n wrapSpanContext,\n} from '../trace/spancontext-utils';\nimport { Tracer } from '../trace/tracer';\nimport { TracerProvider } from '../trace/tracer_provider';\nimport {\n deleteSpan,\n getSpan,\n getSpanContext,\n setSpan,\n setSpanContext,\n} from '../trace/context-utils';\nimport { DiagAPI } from './diag';\n\nconst API_NAME = 'trace';\n\n/**\n * Singleton object which represents the entry point to the OpenTelemetry Tracing API\n */\nexport class TraceAPI {\n private static _instance?: TraceAPI;\n\n private _proxyTracerProvider = new ProxyTracerProvider();\n\n /** Empty private constructor 
prevents end users from constructing a new instance of the API */\n private constructor() {}\n\n /** Get the singleton instance of the Trace API */\n public static getInstance(): TraceAPI {\n if (!this._instance) {\n this._instance = new TraceAPI();\n }\n\n return this._instance;\n }\n\n /**\n * Set the current global tracer.\n *\n * @returns true if the tracer provider was successfully registered, else false\n */\n public setGlobalTracerProvider(provider: TracerProvider): boolean {\n const success = registerGlobal(\n API_NAME,\n this._proxyTracerProvider,\n DiagAPI.instance()\n );\n if (success) {\n this._proxyTracerProvider.setDelegate(provider);\n }\n return success;\n }\n\n /**\n * Returns the global tracer provider.\n */\n public getTracerProvider(): TracerProvider {\n return getGlobal(API_NAME) || this._proxyTracerProvider;\n }\n\n /**\n * Returns a tracer from the global tracer provider.\n */\n public getTracer(name: string, version?: string): Tracer {\n return this.getTracerProvider().getTracer(name, version);\n }\n\n /** Remove the global tracer provider */\n public disable() {\n unregisterGlobal(API_NAME, DiagAPI.instance());\n this._proxyTracerProvider = new ProxyTracerProvider();\n }\n\n public wrapSpanContext = wrapSpanContext;\n\n public isSpanContextValid = isSpanContextValid;\n\n public deleteSpan = deleteSpan;\n\n public getSpan = getSpan;\n\n public getSpanContext = getSpanContext;\n\n public setSpan = setSpan;\n\n public setSpanContext = setSpanContext;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts new file mode 100644 index 0000000000..a387685b64 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.d.ts @@ -0,0 +1,23 @@ +import { Context } from '../context/types'; +import { Baggage } from './types'; +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +export declare function getBaggage(context: Context): Baggage | undefined; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +export declare function setBaggage(context: Context, baggage: Baggage): Context; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +export declare function deleteBaggage(context: Context): Context; +//# sourceMappingURL=context-helpers.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js new file mode 100644 index 0000000000..d8adc8cfdf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js @@ -0,0 +1,53 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.deleteBaggage = exports.setBaggage = exports.getBaggage = void 0; +var context_1 = require("../context/context"); +/** + * Baggage key + */ +var BAGGAGE_KEY = context_1.createContextKey('OpenTelemetry Baggage Key'); +/** + * Retrieve the current baggage from the given context + * + * @param {Context} Context that manage all context values + * @returns {Baggage} Extracted baggage from the context + */ +function getBaggage(context) { + return context.getValue(BAGGAGE_KEY) || undefined; +} +exports.getBaggage = getBaggage; +/** + * Store a baggage in the given context + * + * @param {Context} Context that manage all context values + * @param {Baggage} baggage that will be set in the actual context + */ +function setBaggage(context, baggage) { + return context.setValue(BAGGAGE_KEY, baggage); +} +exports.setBaggage = setBaggage; +/** + * Delete the baggage stored in the given context + * + * @param {Context} Context that manage all context values + */ +function deleteBaggage(context) { + return context.deleteValue(BAGGAGE_KEY); +} +exports.deleteBaggage = deleteBaggage; +//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map new file mode 100644 index 0000000000..069c377bbd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-helpers.js","sourceRoot":"","sources":["../../../src/baggage/context-helpers.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,8CAAsD;AAItD;;GAEG;AACH,IAAM,WAAW,GAAG,0BAAgB,CAAC,2BAA2B,CAAC,CAAC;AAElE;;;;;GAKG;AACH,SAAgB,UAAU,CAAC,OAAgB;IACzC,OAAQ,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAa,IAAI,SAAS,CAAC;AACjE,CAAC;AAFD,gCAEC;AAED;;;;;GAKG;AACH,SAAgB,UAAU,CAAC,OAAgB,EAAE,OAAgB;IAC3D,OAAO,OAAO,CAAC,QAAQ,CAAC,WAAW,EAAE,OAAO,CAAC,CAAC;AAChD,CAAC;AAFD,gCAEC;AAED;;;;GAIG;AACH,SAAgB,aAAa,CAAC,OAAgB;IAC5C,OAAO,OAAO,CAAC,WAAW,CAAC,WAAW,CAAC,CAAC;AAC1C,CAAC;AAFD,sCAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Baggage } from './types';\n\n/**\n * Baggage key\n */\nconst BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key');\n\n/**\n * Retrieve the current baggage from the given context\n *\n * @param {Context} Context that manage all context values\n * @returns {Baggage} Extracted baggage from the context\n */\nexport function getBaggage(context: Context): Baggage | undefined {\n return (context.getValue(BAGGAGE_KEY) as Baggage) || undefined;\n}\n\n/**\n * Store a baggage in the given context\n *\n * @param {Context} Context that manage all 
context values\n * @param {Baggage} baggage that will be set in the actual context\n */\nexport function setBaggage(context: Context, baggage: Baggage): Context {\n return context.setValue(BAGGAGE_KEY, baggage);\n}\n\n/**\n * Delete the baggage stored in the given context\n *\n * @param {Context} Context that manage all context values\n */\nexport function deleteBaggage(context: Context): Context {\n return context.deleteValue(BAGGAGE_KEY);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts new file mode 100644 index 0000000000..e6b4554042 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.d.ts @@ -0,0 +1,12 @@ +import type { Baggage, BaggageEntry } from '../types'; +export declare class BaggageImpl implements Baggage { + private _entries; + constructor(entries?: Map); + getEntry(key: string): BaggageEntry | undefined; + getAllEntries(): [string, BaggageEntry][]; + setEntry(key: string, entry: BaggageEntry): BaggageImpl; + removeEntry(key: string): BaggageImpl; + removeEntries(...keys: string[]): BaggageImpl; + clear(): BaggageImpl; +} +//# sourceMappingURL=baggage-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js new file mode 100644 index 0000000000..47bfe6556d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js @@ -0,0 +1,64 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BaggageImpl = void 0; +var BaggageImpl = /** @class */ (function () { + function BaggageImpl(entries) { + this._entries = entries ? 
new Map(entries) : new Map(); + } + BaggageImpl.prototype.getEntry = function (key) { + var entry = this._entries.get(key); + if (!entry) { + return undefined; + } + return Object.assign({}, entry); + }; + BaggageImpl.prototype.getAllEntries = function () { + return Array.from(this._entries.entries()).map(function (_a) { + var k = _a[0], v = _a[1]; + return [k, v]; + }); + }; + BaggageImpl.prototype.setEntry = function (key, entry) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.set(key, entry); + return newBaggage; + }; + BaggageImpl.prototype.removeEntry = function (key) { + var newBaggage = new BaggageImpl(this._entries); + newBaggage._entries.delete(key); + return newBaggage; + }; + BaggageImpl.prototype.removeEntries = function () { + var keys = []; + for (var _i = 0; _i < arguments.length; _i++) { + keys[_i] = arguments[_i]; + } + var newBaggage = new BaggageImpl(this._entries); + for (var _a = 0, keys_1 = keys; _a < keys_1.length; _a++) { + var key = keys_1[_a]; + newBaggage._entries.delete(key); + } + return newBaggage; + }; + BaggageImpl.prototype.clear = function () { + return new BaggageImpl(); + }; + return BaggageImpl; +}()); +exports.BaggageImpl = BaggageImpl; +//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map new file mode 100644 index 0000000000..e3a998dd9b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js.map @@ -0,0 +1 @@ +{"version":3,"file":"baggage-impl.js","sourceRoot":"","sources":["../../../../src/baggage/internal/baggage-impl.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH;IAGE,qBAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;IACzD,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW;QAClB,IAAM,KAAK,GAAG,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;QACrC,IAAI,CAAC,KAAK,EAAE;YACV,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,KAAK,CAAC,CAAC;IAClC,CAAC;IAED,mCAAa,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,UAAC,EAAM;gBAAL,CAAC,QAAA,EAAE,CAAC,QAAA;YAAM,OAAA,CAAC,CAAC,EAAE,CAAC,CAAC;QAAN,CAAM,CAAC,CAAC;IACrE,CAAC;IAED,8BAAQ,GAAR,UAAS,GAAW,EAAE,KAAmB;QACvC,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QACpC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,iCAAW,GAAX,UAAY,GAAW;QACrB,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QAChC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,mCAAa,GAAb;QAAc,cAAiB;aAAjB,UAAiB,EAAjB,qBAAiB,EAAjB,IAAiB;YAAjB,yBAAiB;;QAC7B,IAAM,UAAU,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QAClD,KAAkB,UAAI,EAAJ,aAAI,EAAJ,kBAAI,EAAJ,IAAI,EAAE;YAAnB,IAAM,GAAG,aAAA;YACZ,UAAU,CAAC,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACjC;QACD,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,2BAAK,GAAL;QACE,OAAO,IAAI,WAAW,EAAE,CAAC;IAC3B,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC;AA3CY,kCAAW","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" 
BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport type { Baggage, BaggageEntry } from '../types';\n\nexport class BaggageImpl implements Baggage {\n private _entries: Map;\n\n constructor(entries?: Map) {\n this._entries = entries ? new Map(entries) : new Map();\n }\n\n getEntry(key: string): BaggageEntry | undefined {\n const entry = this._entries.get(key);\n if (!entry) {\n return undefined;\n }\n\n return Object.assign({}, entry);\n }\n\n getAllEntries(): [string, BaggageEntry][] {\n return Array.from(this._entries.entries()).map(([k, v]) => [k, v]);\n }\n\n setEntry(key: string, entry: BaggageEntry): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.set(key, entry);\n return newBaggage;\n }\n\n removeEntry(key: string): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n newBaggage._entries.delete(key);\n return newBaggage;\n }\n\n removeEntries(...keys: string[]): BaggageImpl {\n const newBaggage = new BaggageImpl(this._entries);\n for (const key of keys) {\n newBaggage._entries.delete(key);\n }\n return newBaggage;\n }\n\n clear(): BaggageImpl {\n return new BaggageImpl();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts new file mode 100644 index 0000000000..9cd991c1cd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.d.ts @@ -0,0 +1,5 @@ +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +export declare const baggageEntryMetadataSymbol: unique symbol; +//# sourceMappingURL=symbol.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js new file mode 100644 index 0000000000..324c216dd5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js @@ -0,0 +1,23 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataSymbol = void 0; +/** + * Symbol used to make BaggageEntryMetadata an opaque type + */ +exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); +//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map new file mode 100644 index 0000000000..497f36272f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js.map @@ -0,0 +1 @@ +{"version":3,"file":"symbol.js","sourceRoot":"","sources":["../../../../src/baggage/internal/symbol.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH;;GAEG;AACU,QAAA,0BAA0B,GAAG,MAAM,CAAC,sBAAsB,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Symbol used to make BaggageEntryMetadata an opaque type\n */\nexport const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata');\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts new file mode 100644 index 0000000000..32fa0ec6bd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.d.ts @@ -0,0 +1,60 @@ +import { baggageEntryMetadataSymbol } from './internal/symbol'; +export interface BaggageEntry { + /** `String` value of the `BaggageEntry`. */ + value: string; + /** + * Metadata is an optional string property defined by the W3C baggage specification. + * It currently has no special meaning defined by the specification. + */ + metadata?: BaggageEntryMetadata; +} +/** + * Serializable Metadata defined by the W3C baggage specification. + * It currently has no special meaning defined by the OpenTelemetry or W3C. + */ +export declare type BaggageEntryMetadata = { + toString(): string; +} & { + __TYPE__: typeof baggageEntryMetadataSymbol; +}; +/** + * Baggage represents collection of key-value pairs with optional metadata. + * Each key of Baggage is associated with exactly one value. + * Baggage may be used to annotate and enrich telemetry data. 
+ */ +export interface Baggage { + /** + * Get an entry from Baggage if it exists + * + * @param key The key which identifies the BaggageEntry + */ + getEntry(key: string): BaggageEntry | undefined; + /** + * Get a list of all entries in the Baggage + */ + getAllEntries(): [string, BaggageEntry][]; + /** + * Returns a new baggage with the entries from the current bag and the specified entry + * + * @param key string which identifies the baggage entry + * @param entry BaggageEntry for the given key + */ + setEntry(key: string, entry: BaggageEntry): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entry + * + * @param key key identifying the entry to be removed + */ + removeEntry(key: string): Baggage; + /** + * Returns a new baggage with the entries from the current bag except the removed entries + * + * @param key keys identifying the entries to be removed + */ + removeEntries(...key: string[]): Baggage; + /** + * Returns a new baggage with no entries + */ + clear(): Baggage; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.js b/node_modules/@opentelemetry/api/build/src/baggage/types.js new file mode 100644 index 0000000000..c428c6d093 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.js.map b/node_modules/@opentelemetry/api/build/src/baggage/types.js.map new file mode 100644 index 0000000000..2a00da4975 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/baggage/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\n\n/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface BaggageEntry {\n /** `String` value of the `BaggageEntry`. 
*/\n value: string;\n /**\n * Metadata is an optional string property defined by the W3C baggage specification.\n * It currently has no special meaning defined by the specification.\n */\n metadata?: BaggageEntryMetadata;\n}\n\n/**\n * Serializable Metadata defined by the W3C baggage specification.\n * It currently has no special meaning defined by the OpenTelemetry or W3C.\n */\nexport type BaggageEntryMetadata = { toString(): string } & {\n __TYPE__: typeof baggageEntryMetadataSymbol;\n};\n\n/**\n * Baggage represents collection of key-value pairs with optional metadata.\n * Each key of Baggage is associated with exactly one value.\n * Baggage may be used to annotate and enrich telemetry data.\n */\nexport interface Baggage {\n /**\n * Get an entry from Baggage if it exists\n *\n * @param key The key which identifies the BaggageEntry\n */\n getEntry(key: string): BaggageEntry | undefined;\n\n /**\n * Get a list of all entries in the Baggage\n */\n getAllEntries(): [string, BaggageEntry][];\n\n /**\n * Returns a new baggage with the entries from the current bag and the specified entry\n *\n * @param key string which identifies the baggage entry\n * @param entry BaggageEntry for the given key\n */\n setEntry(key: string, entry: BaggageEntry): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entry\n *\n * @param key key identifying the entry to be removed\n */\n removeEntry(key: string): Baggage;\n\n /**\n * Returns a new baggage with the entries from the current bag except the removed entries\n *\n * @param key keys identifying the entries to be removed\n */\n removeEntries(...key: string[]): Baggage;\n\n /**\n * Returns a new baggage with no entries\n */\n clear(): Baggage;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts b/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts new file mode 100644 index 0000000000..9955d9e285 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.d.ts @@ -0,0 +1,15 @@ +import { Baggage, BaggageEntry, BaggageEntryMetadata } from './types'; +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +export declare function createBaggage(entries?: Record): Baggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +export declare function baggageEntryMetadataFromString(str: string): BaggageEntryMetadata; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.js b/node_modules/@opentelemetry/api/build/src/baggage/utils.js new file mode 100644 index 0000000000..782aaf7050 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.js @@ -0,0 +1,52 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; +var diag_1 = require("../api/diag"); +var baggage_impl_1 = require("./internal/baggage-impl"); +var symbol_1 = require("./internal/symbol"); +var diag = diag_1.DiagAPI.instance(); +/** + * Create a new Baggage with optional entries + * + * @param entries An array of baggage entries the new baggage should contain + */ +function createBaggage(entries) { + if (entries === void 0) { entries = {}; } + return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); +} +exports.createBaggage = createBaggage; +/** + * Create a serializable BaggageEntryMetadata object from a string. + * + * @param str string metadata. Format is currently not defined by the spec and has no special meaning. + * + */ +function baggageEntryMetadataFromString(str) { + if (typeof str !== 'string') { + diag.error("Cannot create baggage metadata from unknown type: " + typeof str); + str = ''; + } + return { + __TYPE__: symbol_1.baggageEntryMetadataSymbol, + toString: function () { + return str; + }, + }; +} +exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map b/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map new file mode 100644 index 0000000000..f25f316a3a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/baggage/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../src/baggage/utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,oCAAsC;AACtC,wDAAsD;AACtD,4CAA+D;AAG/D,IAAM,IAAI,GAAG,cAAO,CAAC,QAAQ,EAAE,CAAC;AAEhC;;;;GAIG;AACH,SAAgB,aAAa,CAC3B,OAA0C;IAA1C,wBAAA,EAAA,YAA0C;IAE1C,OAAO,IAAI,0BAAW,CAAC,IAAI,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;AAC3D,CAAC;AAJD,sCAIC;AAED;;;;;GAKG;AACH,SAAgB,8BAA8B,CAC5C,GAAW;IAEX,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;QAC3B,IAAI,CAAC,KAAK,CACR,uDAAqD,OAAO,GAAK,CAClE,CAAC;QACF,GAAG,GAAG,EAAE,CAAC;KACV;IAED,OAAO;QACL,QAAQ,EAAE,mCAA0B;QACpC,QAAQ;YACN,OAAO,GAAG,CAAC;QACb,CAAC;KACF,CAAC;AACJ,CAAC;AAhBD,wEAgBC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagAPI } from '../api/diag';\nimport { BaggageImpl } from './internal/baggage-impl';\nimport { baggageEntryMetadataSymbol } from './internal/symbol';\nimport { Baggage, BaggageEntry, BaggageEntryMetadata } from './types';\n\nconst diag = DiagAPI.instance();\n\n/**\n * Create a new Baggage with optional entries\n *\n * @param entries An array of baggage entries the new baggage should contain\n */\nexport function createBaggage(\n entries: Record = {}\n): Baggage {\n return new BaggageImpl(new Map(Object.entries(entries)));\n}\n\n/**\n * Create a 
serializable BaggageEntryMetadata object from a string.\n *\n * @param str string metadata. Format is currently not defined by the spec and has no special meaning.\n *\n */\nexport function baggageEntryMetadataFromString(\n str: string\n): BaggageEntryMetadata {\n if (typeof str !== 'string') {\n diag.error(\n `Cannot create baggage metadata from unknown type: ${typeof str}`\n );\n str = '';\n }\n\n return {\n __TYPE__: baggageEntryMetadataSymbol,\n toString() {\n return str;\n },\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts b/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts new file mode 100644 index 0000000000..19994fb2a8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.d.ts @@ -0,0 +1,15 @@ +/** + * Attributes is a map from string to attribute values. + * + * Note: only the own enumerable keys are counted as valid attribute keys. + */ +export interface Attributes { + [attributeKey: string]: AttributeValue | undefined; +} +/** + * Attribute values may be any non-nullish primitive value except an object. + * + * null or undefined attribute values are invalid and will result in undefined behavior. + */ +export declare type AttributeValue = string | number | boolean | Array | Array | Array; +//# sourceMappingURL=Attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.js b/node_modules/@opentelemetry/api/build/src/common/Attributes.js new file mode 100644 index 0000000000..684c93db92 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map b/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map new file mode 100644 index 0000000000..ff80e8e52a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Attributes.js","sourceRoot":"","sources":["../../../src/common/Attributes.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * Attributes is a map from string to attribute values.\n *\n * Note: only the own enumerable keys are counted as valid attribute keys.\n */\nexport interface Attributes {\n [attributeKey: string]: AttributeValue | undefined;\n}\n\n/**\n * Attribute values may be any non-nullish primitive value except an object.\n *\n * null or undefined attribute values are invalid and will result in undefined behavior.\n */\nexport type AttributeValue =\n | string\n | number\n | boolean\n | Array\n | Array\n | Array;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts b/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts new file mode 100644 index 0000000000..e175a7fdc8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.d.ts @@ -0,0 +1,26 @@ +interface ExceptionWithCode { + code: string | number; + name?: string; + message?: string; + stack?: string; +} +interface ExceptionWithMessage { + code?: string | number; + message: string; + name?: string; + stack?: string; +} +interface ExceptionWithName { + code?: string | number; + message?: string; + name: string; + stack?: string; +} +/** + * Defines Exception. + * + * string or an object with one of (message or name or code) and optional stack + */ +export declare type Exception = ExceptionWithCode | ExceptionWithMessage | ExceptionWithName | string; +export {}; +//# sourceMappingURL=Exception.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.js b/node_modules/@opentelemetry/api/build/src/common/Exception.js new file mode 100644 index 0000000000..ed450aef6e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.js.map b/node_modules/@opentelemetry/api/build/src/common/Exception.js.map new file mode 100644 index 0000000000..459c350a0a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Exception.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Exception.js","sourceRoot":"","sources":["../../../src/common/Exception.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\ninterface ExceptionWithCode {\n code: string | number;\n name?: string;\n message?: string;\n stack?: string;\n}\n\ninterface ExceptionWithMessage {\n code?: string | number;\n message: string;\n name?: string;\n stack?: string;\n}\n\ninterface ExceptionWithName {\n code?: string | number;\n message?: string;\n name: string;\n stack?: string;\n}\n\n/**\n * Defines Exception.\n *\n * string or an object with one of (message or name or code) and optional stack\n */\nexport type Exception =\n | ExceptionWithCode\n | ExceptionWithMessage\n | ExceptionWithName\n | string;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.d.ts b/node_modules/@opentelemetry/api/build/src/common/Time.d.ts new file mode 100644 index 0000000000..cc3c502c12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.d.ts @@ -0,0 +1,20 @@ +/** + * Defines High-Resolution Time. + * + * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970. + * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds. + * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150. + * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds: + * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210. + * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds: + * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000. + * This is represented in HrTime format as [1609504210, 150000000]. + */ +export declare type HrTime = [number, number]; +/** + * Defines TimeInput. 
+ * + * hrtime, epoch milliseconds, performance.now() or Date + */ +export declare type TimeInput = HrTime | number | Date; +//# sourceMappingURL=Time.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.js b/node_modules/@opentelemetry/api/build/src/common/Time.js new file mode 100644 index 0000000000..1faaf6983b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.js.map b/node_modules/@opentelemetry/api/build/src/common/Time.js.map new file mode 100644 index 0000000000..ae124f0361 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/common/Time.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Time.js","sourceRoot":"","sources":["../../../src/common/Time.ts"],"names":[],"mappings":"","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n/**\n * Defines High-Resolution Time.\n *\n * The first number, HrTime[0], is UNIX Epoch time in seconds since 00:00:00 UTC on 1 January 1970.\n * The second number, HrTime[1], represents the partial second elapsed since Unix Epoch time represented by first number in nanoseconds.\n * For example, 2021-01-01T12:30:10.150Z in UNIX Epoch time in milliseconds is represented as 1609504210150.\n * The first number is calculated by converting and truncating the Epoch time in milliseconds to seconds:\n * HrTime[0] = Math.trunc(1609504210150 / 1000) = 1609504210.\n * The second number is calculated by converting the digits after the decimal point of the subtraction, (1609504210150 / 1000) - HrTime[0], to nanoseconds:\n * HrTime[1] = Number((1609504210.150 - HrTime[0]).toFixed(9)) * 1e9 = 150000000.\n * This is represented in HrTime format as [1609504210, 150000000].\n */\nexport type HrTime = [number, number];\n\n/**\n * Defines TimeInput.\n *\n * hrtime, epoch milliseconds, performance.now() or Date\n */\nexport type TimeInput = HrTime | number | Date;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts new file mode 100644 index 0000000000..48a165970c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.d.ts @@ -0,0 +1,9 @@ +import * as types from './types'; +export declare class NoopContextManager implements types.ContextManager { + active(): types.Context; + with ReturnType>(_context: types.Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + bind(_context: types.Context, target: T): T; + enable(): this; + disable(): this; +} +//# sourceMappingURL=NoopContextManager.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js 
b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js new file mode 100644 index 0000000000..b1e96dd71a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js @@ -0,0 +1,50 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __spreadArray = (this && this.__spreadArray) || function (to, from) { + for (var i = 0, il = from.length, j = to.length; i < il; i++, j++) + to[j] = from[i]; + return to; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopContextManager = void 0; +var context_1 = require("./context"); +var NoopContextManager = /** @class */ (function () { + function NoopContextManager() { + } + NoopContextManager.prototype.active = function () { + return context_1.ROOT_CONTEXT; + }; + NoopContextManager.prototype.with = function (_context, fn, thisArg) { + var args = []; + for (var _i = 3; _i < arguments.length; _i++) { + args[_i - 3] = arguments[_i]; + } + return fn.call.apply(fn, __spreadArray([thisArg], args)); + }; + NoopContextManager.prototype.bind = function (_context, target) { + return target; + }; + NoopContextManager.prototype.enable = function () { + return this; + }; + NoopContextManager.prototype.disable = function () { + return this; + }; + return NoopContextManager; +}()); +exports.NoopContextManager = NoopContextManager; +//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map new file mode 100644 index 0000000000..3c8e2fb56d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopContextManager.js","sourceRoot":"","sources":["../../../src/context/NoopContextManager.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;AAEH,qCAAyC;AAGzC;IAAA;IAyBA,CAAC;IAxBC,mCAAM,GAAN;QACE,OAAO,sBAAY,CAAC;IACtB,CAAC;IAED,iCAAI,GAAJ,UACE,QAAuB,EACvB,EAAK,EACL,OAA8B;QAC9B,cAAU;aAAV,UAAU,EAAV,qBAAU,EAAV,IAAU;YAAV,6BAAU;;QAEV,OAAO,EAAE,CAAC,IAAI,OAAP,EAAE,iBAAM,OAAO,GAAK,IAAI,GAAE;IACnC,CAAC;IAED,iCAAI,GAAJ,UAAQ,QAAuB,EAAE,MAAS;QACxC,OAAO,MAAM,CAAC;IAChB,CAAC;IAED,mCAAM,GAAN;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IAED,oCAAO,GAAP;QACE,OAAO,IAAI,CAAC;IACd,CAAC;IACH,yBAAC;AAAD,CAAC,AAzBD,IAyBC;AAzBY,gDAAkB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language 
governing permissions and\n * limitations under the License.\n */\n\nimport { ROOT_CONTEXT } from './context';\nimport * as types from './types';\n\nexport class NoopContextManager implements types.ContextManager {\n active(): types.Context {\n return ROOT_CONTEXT;\n }\n\n with ReturnType>(\n _context: types.Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType {\n return fn.call(thisArg, ...args);\n }\n\n bind(_context: types.Context, target: T): T {\n return target;\n }\n\n enable(): this {\n return this;\n }\n\n disable(): this {\n return this;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.d.ts b/node_modules/@opentelemetry/api/build/src/context/context.d.ts new file mode 100644 index 0000000000..8be0259401 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.d.ts @@ -0,0 +1,6 @@ +import { Context } from './types'; +/** Get a key to uniquely identify a context value */ +export declare function createContextKey(description: string): symbol; +/** The root context is used as the default parent context when there is no active context */ +export declare const ROOT_CONTEXT: Context; +//# sourceMappingURL=context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.js b/node_modules/@opentelemetry/api/build/src/context/context.js new file mode 100644 index 0000000000..67334f5603 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.js @@ -0,0 +1,56 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ROOT_CONTEXT = exports.createContextKey = void 0; +/** Get a key to uniquely identify a context value */ +function createContextKey(description) { + // The specification states that for the same input, multiple calls should + // return different keys. Due to the nature of the JS dependency management + // system, this creates problems where multiple versions of some package + // could hold different keys for the same property. + // + // Therefore, we use Symbol.for which returns the same key for the same input. + return Symbol.for(description); +} +exports.createContextKey = createContextKey; +var BaseContext = /** @class */ (function () { + /** + * Construct a new context which inherits values from an optional parent context. + * + * @param parentContext a context from which to inherit values + */ + function BaseContext(parentContext) { + // for minification + var self = this; + self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); + self.getValue = function (key) { return self._currentContext.get(key); }; + self.setValue = function (key, value) { + var context = new BaseContext(self._currentContext); + context._currentContext.set(key, value); + return context; + }; + self.deleteValue = function (key) { + var context = new BaseContext(self._currentContext); + context._currentContext.delete(key); + return context; + }; + } + return BaseContext; +}()); +/** The root context is used as the default parent context when there is no active context */ +exports.ROOT_CONTEXT = new BaseContext(); +//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.js.map b/node_modules/@opentelemetry/api/build/src/context/context.js.map new file mode 100644 index 0000000000..7f5dc892c6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context.js","sourceRoot":"","sources":["../../../src/context/context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,qDAAqD;AACrD,SAAgB,gBAAgB,CAAC,WAAmB;IAClD,0EAA0E;IAC1E,2EAA2E;IAC3E,wEAAwE;IACxE,mDAAmD;IACnD,EAAE;IACF,8EAA8E;IAC9E,OAAO,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC;AACjC,CAAC;AARD,4CAQC;AAED;IAGE;;;;OAIG;IACH,qBAAY,aAAoC;QAC9C,mBAAmB;QACnB,IAAM,IAAI,GAAG,IAAI,CAAC;QAElB,IAAI,CAAC,eAAe,GAAG,aAAa,CAAC,CAAC,CAAC,IAAI,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,IAAI,GAAG,EAAE,CAAC;QAE1E,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,IAAK,OAAA,IAAI,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,CAAC,EAA7B,CAA6B,CAAC;QAE/D,IAAI,CAAC,QAAQ,GAAG,UAAC,GAAW,EAAE,KAAc;YAC1C,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;YACxC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;QAEF,IAAI,CAAC,WAAW,GAAG,UAAC,GAAW;YAC7B,IAAM,OAAO,GAAG,IAAI,WAAW,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC;YACtD,OAAO,CAAC,eAAe,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACpC,OAAO,OAAO,CAAC;QACjB,CAAC,CAAC;IACJ,CAAC;IAyBH,kBAAC;AAAD,CAAC,AApDD,IAoDC;AAED,6FAA6F;AAChF,QAAA,YAAY,GAAY,IAAI,WAAW,EAAE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from './types';\n\n/** Get a key to uniquely identify a context value */\nexport function createContextKey(description: string) {\n // The specification states that for the same input, multiple calls should\n // return different keys. 
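The context.js added above implements an immutable, map-backed Context: BaseContext copies its parent's map, and setValue/deleteValue return a new context instead of mutating the receiver. A minimal sketch of that behaviour through the package's public entry point (the import path is the published package name, assumed here rather than shown in this hunk):

    import { createContextKey, ROOT_CONTEXT } from '@opentelemetry/api';

    const key = createContextKey('example key');      // Symbol.for('example key') under the hood
    const ctx = ROOT_CONTEXT.setValue(key, 'value');   // returns a new Context
    console.log(ctx.getValue(key));                    // 'value'
    console.log(ROOT_CONTEXT.getValue(key));           // undefined: the parent context is untouched
    console.log(ctx.deleteValue(key).getValue(key));   // undefined: deleteValue also returns a new Context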
Due to the nature of the JS dependency management\n // system, this creates problems where multiple versions of some package\n // could hold different keys for the same property.\n //\n // Therefore, we use Symbol.for which returns the same key for the same input.\n return Symbol.for(description);\n}\n\nclass BaseContext implements Context {\n private _currentContext!: Map;\n\n /**\n * Construct a new context which inherits values from an optional parent context.\n *\n * @param parentContext a context from which to inherit values\n */\n constructor(parentContext?: Map) {\n // for minification\n const self = this;\n\n self._currentContext = parentContext ? new Map(parentContext) : new Map();\n\n self.getValue = (key: symbol) => self._currentContext.get(key);\n\n self.setValue = (key: symbol, value: unknown): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.set(key, value);\n return context;\n };\n\n self.deleteValue = (key: symbol): Context => {\n const context = new BaseContext(self._currentContext);\n context._currentContext.delete(key);\n return context;\n };\n }\n\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n public getValue!: (key: symbol) => unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n public setValue!: (key: symbol, value: unknown) => Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n public deleteValue!: (key: symbol) => Context;\n}\n\n/** The root context is used as the default parent context when there is no active context */\nexport const ROOT_CONTEXT: Context = new BaseContext();\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.d.ts b/node_modules/@opentelemetry/api/build/src/context/types.d.ts new file mode 100644 index 0000000000..7e866320cf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.d.ts @@ -0,0 +1,52 @@ +export interface Context { + /** + * Get a value from the context. + * + * @param key key which identifies a context value + */ + getValue(key: symbol): unknown; + /** + * Create a new context which inherits from this context and has + * the given key set to the given value. + * + * @param key context key for which to set the value + * @param value value to set for the given key + */ + setValue(key: symbol, value: unknown): Context; + /** + * Return a new context which inherits from this context but does + * not contain a value for the given key. 
+ * + * @param key context key for which to clear a value + */ + deleteValue(key: symbol): Context; +} +export interface ContextManager { + /** + * Get the current active context + */ + active(): Context; + /** + * Run the fn callback with object set as the current active context + * @param context Any object to set as the current active context + * @param fn A callback to be immediately run within a specific context + * @param thisArg optional receiver to be used for calling fn + * @param args optional arguments forwarded to fn + */ + with ReturnType>(context: Context, fn: F, thisArg?: ThisParameterType, ...args: A): ReturnType; + /** + * Bind an object as the current context (or a specific one) + * @param [context] Optionally specify the context which you want to assign + * @param target Any object to which a context need to be set + */ + bind(context: Context, target: T): T; + /** + * Enable context management + */ + enable(): this; + /** + * Disable context management + */ + disable(): this; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.js b/node_modules/@opentelemetry/api/build/src/context/types.js new file mode 100644 index 0000000000..c428c6d093 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
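The types.d.ts above defines the ContextManager contract that the NoopContextManager added earlier in this diff satisfies trivially: active() always returns ROOT_CONTEXT and with() just invokes the callback. A hedged sketch of what that means for callers before a real context manager is registered, using the context singleton exported from index.js later in this section:

    import { context, createContextKey, ROOT_CONTEXT } from '@opentelemetry/api';

    const key = createContextKey('request id');
    const ctx = ROOT_CONTEXT.setValue(key, 42);

    context.with(ctx, () => {
      // Under the default NoopContextManager the active context is still ROOT_CONTEXT,
      // so the value set on ctx is not visible; an SDK-provided manager would return 42.
      console.log(context.active().getValue(key));   // undefined
    });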
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.js.map b/node_modules/@opentelemetry/api/build/src/context/types.js.map new file mode 100644 index 0000000000..8bde9ce326 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/context/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/context/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface Context {\n /**\n * Get a value from the context.\n *\n * @param key key which identifies a context value\n */\n getValue(key: symbol): unknown;\n\n /**\n * Create a new context which inherits from this context and has\n * the given key set to the given value.\n *\n * @param key context key for which to set the value\n * @param value value to set for the given key\n */\n setValue(key: symbol, value: unknown): Context;\n\n /**\n * Return a new context which inherits from this context but does\n * not contain a value for the given key.\n *\n * @param key context key for which to clear a value\n */\n deleteValue(key: symbol): Context;\n}\n\nexport interface ContextManager {\n /**\n * Get the current active context\n */\n active(): Context;\n\n /**\n * Run the fn callback with object set as the current active context\n * @param context Any object to set as the current active context\n * @param fn A callback to be immediately run within a specific context\n * @param thisArg optional receiver to be used for calling fn\n * @param args optional arguments forwarded to fn\n */\n with ReturnType>(\n context: Context,\n fn: F,\n thisArg?: ThisParameterType,\n ...args: A\n ): ReturnType;\n\n /**\n * Bind an object as the current context (or a specific one)\n * @param [context] Optionally specify the context which you want to assign\n * @param target Any object to which a context need to be set\n */\n bind(context: Context, target: T): T;\n\n /**\n * Enable context management\n */\n enable(): this;\n\n /**\n * Disable context management\n */\n disable(): this;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts new file mode 100644 index 0000000000..f06095031a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.d.ts @@ -0,0 +1,20 @@ +import { ComponentLoggerOptions, DiagLogger } from './types'; +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +export declare class DiagComponentLogger implements DiagLogger { + private _namespace; + constructor(props: ComponentLoggerOptions); + debug(...args: any[]): void; + error(...args: any[]): void; + info(...args: any[]): void; + warn(...args: any[]): void; + verbose(...args: any[]): void; +} +//# sourceMappingURL=ComponentLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js new file mode 100644 index 0000000000..0a6a0e0be1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js @@ -0,0 +1,80 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagComponentLogger = void 0; +var global_utils_1 = require("../internal/global-utils"); +/** + * Component Logger which is meant to be used as part of any component which + * will add automatically additional namespace in front of the log message. 
+ * It will then forward all message to global diag logger + * @example + * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); + * cLogger.debug('test'); + * // @opentelemetry/instrumentation-http test + */ +var DiagComponentLogger = /** @class */ (function () { + function DiagComponentLogger(props) { + this._namespace = props.namespace || 'DiagComponentLogger'; + } + DiagComponentLogger.prototype.debug = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('debug', this._namespace, args); + }; + DiagComponentLogger.prototype.error = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('error', this._namespace, args); + }; + DiagComponentLogger.prototype.info = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('info', this._namespace, args); + }; + DiagComponentLogger.prototype.warn = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('warn', this._namespace, args); + }; + DiagComponentLogger.prototype.verbose = function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + return logProxy('verbose', this._namespace, args); + }; + return DiagComponentLogger; +}()); +exports.DiagComponentLogger = DiagComponentLogger; +function logProxy(funcName, namespace, args) { + var logger = global_utils_1.getGlobal('diag'); + // shortcut if logger not set + if (!logger) { + return; + } + args.unshift(namespace); + return logger[funcName].apply(logger, args); +} +//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map new file mode 100644 index 0000000000..4a876170a1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ComponentLogger.js","sourceRoot":"","sources":["../../../src/diag/ComponentLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,yDAAqD;AAGrD;;;;;;;;GAQG;AACH;IAGE,6BAAY,KAA6B;QACvC,IAAI,CAAC,UAAU,GAAG,KAAK,CAAC,SAAS,IAAI,qBAAqB,CAAC;IAC7D,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,mCAAK,GAAZ;QAAa,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACzB,OAAO,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IAClD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,kCAAI,GAAX;QAAY,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QACxB,OAAO,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACjD,CAAC;IAEM,qCAAO,GAAd;QAAe,cAAc;aAAd,UAAc,EAAd,qBAAc,EAAd,IAAc;YAAd,yBAAc;;QAC3B,OAAO,QAAQ,CAAC,SAAS,EAAE,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;IACpD,CAAC;IACH,0BAAC;AAAD,CAAC,AA1BD,IA0BC;AA1BY,kDAAmB;AA4BhC,SAAS,QAAQ,CACf,QAA0B,EAC1B,SAAiB,EACjB,IAAS;IAET,IAAM,MAAM,GAAG,wBAAS,CAAC,MAAM,CAAC,CAAC;IACjC,6BAA6B;IAC7B,IAAI,CAAC,MAAM,EAAE;QACX,OAAO;KACR;IAED,IAAI,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;IACxB,OAAO,MAAM,CAAC,QAAQ,CAAC,OAAhB,MAAM,EAAe,IAAoC,EAAE;AACpE,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * 
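DiagComponentLogger above only prepends its namespace and forwards to whatever global diag logger is registered (logProxy returns early when none is set). Expanding the @example from its own doc comment into a runnable sketch, with the console logger and log level as assumed setup:

    import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

    diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.DEBUG);

    const logger = diag.createComponentLogger({ namespace: 'my-component' });
    logger.debug('started');   // forwarded to the global logger as: my-component started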
Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { getGlobal } from '../internal/global-utils';\nimport { ComponentLoggerOptions, DiagLogger, DiagLogFunction } from './types';\n\n/**\n * Component Logger which is meant to be used as part of any component which\n * will add automatically additional namespace in front of the log message.\n * It will then forward all message to global diag logger\n * @example\n * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' });\n * cLogger.debug('test');\n * // @opentelemetry/instrumentation-http test\n */\nexport class DiagComponentLogger implements DiagLogger {\n private _namespace: string;\n\n constructor(props: ComponentLoggerOptions) {\n this._namespace = props.namespace || 'DiagComponentLogger';\n }\n\n public debug(...args: any[]): void {\n return logProxy('debug', this._namespace, args);\n }\n\n public error(...args: any[]): void {\n return logProxy('error', this._namespace, args);\n }\n\n public info(...args: any[]): void {\n return logProxy('info', this._namespace, args);\n }\n\n public warn(...args: any[]): void {\n return logProxy('warn', this._namespace, args);\n }\n\n public verbose(...args: any[]): void {\n return logProxy('verbose', this._namespace, args);\n }\n}\n\nfunction logProxy(\n funcName: keyof DiagLogger,\n namespace: string,\n args: any\n): void {\n const logger = getGlobal('diag');\n // shortcut if logger not set\n if (!logger) {\n return;\n }\n\n args.unshift(namespace);\n return logger[funcName](...(args as Parameters));\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts new file mode 100644 index 0000000000..fa3db1e318 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.d.ts @@ -0,0 +1,38 @@ +import { DiagLogger, DiagLogFunction } from './types'; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. + * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +export declare class DiagConsoleLogger implements DiagLogger { + constructor(); + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. + */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. 
Useful scenarios would be to log the execution + * order of async operations + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +//# sourceMappingURL=consoleLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js new file mode 100644 index 0000000000..f0ea94ef51 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js @@ -0,0 +1,62 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagConsoleLogger = void 0; +var consoleMap = [ + { n: 'error', c: 'error' }, + { n: 'warn', c: 'warn' }, + { n: 'info', c: 'info' }, + { n: 'debug', c: 'debug' }, + { n: 'verbose', c: 'trace' }, +]; +/** + * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
+ * If you want to limit the amount of logging to a specific level or lower use the + * {@link createLogLevelDiagLogger} + */ +var DiagConsoleLogger = /** @class */ (function () { + function DiagConsoleLogger() { + function _consoleFunc(funcName) { + return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } + if (console) { + // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console + var theFunc = console[funcName]; + if (typeof theFunc !== 'function') { + // Not all environments support all functions + // eslint-disable-next-line no-console + theFunc = console.log; + } + // One last final check + if (typeof theFunc === 'function') { + return theFunc.apply(console, args); + } + } + }; + } + for (var i = 0; i < consoleMap.length; i++) { + this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); + } + } + return DiagConsoleLogger; +}()); +exports.DiagConsoleLogger = DiagConsoleLogger; +//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map new file mode 100644 index 0000000000..3b71b2f78f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"consoleLogger.js","sourceRoot":"","sources":["../../../src/diag/consoleLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH,IAAM,UAAU,GAAiD;IAC/D,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE;IACxB,EAAE,CAAC,EAAE,OAAO,EAAE,CAAC,EAAE,OAAO,EAAE;IAC1B,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,OAAO,EAAE;CAC7B,CAAC;AAEF;;;;GAIG;AACH;IACE;QACE,SAAS,YAAY,CAAC,QAAwB;YAC5C,OAAO;gBAAU,cAAO;qBAAP,UAAO,EAAP,qBAAO,EAAP,IAAO;oBAAP,yBAAO;;gBACtB,IAAI,OAAO,EAAE;oBACX,mFAAmF;oBACnF,sCAAsC;oBACtC,IAAI,OAAO,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;oBAChC,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,6CAA6C;wBAC7C,sCAAsC;wBACtC,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC;qBACvB;oBAED,uBAAuB;oBACvB,IAAI,OAAO,OAAO,KAAK,UAAU,EAAE;wBACjC,OAAO,OAAO,CAAC,KAAK,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;qBACrC;iBACF;YACH,CAAC,CAAC;QACJ,CAAC;QAED,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC1C,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,YAAY,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACvD;IACH,CAAC;IAkCH,wBAAC;AAAD,CAAC,AA3DD,IA2DC;AA3DY,8CAAiB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger, DiagLogFunction } from './types';\n\ntype ConsoleMapKeys = 'error' | 'warn' | 'info' | 'debug' | 'trace';\nconst consoleMap: { n: keyof DiagLogger; c: ConsoleMapKeys }[] = [\n { n: 'error', c: 'error' },\n { n: 'warn', c: 'warn' },\n { n: 'info', c: 'info' },\n { n: 'debug', c: 'debug' },\n { n: 'verbose', c: 'trace' },\n];\n\n/**\n * 
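DiagConsoleLogger wires each DiagLogger method to a console function via consoleMap, with verbose mapped to console.trace and console.log used as a fallback when a given console function is missing. A small sketch of the resulting mapping:

    import { DiagConsoleLogger } from '@opentelemetry/api';

    const logger = new DiagConsoleLogger();
    logger.error('boom');      // -> console.error
    logger.debug('detail');    // -> console.debug
    logger.verbose('trace');   // -> console.trace, or console.log where trace is unavailable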
A simple Immutable Console based diagnostic logger which will output any messages to the Console.\n * If you want to limit the amount of logging to a specific level or lower use the\n * {@link createLogLevelDiagLogger}\n */\nexport class DiagConsoleLogger implements DiagLogger {\n constructor() {\n function _consoleFunc(funcName: ConsoleMapKeys): DiagLogFunction {\n return function (...args) {\n if (console) {\n // Some environments only expose the console when the F12 developer console is open\n // eslint-disable-next-line no-console\n let theFunc = console[funcName];\n if (typeof theFunc !== 'function') {\n // Not all environments support all functions\n // eslint-disable-next-line no-console\n theFunc = console.log;\n }\n\n // One last final check\n if (typeof theFunc === 'function') {\n return theFunc.apply(console, args);\n }\n }\n };\n }\n\n for (let i = 0; i < consoleMap.length; i++) {\n this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c);\n }\n }\n\n /** Log an error scenario that was not expected and caused the requested operation to fail. */\n public error!: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n public warn!: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n public info!: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario. Useful scenarios would be to log the execution\n * order of async operations\n */\n public debug!: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n public verbose!: DiagLogFunction;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/index.d.ts b/node_modules/@opentelemetry/api/build/src/diag/index.d.ts new file mode 100644 index 0000000000..11b35740b5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/index.d.ts @@ -0,0 +1,3 @@ +export * from './consoleLogger'; +export * from './types'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/index.js b/node_modules/@opentelemetry/api/build/src/diag/index.js new file mode 100644 index 0000000000..45c50d9077 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/index.js @@ -0,0 +1,30 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./consoleLogger"), exports); +__exportStar(require("./types"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/index.js.map b/node_modules/@opentelemetry/api/build/src/diag/index.js.map new file mode 100644 index 0000000000..c0a526c1ac --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/diag/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,kDAAgC;AAChC,0CAAwB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './consoleLogger';\nexport * from './types';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts new file mode 100644 index 0000000000..890b9f1ef4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.d.ts @@ -0,0 +1,3 @@ +import { DiagLogger, DiagLogLevel } from '../types'; +export declare function createLogLevelDiagLogger(maxLevel: DiagLogLevel, logger: DiagLogger): DiagLogger; +//# sourceMappingURL=logLevelLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js new file mode 100644 index 0000000000..3adc6659f9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js @@ -0,0 +1,45 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createLogLevelDiagLogger = void 0; +var types_1 = require("../types"); +function createLogLevelDiagLogger(maxLevel, logger) { + if (maxLevel < types_1.DiagLogLevel.NONE) { + maxLevel = types_1.DiagLogLevel.NONE; + } + else if (maxLevel > types_1.DiagLogLevel.ALL) { + maxLevel = types_1.DiagLogLevel.ALL; + } + // In case the logger is null or undefined + logger = logger || {}; + function _filterFunc(funcName, theLevel) { + var theFunc = logger[funcName]; + if (typeof theFunc === 'function' && maxLevel >= theLevel) { + return theFunc.bind(logger); + } + return function () { }; + } + return { + error: _filterFunc('error', types_1.DiagLogLevel.ERROR), + warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), + info: _filterFunc('info', types_1.DiagLogLevel.INFO), + debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), + verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), + }; +} +exports.createLogLevelDiagLogger = createLogLevelDiagLogger; +//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map new file mode 100644 index 0000000000..b56d1b23b8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"logLevelLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/logLevelLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,kCAAqE;AAErE,SAAgB,wBAAwB,CACtC,QAAsB,EACtB,MAAkB;IAElB,IAAI,QAAQ,GAAG,oBAAY,CAAC,IAAI,EAAE;QAChC,QAAQ,GAAG,oBAAY,CAAC,IAAI,CAAC;KAC9B;SAAM,IAAI,QAAQ,GAAG,oBAAY,CAAC,GAAG,EAAE;QACtC,QAAQ,GAAG,oBAAY,CAAC,GAAG,CAAC;KAC7B;IAED,0CAA0C;IAC1C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAC;IAEtB,SAAS,WAAW,CAClB,QAA0B,EAC1B,QAAsB;QAEtB,IAAM,OAAO,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAC;QAEjC,IAAI,OAAO,OAAO,KAAK,UAAU,IAAI,QAAQ,IAAI,QAAQ,EAAE;YACzD,OAAO,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;QACD,OAAO,cAAa,CAAC,CAAC;IACxB,CAAC;IAED,OAAO;QACL,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,oBAAY,CAAC,KAAK,CAAC;QAC/C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,oBAAY,CAAC,IAAI,CAAC;QAC5C,IAAI,EAAE,WAAW,CAAC,MAAM,EAAE,oBAAY,CAAC,IAAI,CAAC;QAC5C,KAAK,EAAE,WAAW,CAAC,OAAO,EAAE,oBAAY,CAAC,KAAK,CAAC;QAC/C,OAAO,EAAE,WAAW,CAAC,SAAS,EAAE,oBAAY,CAAC,OAAO,CAAC;KACtD,CAAC;AACJ,CAAC;AAhCD,4DAgCC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogFunction, DiagLogger, DiagLogLevel } from '../types';\n\nexport function createLogLevelDiagLogger(\n maxLevel: DiagLogLevel,\n logger: DiagLogger\n): DiagLogger {\n if (maxLevel < DiagLogLevel.NONE) {\n maxLevel = DiagLogLevel.NONE;\n } else if (maxLevel > DiagLogLevel.ALL) {\n maxLevel = DiagLogLevel.ALL;\n }\n\n // In case the logger is null or undefined\n logger = logger || {};\n\n function _filterFunc(\n funcName: keyof DiagLogger,\n theLevel: 
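createLogLevelDiagLogger above clamps maxLevel into the [NONE, ALL] range and returns a DiagLogger whose methods are either the originals (bound to the wrapped logger) or no-ops, depending on the check maxLevel >= level. The diag API applies this wrapper when a log level is passed to setLogger, so the filtering is normally reached like this (a sketch assuming the usual setLogger signature):

    import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

    diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.WARN);

    diag.error('kept');      // ERROR (30) <= WARN (50), forwarded
    diag.warn('kept');       // WARN  (50) <= WARN (50), forwarded
    diag.debug('dropped');   // DEBUG (70) >  WARN (50), replaced by a no-op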
DiagLogLevel\n ): DiagLogFunction {\n const theFunc = logger[funcName];\n\n if (typeof theFunc === 'function' && maxLevel >= theLevel) {\n return theFunc.bind(logger);\n }\n return function () {};\n }\n\n return {\n error: _filterFunc('error', DiagLogLevel.ERROR),\n warn: _filterFunc('warn', DiagLogLevel.WARN),\n info: _filterFunc('info', DiagLogLevel.INFO),\n debug: _filterFunc('debug', DiagLogLevel.DEBUG),\n verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE),\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts new file mode 100644 index 0000000000..ac71ee3b58 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.d.ts @@ -0,0 +1,8 @@ +import { DiagLogger } from '../types'; +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. + * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +export declare function createNoopDiagLogger(): DiagLogger; +//# sourceMappingURL=noopLogger.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js new file mode 100644 index 0000000000..4091631451 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js @@ -0,0 +1,35 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createNoopDiagLogger = void 0; +function noopLogFunction() { } +/** + * Returns a No-Op Diagnostic logger where all messages do nothing. 
+ * @implements {@link DiagLogger} + * @returns {DiagLogger} + */ +function createNoopDiagLogger() { + return { + verbose: noopLogFunction, + debug: noopLogFunction, + info: noopLogFunction, + warn: noopLogFunction, + error: noopLogFunction, + }; +} +exports.createNoopDiagLogger = createNoopDiagLogger; +//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map new file mode 100644 index 0000000000..20e0e81ef7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js.map @@ -0,0 +1 @@ +{"version":3,"file":"noopLogger.js","sourceRoot":"","sources":["../../../../src/diag/internal/noopLogger.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,SAAS,eAAe,KAAI,CAAC;AAE7B;;;;GAIG;AACH,SAAgB,oBAAoB;IAClC,OAAO;QACL,OAAO,EAAE,eAAe;QACxB,KAAK,EAAE,eAAe;QACtB,IAAI,EAAE,eAAe;QACrB,IAAI,EAAE,eAAe;QACrB,KAAK,EAAE,eAAe;KACvB,CAAC;AACJ,CAAC;AARD,oDAQC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { DiagLogger } from '../types';\n\nfunction noopLogFunction() {}\n\n/**\n * Returns a No-Op Diagnostic logger where all messages do nothing.\n * @implements {@link DiagLogger}\n * @returns {DiagLogger}\n */\nexport function createNoopDiagLogger(): DiagLogger {\n return {\n verbose: noopLogFunction,\n debug: noopLogFunction,\n info: noopLogFunction,\n warn: noopLogFunction,\n error: noopLogFunction,\n };\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.d.ts b/node_modules/@opentelemetry/api/build/src/diag/types.d.ts new file mode 100644 index 0000000000..92b0c57868 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.d.ts @@ -0,0 +1,70 @@ +export declare type DiagLogFunction = (message: string, ...args: unknown[]) => void; +/** + * Defines an internal diagnostic logger interface which is used to log internal diagnostic + * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function. + * API provided implementations include :- + * - a No-Op {@link createNoopDiagLogger} + * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger} + * - a general Console {@link DiagConsoleLogger} version. + */ +export interface DiagLogger { + /** Log an error scenario that was not expected and caused the requested operation to fail. */ + error: DiagLogFunction; + /** + * Log a warning scenario to inform the developer of an issues that should be investigated. + * The requested operation may or may not have succeeded or completed. + */ + warn: DiagLogFunction; + /** + * Log a general informational message, this should not affect functionality. + * This is also the default logging level so this should NOT be used for logging + * debugging level information. 
+ */ + info: DiagLogFunction; + /** + * Log a general debug message that can be useful for identifying a failure. + * Information logged at this level may include diagnostic details that would + * help identify a failure scenario. + * For example: Logging the order of execution of async operations. + */ + debug: DiagLogFunction; + /** + * Log a detailed (verbose) trace level logging that can be used to identify failures + * where debug level logging would be insufficient, this level of tracing can include + * input and output parameters and as such may include PII information passing through + * the API. As such it is recommended that this level of tracing should not be enabled + * in a production environment. + */ + verbose: DiagLogFunction; +} +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. + */ +export declare enum DiagLogLevel { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + NONE = 0, + /** Identifies an error scenario */ + ERROR = 30, + /** Identifies a warning scenario */ + WARN = 50, + /** General informational log message */ + INFO = 60, + /** General debug log message */ + DEBUG = 70, + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + VERBOSE = 80, + /** Used to set the logging level to include all logging */ + ALL = 9999 +} +/** + * Defines options for ComponentLogger + */ +export interface ComponentLoggerOptions { + namespace: string; +} +//# sourceMappingURL=types.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.js b/node_modules/@opentelemetry/api/build/src/diag/types.js new file mode 100644 index 0000000000..c195e45e8c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.js @@ -0,0 +1,44 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DiagLogLevel = void 0; +/** + * Defines the available internal logging levels for the diagnostic logger, the numeric values + * of the levels are defined to match the original values from the initial LogLevel to avoid + * compatibility/migration issues for any implementation that assume the numeric ordering. 
+ */ +var DiagLogLevel; +(function (DiagLogLevel) { + /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ + DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; + /** Identifies an error scenario */ + DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; + /** Identifies a warning scenario */ + DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; + /** General informational log message */ + DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; + /** General debug log message */ + DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; + /** + * Detailed trace level logging should only be used for development, should only be set + * in a development environment. + */ + DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; + /** Used to set the logging level to include all logging */ + DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; +})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.js.map b/node_modules/@opentelemetry/api/build/src/diag/types.js.map new file mode 100644 index 0000000000..22dc8e213c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/diag/types.js.map @@ -0,0 +1 @@ +{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/diag/types.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AA+CH;;;;GAIG;AACH,IAAY,YAwBX;AAxBD,WAAY,YAAY;IACtB,uFAAuF;IACvF,+CAAQ,CAAA;IAER,mCAAmC;IACnC,kDAAU,CAAA;IAEV,oCAAoC;IACpC,gDAAS,CAAA;IAET,wCAAwC;IACxC,gDAAS,CAAA;IAET,gCAAgC;IAChC,kDAAU,CAAA;IAEV;;;OAGG;IACH,sDAAY,CAAA;IAEZ,2DAA2D;IAC3D,gDAAU,CAAA;AACZ,CAAC,EAxBW,YAAY,GAAZ,oBAAY,KAAZ,oBAAY,QAwBvB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport type DiagLogFunction = (message: string, ...args: unknown[]) => void;\n\n/**\n * Defines an internal diagnostic logger interface which is used to log internal diagnostic\n * messages, you can set the default diagnostic logger via the {@link DiagAPI} setLogger function.\n * API provided implementations include :-\n * - a No-Op {@link createNoopDiagLogger}\n * - a {@link DiagLogLevel} filtering wrapper {@link createLogLevelDiagLogger}\n * - a general Console {@link DiagConsoleLogger} version.\n */\nexport interface DiagLogger {\n /** Log an error scenario that was not expected and caused the requested operation to fail. 
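The DiagLogger interface in types.d.ts above is purely structural: any object providing the five DiagLogFunction members can be registered as the global diagnostic handler. A sketch of a custom logger; the array sink and the chosen level are illustrative assumptions, not part of this diff:

    import { diag, DiagLogger, DiagLogLevel } from '@opentelemetry/api';

    const lines: string[] = [];
    const arrayLogger: DiagLogger = {
      error:   (msg) => lines.push(`error: ${msg}`),
      warn:    (msg) => lines.push(`warn: ${msg}`),
      info:    (msg) => lines.push(`info: ${msg}`),
      debug:   (msg) => lines.push(`debug: ${msg}`),
      verbose: (msg) => lines.push(`verbose: ${msg}`),
    };

    diag.setLogger(arrayLogger, DiagLogLevel.INFO);
    diag.info('captured');    // ends up in lines
    diag.debug('filtered');   // dropped by the INFO-level wrapper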
*/\n error: DiagLogFunction;\n\n /**\n * Log a warning scenario to inform the developer of an issues that should be investigated.\n * The requested operation may or may not have succeeded or completed.\n */\n warn: DiagLogFunction;\n\n /**\n * Log a general informational message, this should not affect functionality.\n * This is also the default logging level so this should NOT be used for logging\n * debugging level information.\n */\n info: DiagLogFunction;\n\n /**\n * Log a general debug message that can be useful for identifying a failure.\n * Information logged at this level may include diagnostic details that would\n * help identify a failure scenario.\n * For example: Logging the order of execution of async operations.\n */\n debug: DiagLogFunction;\n\n /**\n * Log a detailed (verbose) trace level logging that can be used to identify failures\n * where debug level logging would be insufficient, this level of tracing can include\n * input and output parameters and as such may include PII information passing through\n * the API. As such it is recommended that this level of tracing should not be enabled\n * in a production environment.\n */\n verbose: DiagLogFunction;\n}\n\n/**\n * Defines the available internal logging levels for the diagnostic logger, the numeric values\n * of the levels are defined to match the original values from the initial LogLevel to avoid\n * compatibility/migration issues for any implementation that assume the numeric ordering.\n */\nexport enum DiagLogLevel {\n /** Diagnostic Logging level setting to disable all logging (except and forced logs) */\n NONE = 0,\n\n /** Identifies an error scenario */\n ERROR = 30,\n\n /** Identifies a warning scenario */\n WARN = 50,\n\n /** General informational log message */\n INFO = 60,\n\n /** General debug log message */\n DEBUG = 70,\n\n /**\n * Detailed trace level logging should only be used for development, should only be set\n * in a development environment.\n */\n VERBOSE = 80,\n\n /** Used to set the logging level to include all logging */\n ALL = 9999,\n}\n\n/**\n * Defines options for ComponentLogger\n */\nexport interface ComponentLoggerOptions {\n namespace: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.d.ts b/node_modules/@opentelemetry/api/build/src/index.d.ts new file mode 100644 index 0000000000..33ff2300ad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.d.ts @@ -0,0 +1,57 @@ +export * from './baggage/types'; +export { baggageEntryMetadataFromString } from './baggage/utils'; +export * from './common/Exception'; +export * from './common/Time'; +export * from './common/Attributes'; +export * from './diag'; +export * from './propagation/TextMapPropagator'; +export * from './trace/attributes'; +export * from './trace/link'; +export * from './trace/ProxyTracer'; +export * from './trace/ProxyTracerProvider'; +export * from './trace/Sampler'; +export * from './trace/SamplingResult'; +export * from './trace/span_context'; +export * from './trace/span_kind'; +export * from './trace/span'; +export * from './trace/SpanOptions'; +export * from './trace/status'; +export * from './trace/trace_flags'; +export * from './trace/trace_state'; +export { createTraceState } from './trace/internal/utils'; +export * from './trace/tracer_provider'; +export * from './trace/tracer'; +export * from './trace/tracer_options'; +export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; +export { INVALID_SPANID, 
INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; +export * from './context/context'; +export * from './context/types'; +import { ContextAPI } from './api/context'; +export type { ContextAPI } from './api/context'; +/** Entrypoint for context API */ +export declare const context: ContextAPI; +import { TraceAPI } from './api/trace'; +export type { TraceAPI } from './api/trace'; +/** Entrypoint for trace API */ +export declare const trace: TraceAPI; +import { PropagationAPI } from './api/propagation'; +export type { PropagationAPI } from './api/propagation'; +/** Entrypoint for propagation API */ +export declare const propagation: PropagationAPI; +import { DiagAPI } from './api/diag'; +export type { DiagAPI } from './api/diag'; +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +export declare const diag: DiagAPI; +declare const _default: { + trace: TraceAPI; + context: ContextAPI; + propagation: PropagationAPI; + diag: DiagAPI; +}; +export default _default; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.js b/node_modules/@opentelemetry/api/build/src/index.js new file mode 100644 index 0000000000..189bb606dd --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.js @@ -0,0 +1,88 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.diag = exports.propagation = exports.trace = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.baggageEntryMetadataFromString = void 0; +__exportStar(require("./baggage/types"), exports); +var utils_1 = require("./baggage/utils"); +Object.defineProperty(exports, "baggageEntryMetadataFromString", { enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } }); +__exportStar(require("./common/Exception"), exports); +__exportStar(require("./common/Time"), exports); +__exportStar(require("./common/Attributes"), exports); +__exportStar(require("./diag"), exports); +__exportStar(require("./propagation/TextMapPropagator"), exports); +__exportStar(require("./trace/attributes"), exports); +__exportStar(require("./trace/link"), exports); +__exportStar(require("./trace/ProxyTracer"), exports); +__exportStar(require("./trace/ProxyTracerProvider"), exports); +__exportStar(require("./trace/Sampler"), exports); +__exportStar(require("./trace/SamplingResult"), exports); +__exportStar(require("./trace/span_context"), exports); +__exportStar(require("./trace/span_kind"), exports); +__exportStar(require("./trace/span"), exports); +__exportStar(require("./trace/SpanOptions"), exports); +__exportStar(require("./trace/status"), exports); +__exportStar(require("./trace/trace_flags"), exports); +__exportStar(require("./trace/trace_state"), exports); +var utils_2 = require("./trace/internal/utils"); +Object.defineProperty(exports, "createTraceState", { enumerable: true, get: function () { return utils_2.createTraceState; } }); +__exportStar(require("./trace/tracer_provider"), exports); +__exportStar(require("./trace/tracer"), exports); +__exportStar(require("./trace/tracer_options"), exports); +var spancontext_utils_1 = require("./trace/spancontext-utils"); +Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } }); +Object.defineProperty(exports, "isValidTraceId", { enumerable: true, get: function () { return spancontext_utils_1.isValidTraceId; } }); +Object.defineProperty(exports, "isValidSpanId", { enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } }); +var invalid_span_constants_1 = require("./trace/invalid-span-constants"); +Object.defineProperty(exports, "INVALID_SPANID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } }); +Object.defineProperty(exports, "INVALID_TRACEID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } }); +Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } }); +__exportStar(require("./context/context"), exports); +__exportStar(require("./context/types"), exports); +var context_1 = require("./api/context"); +/** 
Entrypoint for context API */ +exports.context = context_1.ContextAPI.getInstance(); +var trace_1 = require("./api/trace"); +/** Entrypoint for trace API */ +exports.trace = trace_1.TraceAPI.getInstance(); +var propagation_1 = require("./api/propagation"); +/** Entrypoint for propagation API */ +exports.propagation = propagation_1.PropagationAPI.getInstance(); +var diag_1 = require("./api/diag"); +/** + * Entrypoint for Diag API. + * Defines Diagnostic handler used for internal diagnostic logging operations. + * The default provides a Noop DiagLogger implementation which may be changed via the + * diag.setLogger(logger: DiagLogger) function. + */ +exports.diag = diag_1.DiagAPI.instance(); +exports.default = { + trace: exports.trace, + context: exports.context, + propagation: exports.propagation, + diag: exports.diag, +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.js.map b/node_modules/@opentelemetry/api/build/src/index.js.map new file mode 100644 index 0000000000..9f89a0c51b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;;AAEH,kDAAgC;AAChC,yCAAiE;AAAxD,uHAAA,8BAA8B,OAAA;AACvC,qDAAmC;AACnC,gDAA8B;AAC9B,sDAAoC;AACpC,yCAAuB;AACvB,kEAAgD;AAChD,qDAAmC;AACnC,+CAA6B;AAC7B,sDAAoC;AACpC,8DAA4C;AAC5C,kDAAgC;AAChC,yDAAuC;AACvC,uDAAqC;AACrC,oDAAkC;AAClC,+CAA6B;AAC7B,sDAAoC;AACpC,iDAA+B;AAC/B,sDAAoC;AACpC,sDAAoC;AACpC,gDAA0D;AAAjD,yGAAA,gBAAgB,OAAA;AACzB,0DAAwC;AACxC,iDAA+B;AAC/B,yDAAuC;AAEvC,+DAImC;AAHjC,uHAAA,kBAAkB,OAAA;AAClB,mHAAA,cAAc,OAAA;AACd,kHAAA,aAAa,OAAA;AAGf,yEAIwC;AAHtC,wHAAA,cAAc,OAAA;AACd,yHAAA,eAAe,OAAA;AACf,8HAAA,oBAAoB,OAAA;AAGtB,oDAAkC;AAClC,kDAAgC;AAEhC,yCAA2C;AAE3C,iCAAiC;AACpB,QAAA,OAAO,GAAG,oBAAU,CAAC,WAAW,EAAE,CAAC;AAEhD,qCAAuC;AAEvC,+BAA+B;AAClB,QAAA,KAAK,GAAG,gBAAQ,CAAC,WAAW,EAAE,CAAC;AAE5C,iDAAmD;AAEnD,qCAAqC;AACxB,QAAA,WAAW,GAAG,4BAAc,CAAC,WAAW,EAAE,CAAC;AAExD,mCAAqC;AAGrC;;;;;GAKG;AACU,QAAA,IAAI,GAAG,cAAO,CAAC,QAAQ,EAAE,CAAC;AAEvC,kBAAe;IACb,KAAK,eAAA;IACL,OAAO,iBAAA;IACP,WAAW,qBAAA;IACX,IAAI,cAAA;CACL,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './baggage/types';\nexport { baggageEntryMetadataFromString } from './baggage/utils';\nexport * from './common/Exception';\nexport * from './common/Time';\nexport * from './common/Attributes';\nexport * from './diag';\nexport * from './propagation/TextMapPropagator';\nexport * from './trace/attributes';\nexport * from './trace/link';\nexport * from './trace/ProxyTracer';\nexport * from './trace/ProxyTracerProvider';\nexport * from './trace/Sampler';\nexport * from './trace/SamplingResult';\nexport * from './trace/span_context';\nexport * from './trace/span_kind';\nexport * from './trace/span';\nexport * from './trace/SpanOptions';\nexport * from './trace/status';\nexport 
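A minimal consumer-side sketch of the entrypoints the compiled index.js above wires up (trace, context, propagation, diag). It assumes the standard @opentelemetry/api surface; helpers such as getTracer, setSpan, context.with, DiagConsoleLogger and DiagLogLevel live in files outside this hunk, and with no SDK registered every call resolves to the no-op implementations:

import { context, diag, trace, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api';

// diag defaults to a no-op logger; route internal diagnostics to the console.
diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.WARN);

// Safe to call without an SDK: the API hands back no-op tracers and spans.
const tracer = trace.getTracer('example-instrumentation', '0.1.0');
const span = tracer.startSpan('do-work');
context.with(trace.setSpan(context.active(), span), () => {
  // work here runs with `span` as the active span
});
span.end();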
* from './trace/trace_flags';\nexport * from './trace/trace_state';\nexport { createTraceState } from './trace/internal/utils';\nexport * from './trace/tracer_provider';\nexport * from './trace/tracer';\nexport * from './trace/tracer_options';\n\nexport {\n isSpanContextValid,\n isValidTraceId,\n isValidSpanId,\n} from './trace/spancontext-utils';\n\nexport {\n INVALID_SPANID,\n INVALID_TRACEID,\n INVALID_SPAN_CONTEXT,\n} from './trace/invalid-span-constants';\n\nexport * from './context/context';\nexport * from './context/types';\n\nimport { ContextAPI } from './api/context';\nexport type { ContextAPI } from './api/context';\n/** Entrypoint for context API */\nexport const context = ContextAPI.getInstance();\n\nimport { TraceAPI } from './api/trace';\nexport type { TraceAPI } from './api/trace';\n/** Entrypoint for trace API */\nexport const trace = TraceAPI.getInstance();\n\nimport { PropagationAPI } from './api/propagation';\nexport type { PropagationAPI } from './api/propagation';\n/** Entrypoint for propagation API */\nexport const propagation = PropagationAPI.getInstance();\n\nimport { DiagAPI } from './api/diag';\nexport type { DiagAPI } from './api/diag';\n\n/**\n * Entrypoint for Diag API.\n * Defines Diagnostic handler used for internal diagnostic logging operations.\n * The default provides a Noop DiagLogger implementation which may be changed via the\n * diag.setLogger(logger: DiagLogger) function.\n */\nexport const diag = DiagAPI.instance();\n\nexport default {\n trace,\n context,\n propagation,\n diag,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts b/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts new file mode 100644 index 0000000000..bb90097f98 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.d.ts @@ -0,0 +1,16 @@ +import { ContextManager } from '../context/types'; +import { DiagLogger } from '../diag'; +import { TextMapPropagator } from '../propagation/TextMapPropagator'; +import type { TracerProvider } from '../trace/tracer_provider'; +export declare function registerGlobal(type: Type, instance: OTelGlobalAPI[Type], diag: DiagLogger, allowOverride?: boolean): boolean; +export declare function getGlobal(type: Type): OTelGlobalAPI[Type] | undefined; +export declare function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger): void; +declare type OTelGlobalAPI = { + version: string; + diag?: DiagLogger; + trace?: TracerProvider; + context?: ContextManager; + propagation?: TextMapPropagator; +}; +export {}; +//# sourceMappingURL=global-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js new file mode 100644 index 0000000000..4ce231ff4f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js @@ -0,0 +1,65 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; +var platform_1 = require("../platform"); +var version_1 = require("../version"); +var semver_1 = require("./semver"); +var major = version_1.VERSION.split('.')[0]; +var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." + major); +var _global = platform_1._globalThis; +function registerGlobal(type, instance, diag, allowOverride) { + var _a; + if (allowOverride === void 0) { allowOverride = false; } + var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? _a : { + version: version_1.VERSION, + }); + if (!allowOverride && api[type]) { + // already registered an API of this type + var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); + diag.error(err.stack || err.message); + return false; + } + if (api.version !== version_1.VERSION) { + // All registered APIs must be of the same version exactly + var err = new Error('@opentelemetry/api: All API registration versions must match'); + diag.error(err.stack || err.message); + return false; + } + api[type] = instance; + diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + version_1.VERSION + "."); + return true; +} +exports.registerGlobal = registerGlobal; +function getGlobal(type) { + var _a, _b; + var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; + if (!globalVersion || !semver_1.isCompatible(globalVersion)) { + return; + } + return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? 
void 0 : _b[type]; +} +exports.getGlobal = getGlobal; +function unregisterGlobal(type, diag) { + diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + version_1.VERSION + "."); + var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; + if (api) { + delete api[type]; + } +} +exports.unregisterGlobal = unregisterGlobal; +//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map new file mode 100644 index 0000000000..d8aca3b477 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"global-utils.js","sourceRoot":"","sources":["../../../src/internal/global-utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,wCAA0C;AAG1C,sCAAqC;AACrC,mCAAwC;AAExC,IAAM,KAAK,GAAG,iBAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;AACpC,IAAM,4BAA4B,GAAG,MAAM,CAAC,GAAG,CAC7C,0BAAwB,KAAO,CAChC,CAAC;AAEF,IAAM,OAAO,GAAG,sBAAyB,CAAC;AAE1C,SAAgB,cAAc,CAC5B,IAAU,EACV,QAA6B,EAC7B,IAAgB,EAChB,aAAqB;;IAArB,8BAAA,EAAA,qBAAqB;IAErB,IAAM,GAAG,GAAG,CAAC,OAAO,CAAC,4BAA4B,CAAC,GAAG,MAAA,OAAO,CAC1D,4BAA4B,CAC7B,mCAAI;QACH,OAAO,EAAE,iBAAO;KACjB,CAAC,CAAC;IAEH,IAAI,CAAC,aAAa,IAAI,GAAG,CAAC,IAAI,CAAC,EAAE;QAC/B,yCAAyC;QACzC,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,kEAAgE,IAAM,CACvE,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,IAAI,GAAG,CAAC,OAAO,KAAK,iBAAO,EAAE;QAC3B,0DAA0D;QAC1D,IAAM,GAAG,GAAG,IAAI,KAAK,CACnB,8DAA8D,CAC/D,CAAC;QACF,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC;QACrC,OAAO,KAAK,CAAC;KACd;IAED,GAAG,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC;IACrB,IAAI,CAAC,KAAK,CACR,iDAA+C,IAAI,UAAK,iBAAO,MAAG,CACnE,CAAC;IAEF,OAAO,IAAI,CAAC;AACd,CAAC;AApCD,wCAoCC;AAED,SAAgB,SAAS,CACvB,IAAU;;IAEV,IAAM,aAAa,GAAG,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAE,OAAO,CAAC;IACrE,IAAI,CAAC,aAAa,IAAI,CAAC,qBAAY,CAAC,aAAa,CAAC,EAAE;QAClD,OAAO;KACR;IACD,OAAO,MAAA,OAAO,CAAC,4BAA4B,CAAC,0CAAG,IAAI,CAAC,CAAC;AACvD,CAAC;AARD,8BAQC;AAED,SAAgB,gBAAgB,CAAC,IAAyB,EAAE,IAAgB;IAC1E,IAAI,CAAC,KAAK,CACR,oDAAkD,IAAI,UAAK,iBAAO,MAAG,CACtE,CAAC;IACF,IAAM,GAAG,GAAG,OAAO,CAAC,4BAA4B,CAAC,CAAC;IAElD,IAAI,GAAG,EAAE;QACP,OAAO,GAAG,CAAC,IAAI,CAAC,CAAC;KAClB;AACH,CAAC;AATD,4CASC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextManager } from '../context/types';\nimport { DiagLogger } from '../diag';\nimport { _globalThis } from '../platform';\nimport { TextMapPropagator } from '../propagation/TextMapPropagator';\nimport type { TracerProvider } from '../trace/tracer_provider';\nimport { VERSION } from '../version';\nimport { isCompatible } from './semver';\n\nconst major = VERSION.split('.')[0];\nconst GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(\n `opentelemetry.js.api.${major}`\n);\n\nconst _global = _globalThis as OTelGlobal;\n\nexport function registerGlobal(\n 
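The registration helpers above key a shared record off globalThis using a Symbol.for key that embeds the API's major version, refuse duplicate registrations, and require every registrant to carry the same version string. A simplified standalone sketch of that pattern, with illustrative names and version that are not part of the package:

// Sketch only: API_KEY, ExampleGlobalAPI, register and lookup are hypothetical names.
const API_KEY = Symbol.for('example.api.1'); // one shared key per major version

type ExampleGlobalAPI = { version: string; trace?: unknown };
const g = globalThis as { [key: symbol]: ExampleGlobalAPI | undefined };

function register(type: 'trace', instance: unknown, allowOverride = false): boolean {
  const api = (g[API_KEY] = g[API_KEY] ?? { version: '1.1.0' });
  if (!allowOverride && api[type]) return false; // duplicate registration is refused
  if (api.version !== '1.1.0') return false;     // mixed API versions are refused
  api[type] = instance;
  return true;
}

function lookup(type: 'trace'): unknown {
  return g[API_KEY]?.[type]; // the real getGlobal also checks semver compatibility first
}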
type: Type,\n instance: OTelGlobalAPI[Type],\n diag: DiagLogger,\n allowOverride = false\n): boolean {\n const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = _global[\n GLOBAL_OPENTELEMETRY_API_KEY\n ] ?? {\n version: VERSION,\n });\n\n if (!allowOverride && api[type]) {\n // already registered an API of this type\n const err = new Error(\n `@opentelemetry/api: Attempted duplicate registration of API: ${type}`\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n if (api.version !== VERSION) {\n // All registered APIs must be of the same version exactly\n const err = new Error(\n '@opentelemetry/api: All API registration versions must match'\n );\n diag.error(err.stack || err.message);\n return false;\n }\n\n api[type] = instance;\n diag.debug(\n `@opentelemetry/api: Registered a global for ${type} v${VERSION}.`\n );\n\n return true;\n}\n\nexport function getGlobal(\n type: Type\n): OTelGlobalAPI[Type] | undefined {\n const globalVersion = _global[GLOBAL_OPENTELEMETRY_API_KEY]?.version;\n if (!globalVersion || !isCompatible(globalVersion)) {\n return;\n }\n return _global[GLOBAL_OPENTELEMETRY_API_KEY]?.[type];\n}\n\nexport function unregisterGlobal(type: keyof OTelGlobalAPI, diag: DiagLogger) {\n diag.debug(\n `@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`\n );\n const api = _global[GLOBAL_OPENTELEMETRY_API_KEY];\n\n if (api) {\n delete api[type];\n }\n}\n\ntype OTelGlobal = {\n [GLOBAL_OPENTELEMETRY_API_KEY]?: OTelGlobalAPI;\n};\n\ntype OTelGlobalAPI = {\n version: string;\n\n diag?: DiagLogger;\n trace?: TracerProvider;\n context?: ContextManager;\n propagation?: TextMapPropagator;\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts b/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts new file mode 100644 index 0000000000..d9f4259e45 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.d.ts @@ -0,0 +1,34 @@ +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. + * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +export declare function _makeCompatibilityCheck(ownVersion: string): (globalVersion: string) => boolean; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +export declare const isCompatible: (globalVersion: string) => boolean; +//# sourceMappingURL=semver.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.js b/node_modules/@opentelemetry/api/build/src/internal/semver.js new file mode 100644 index 0000000000..b56392aec6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.js @@ -0,0 +1,122 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isCompatible = exports._makeCompatibilityCheck = void 0; +var version_1 = require("../version"); +var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; +/** + * Create a function to test an API version to see if it is compatible with the provided ownVersion. 
+ * + * The returned function has the following semantics: + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param ownVersion version which should be checked against + */ +function _makeCompatibilityCheck(ownVersion) { + var acceptedVersions = new Set([ownVersion]); + var rejectedVersions = new Set(); + var myVersionMatch = ownVersion.match(re); + if (!myVersionMatch) { + // we cannot guarantee compatibility so we always return noop + return function () { return false; }; + } + var ownVersionParsed = { + major: +myVersionMatch[1], + minor: +myVersionMatch[2], + patch: +myVersionMatch[3], + prerelease: myVersionMatch[4], + }; + // if ownVersion has a prerelease tag, versions must match exactly + if (ownVersionParsed.prerelease != null) { + return function isExactmatch(globalVersion) { + return globalVersion === ownVersion; + }; + } + function _reject(v) { + rejectedVersions.add(v); + return false; + } + function _accept(v) { + acceptedVersions.add(v); + return true; + } + return function isCompatible(globalVersion) { + if (acceptedVersions.has(globalVersion)) { + return true; + } + if (rejectedVersions.has(globalVersion)) { + return false; + } + var globalVersionMatch = globalVersion.match(re); + if (!globalVersionMatch) { + // cannot parse other version + // we cannot guarantee compatibility so we always noop + return _reject(globalVersion); + } + var globalVersionParsed = { + major: +globalVersionMatch[1], + minor: +globalVersionMatch[2], + patch: +globalVersionMatch[3], + prerelease: globalVersionMatch[4], + }; + // if globalVersion has a prerelease tag, versions must match exactly + if (globalVersionParsed.prerelease != null) { + return _reject(globalVersion); + } + // major versions must match + if (ownVersionParsed.major !== globalVersionParsed.major) { + return _reject(globalVersion); + } + if (ownVersionParsed.major === 0) { + if (ownVersionParsed.minor === globalVersionParsed.minor && + ownVersionParsed.patch <= globalVersionParsed.patch) { + return _accept(globalVersion); + } + return _reject(globalVersion); + } + if (ownVersionParsed.minor <= globalVersionParsed.minor) { + return _accept(globalVersion); + } + return _reject(globalVersion); + }; +} +exports._makeCompatibilityCheck = _makeCompatibilityCheck; +/** + * Test an API version to see if it is compatible with this API. 
+ * + * - Exact match is always compatible + * - Major versions must match exactly + * - 1.x package cannot use global 2.x package + * - 2.x package cannot use global 1.x package + * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API + * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects + * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 + * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor + * - Patch and build tag differences are not considered at this time + * + * @param version version of the API requesting an instance of the global API + */ +exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); +//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.js.map b/node_modules/@opentelemetry/api/build/src/internal/semver.js.map new file mode 100644 index 0000000000..c90c00dda9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/internal/semver.js.map @@ -0,0 +1 @@ +{"version":3,"file":"semver.js","sourceRoot":"","sources":["../../../src/internal/semver.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,sCAAqC;AAErC,IAAM,EAAE,GAAG,+BAA+B,CAAC;AAE3C;;;;;;;;;;;;;;;GAeG;AACH,SAAgB,uBAAuB,CACrC,UAAkB;IAElB,IAAM,gBAAgB,GAAG,IAAI,GAAG,CAAS,CAAC,UAAU,CAAC,CAAC,CAAC;IACvD,IAAM,gBAAgB,GAAG,IAAI,GAAG,EAAU,CAAC;IAE3C,IAAM,cAAc,GAAG,UAAU,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;IAC5C,IAAI,CAAC,cAAc,EAAE;QACnB,6DAA6D;QAC7D,OAAO,cAAM,OAAA,KAAK,EAAL,CAAK,CAAC;KACpB;IAED,IAAM,gBAAgB,GAAG;QACvB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,KAAK,EAAE,CAAC,cAAc,CAAC,CAAC,CAAC;QACzB,UAAU,EAAE,cAAc,CAAC,CAAC,CAAC;KAC9B,CAAC;IAEF,kEAAkE;IAClE,IAAI,gBAAgB,CAAC,UAAU,IAAI,IAAI,EAAE;QACvC,OAAO,SAAS,YAAY,CAAC,aAAqB;YAChD,OAAO,aAAa,KAAK,UAAU,CAAC;QACtC,CAAC,CAAC;KACH;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,KAAK,CAAC;IACf,CAAC;IAED,SAAS,OAAO,CAAC,CAAS;QACxB,gBAAgB,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QACxB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO,SAAS,YAAY,CAAC,aAAqB;QAChD,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,IAAI,CAAC;SACb;QAED,IAAI,gBAAgB,CAAC,GAAG,CAAC,aAAa,CAAC,EAAE;YACvC,OAAO,KAAK,CAAC;SACd;QAED,IAAM,kBAAkB,GAAG,aAAa,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC;QACnD,IAAI,CAAC,kBAAkB,EAAE;YACvB,6BAA6B;YAC7B,sDAAsD;YACtD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAM,mBAAmB,GAAG;YAC1B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,KAAK,EAAE,CAAC,kBAAkB,CAAC,CAAC,CAAC;YAC7B,UAAU,EAAE,kBAAkB,CAAC,CAAC,CAAC;SAClC,CAAC;QAEF,qEAAqE;QACrE,IAAI,mBAAmB,CAAC,UAAU,IAAI,IAAI,EAAE;YAC1C,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,4BAA4B;QAC5B,IAAI,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK,EAAE;YACxD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,KAAK,CAAC,EAAE;YAChC,IACE,gBAAgB,CAAC,KAAK,KAAK,mBAAmB,CAAC,KAAK;gBACpD,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EACnD;gBACA,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;aAC/B;YAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,IAAI,gBAAgB,CAAC,KAAK,IAAI,mBAAmB,CAAC,KAAK,EAAE;YACvD,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;SAC/B;QAED,OAAO,OAAO,CAAC,aAAa,CAAC,CAAC;IAChC,CAAC,CAAC;AACJ,CAAC;AAtFD,0DAsFC;AAED;;;;;;;;;;;;;;GAcG;AACU,QAAA,YAAY,GAAG,uBAAuB,CAAC,iBAAO,CAAC,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 
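To make the compatibility rules above concrete, here is how a checker produced by _makeCompatibilityCheck would answer for a caller whose own version is 1.3.0 (the check binding is hypothetical; the expected results simply restate the documented rules):

// Hypothetical binding standing in for _makeCompatibilityCheck('1.3.0').
declare const check: (globalVersion: string) => boolean;

check('1.3.0');  // true  - exact match is always compatible
check('1.4.2');  // true  - same major, and the global minor is >= the caller's minor
check('1.2.9');  // false - a 1.2 global may lack functions a 1.3 caller expects
check('2.0.0');  // false - major versions must match exactly
check('0.20.0'); // false - majors differ; for 0.x versions the minor is treated as the major
// If the caller's own version carried a prerelease tag (e.g. '1.3.0-rc.1'),
// only that exact string would be accepted.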
2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { VERSION } from '../version';\n\nconst re = /^(\\d+)\\.(\\d+)\\.(\\d+)(-(.+))?$/;\n\n/**\n * Create a function to test an API version to see if it is compatible with the provided ownVersion.\n *\n * The returned function has the following semantics:\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param ownVersion version which should be checked against\n */\nexport function _makeCompatibilityCheck(\n ownVersion: string\n): (globalVersion: string) => boolean {\n const acceptedVersions = new Set([ownVersion]);\n const rejectedVersions = new Set();\n\n const myVersionMatch = ownVersion.match(re);\n if (!myVersionMatch) {\n // we cannot guarantee compatibility so we always return noop\n return () => false;\n }\n\n const ownVersionParsed = {\n major: +myVersionMatch[1],\n minor: +myVersionMatch[2],\n patch: +myVersionMatch[3],\n prerelease: myVersionMatch[4],\n };\n\n // if ownVersion has a prerelease tag, versions must match exactly\n if (ownVersionParsed.prerelease != null) {\n return function isExactmatch(globalVersion: string): boolean {\n return globalVersion === ownVersion;\n };\n }\n\n function _reject(v: string) {\n rejectedVersions.add(v);\n return false;\n }\n\n function _accept(v: string) {\n acceptedVersions.add(v);\n return true;\n }\n\n return function isCompatible(globalVersion: string): boolean {\n if (acceptedVersions.has(globalVersion)) {\n return true;\n }\n\n if (rejectedVersions.has(globalVersion)) {\n return false;\n }\n\n const globalVersionMatch = globalVersion.match(re);\n if (!globalVersionMatch) {\n // cannot parse other version\n // we cannot guarantee compatibility so we always noop\n return _reject(globalVersion);\n }\n\n const globalVersionParsed = {\n major: +globalVersionMatch[1],\n minor: +globalVersionMatch[2],\n patch: +globalVersionMatch[3],\n prerelease: globalVersionMatch[4],\n };\n\n // if globalVersion has a prerelease tag, versions must match exactly\n if (globalVersionParsed.prerelease != null) {\n return _reject(globalVersion);\n }\n\n // major versions must match\n if (ownVersionParsed.major !== globalVersionParsed.major) {\n return _reject(globalVersion);\n }\n\n if (ownVersionParsed.major === 0) {\n if (\n ownVersionParsed.minor === globalVersionParsed.minor &&\n ownVersionParsed.patch <= globalVersionParsed.patch\n ) {\n return _accept(globalVersion);\n }\n\n return 
_reject(globalVersion);\n }\n\n if (ownVersionParsed.minor <= globalVersionParsed.minor) {\n return _accept(globalVersion);\n }\n\n return _reject(globalVersion);\n };\n}\n\n/**\n * Test an API version to see if it is compatible with this API.\n *\n * - Exact match is always compatible\n * - Major versions must match exactly\n * - 1.x package cannot use global 2.x package\n * - 2.x package cannot use global 1.x package\n * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API\n * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects\n * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3\n * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor\n * - Patch and build tag differences are not considered at this time\n *\n * @param version version of the API requesting an instance of the global API\n */\nexport const isCompatible = _makeCompatibilityCheck(VERSION);\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts new file mode 100644 index 0000000000..e73fd73e55 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.d.ts @@ -0,0 +1,10 @@ +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js new file mode 100644 index 0000000000..747610cd32 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js @@ -0,0 +1,35 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports._globalThis = void 0; +// Updates to this file should also be replicated to @opentelemetry/api-metrics and +// @opentelemetry/core too. +/** + * - globalThis (New standard) + * - self (Will return the current window instance for supported browsers) + * - window (fallback for older browser implementations) + * - global (NodeJS implementation) + * - (When all else fails) + */ +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef +exports._globalThis = typeof globalThis === 'object' ? globalThis : + typeof self === 'object' ? 
self : + typeof window === 'object' ? window : + typeof global === 'object' ? global : + {}; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map new file mode 100644 index 0000000000..6a0e64ea30 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/browser/globalThis.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,mFAAmF;AACnF,2BAA2B;AAE3B;;;;;;GAMG;AAEH,gEAAgE;AAChE,8EAA8E;AACjE,QAAA,WAAW,GACtB,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC;IAC7C,OAAO,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;QACjC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;YACrC,OAAO,MAAM,KAAK,QAAQ,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC;gBACrC,EAAuB,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Updates to this file should also be replicated to @opentelemetry/api-metrics and\n// @opentelemetry/core too.\n\n/**\n * - globalThis (New standard)\n * - self (Will return the current window instance for supported browsers)\n * - window (fallback for older browser implementations)\n * - global (NodeJS implementation)\n * - (When all else fails)\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef\nexport const _globalThis: typeof globalThis =\n typeof globalThis === 'object' ? globalThis :\n typeof self === 'object' ? self :\n typeof window === 'object' ? window :\n typeof global === 'object' ? global :\n {} as typeof globalThis;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts new file mode 100644 index 0000000000..ba20e1232c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js new file mode 100644 index 0000000000..99fd57c8a6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./globalThis"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map new file mode 100644 index 0000000000..5a406a9925 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/browser/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,+CAA6B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/index.d.ts new file mode 100644 index 0000000000..90595da96f --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.d.ts @@ -0,0 +1,2 @@ +export * from './node'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.js b/node_modules/@opentelemetry/api/build/src/platform/index.js new file mode 100644 index 0000000000..33b834dbf2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./node"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/index.js.map new file mode 100644 index 0000000000..bc13701f17 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/platform/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,yCAAuB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './node';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts new file mode 100644 index 0000000000..e3c83e5d29 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.d.ts @@ -0,0 +1,3 @@ +/** only globals that common to node and browsers are allowed */ +export declare const _globalThis: typeof globalThis; +//# sourceMappingURL=globalThis.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js new file mode 100644 index 0000000000..82c4e3949e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js @@ -0,0 +1,22 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports._globalThis = void 0; +/** only globals that common to node and browsers are allowed */ +// eslint-disable-next-line node/no-unsupported-features/es-builtins +exports._globalThis = typeof globalThis === 'object' ? globalThis : global; +//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map new file mode 100644 index 0000000000..2f2ca822ec --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js.map @@ -0,0 +1 @@ +{"version":3,"file":"globalThis.js","sourceRoot":"","sources":["../../../../src/platform/node/globalThis.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,gEAAgE;AAChE,oEAAoE;AACvD,QAAA,WAAW,GAAG,OAAO,UAAU,KAAK,QAAQ,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC,CAAC,MAAM,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/** only globals that common to node and browsers are allowed */\n// eslint-disable-next-line node/no-unsupported-features/es-builtins\nexport const _globalThis = typeof globalThis === 'object' ? globalThis : global;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts b/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts new file mode 100644 index 0000000000..ba20e1232c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.d.ts @@ -0,0 +1,2 @@ +export * from './globalThis'; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.js b/node_modules/@opentelemetry/api/build/src/platform/node/index.js new file mode 100644 index 0000000000..99fd57c8a6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.js @@ -0,0 +1,29 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +__exportStar(require("./globalThis"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map b/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map new file mode 100644 index 0000000000..95561e9811 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/platform/node/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../../src/platform/node/index.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;;;;;;;;;;AAEH,+CAA6B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport * from './globalThis';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts new file mode 100644 index 0000000000..398021f3b2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.d.ts @@ -0,0 +1,13 @@ +import { Context } from '../context/types'; +import { TextMapPropagator } from './TextMapPropagator'; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +export declare class NoopTextMapPropagator implements TextMapPropagator { + /** Noop inject function does nothing */ + inject(_context: Context, _carrier: unknown): void; + /** Noop extract function does nothing and returns the input context */ + extract(context: Context, _carrier: unknown): Context; + fields(): string[]; +} +//# sourceMappingURL=NoopTextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js new file mode 100644 index 0000000000..123adeddad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js @@ -0,0 +1,37 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTextMapPropagator = void 0; +/** + * No-op implementations of {@link TextMapPropagator}. + */ +var NoopTextMapPropagator = /** @class */ (function () { + function NoopTextMapPropagator() { + } + /** Noop inject function does nothing */ + NoopTextMapPropagator.prototype.inject = function (_context, _carrier) { }; + /** Noop extract function does nothing and returns the input context */ + NoopTextMapPropagator.prototype.extract = function (context, _carrier) { + return context; + }; + NoopTextMapPropagator.prototype.fields = function () { + return []; + }; + return NoopTextMapPropagator; +}()); +exports.NoopTextMapPropagator = NoopTextMapPropagator; +//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map new file mode 100644 index 0000000000..b893e15de2 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/NoopTextMapPropagator.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH;;GAEG;AACH;IAAA;IAUA,CAAC;IATC,wCAAwC;IACxC,sCAAM,GAAN,UAAO,QAAiB,EAAE,QAAiB,IAAS,CAAC;IACrD,uEAAuE;IACvE,uCAAO,GAAP,UAAQ,OAAgB,EAAE,QAAiB;QACzC,OAAO,OAAO,CAAC;IACjB,CAAC;IACD,sCAAM,GAAN;QACE,OAAO,EAAE,CAAC;IACZ,CAAC;IACH,4BAAC;AAAD,CAAC,AAVD,IAUC;AAVY,sDAAqB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { TextMapPropagator } from './TextMapPropagator';\n\n/**\n * No-op implementations of {@link TextMapPropagator}.\n */\nexport class NoopTextMapPropagator implements TextMapPropagator {\n /** Noop inject function does nothing */\n inject(_context: Context, _carrier: unknown): void {}\n /** Noop extract function does nothing and returns the input context */\n extract(context: Context, _carrier: unknown): Context {\n return context;\n }\n fields(): string[] {\n return [];\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts new file mode 100644 index 0000000000..dc39367bfb --- /dev/null +++ 
b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.d.ts @@ -0,0 +1,84 @@ +import { Context } from '../context/types'; +/** + * Injects `Context` into and extracts it from carriers that travel + * in-band across process boundaries. Encoding is expected to conform to the + * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request + * headers. + * + * The carrier of propagated data on both the client (injector) and server + * (extractor) side is usually an object such as http headers. Propagation is + * usually implemented via library-specific request interceptors, where the + * client-side injects values and the server-side extracts them. + */ +export interface TextMapPropagator { + /** + * Injects values from a given `Context` into a carrier. + * + * OpenTelemetry defines a common set of format values (TextMapPropagator), + * and each has an expected `carrier` type. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param setter an optional {@link TextMapSetter}. If undefined, values will be + * set by direct object assignment. + */ + inject(context: Context, carrier: Carrier, setter: TextMapSetter): void; + /** + * Given a `Context` and a carrier, extract context values from a + * carrier and return a new context, created from the old context, with the + * extracted values. + * + * @param context the Context from which to extract values to transmit over + * the wire. + * @param carrier the carrier of propagation fields, such as http request + * headers. + * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all + * own properties, and keys will be accessed by direct object access. + */ + extract(context: Context, carrier: Carrier, getter: TextMapGetter): Context; + /** + * Return a list of all fields which may be used by the propagator. + */ + fields(): string[]; +} +/** + * A setter is specified by the caller to define a specific method + * to set key/value pairs on the carrier within a propagator. + */ +export interface TextMapSetter { + /** + * Callback used to set a key/value pair on an object. + * + * Should be called by the propagator each time a key/value pair + * should be set, and should set that key/value pair on the propagator. + * + * @param carrier object or class which carries key/value pairs + * @param key string key to modify + * @param value value to be set to the key on the carrier + */ + set(carrier: Carrier, key: string, value: string): void; +} +/** + * A getter is specified by the caller to define a specific method + * to get the value of a key from a carrier. + */ +export interface TextMapGetter { + /** + * Get a list of all keys available on the carrier. + * + * @param carrier + */ + keys(carrier: Carrier): string[]; + /** + * Get the value of a specific key from the carrier. 
+ * + * @param carrier + * @param key + */ + get(carrier: Carrier, key: string): undefined | string | string[]; +} +export declare const defaultTextMapGetter: TextMapGetter; +export declare const defaultTextMapSetter: TextMapSetter; +//# sourceMappingURL=TextMapPropagator.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js new file mode 100644 index 0000000000..880734153c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js @@ -0,0 +1,41 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0; +exports.defaultTextMapGetter = { + get: function (carrier, key) { + if (carrier == null) { + return undefined; + } + return carrier[key]; + }, + keys: function (carrier) { + if (carrier == null) { + return []; + } + return Object.keys(carrier); + }, +}; +exports.defaultTextMapSetter = { + set: function (carrier, key, value) { + if (carrier == null) { + return; + } + carrier[key] = value; + }, +}; +//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map new file mode 100644 index 0000000000..06abd5a067 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js.map @@ -0,0 +1 @@ +{"version":3,"file":"TextMapPropagator.js","sourceRoot":"","sources":["../../../src/propagation/TextMapPropagator.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAkGU,QAAA,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG;QACd,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,SAAS,CAAC;SAClB;QACD,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC;IACtB,CAAC;IAED,IAAI,YAAC,OAAO;QACV,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO,EAAE,CAAC;SACX;QACD,OAAO,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF,CAAC;AAEW,QAAA,oBAAoB,GAAkB;IACjD,GAAG,YAAC,OAAO,EAAE,GAAG,EAAE,KAAK;QACrB,IAAI,OAAO,IAAI,IAAI,EAAE;YACnB,OAAO;SACR;QAED,OAAO,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;IACvB,CAAC;CACF,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\n\n/**\n * 
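A consumer-side sketch of the carrier contract defined above. With nothing registered the propagation entrypoint falls back to the NoopTextMapPropagator, so inject writes nothing and extract returns its input context; an SDK would normally register a concrete propagator such as W3C trace context. The Headers example assumes a runtime with the Fetch API:

import { context, defaultTextMapGetter, defaultTextMapSetter, propagation } from '@opentelemetry/api';
import type { TextMapSetter } from '@opentelemetry/api';

// Outgoing request: copy the active context into a plain header object.
const outgoing: Record<string, string> = {};
propagation.inject(context.active(), outgoing, defaultTextMapSetter);

// Incoming request: rebuild a context from received headers (example W3C value).
const incoming = { traceparent: '00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01' };
const extracted = propagation.extract(context.active(), incoming, defaultTextMapGetter);
context.with(extracted, () => {
  // handle the request with the remote span context as the active parent
});

// A carrier that is not a plain object needs a matching setter, e.g. Fetch Headers.
const headersSetter: TextMapSetter<Headers> = {
  set: (carrier, key, value) => carrier.set(key, value),
};
propagation.inject(context.active(), new Headers(), headersSetter);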
Injects `Context` into and extracts it from carriers that travel\n * in-band across process boundaries. Encoding is expected to conform to the\n * HTTP Header Field semantics. Values are often encoded as RPC/HTTP request\n * headers.\n *\n * The carrier of propagated data on both the client (injector) and server\n * (extractor) side is usually an object such as http headers. Propagation is\n * usually implemented via library-specific request interceptors, where the\n * client-side injects values and the server-side extracts them.\n */\nexport interface TextMapPropagator {\n /**\n * Injects values from a given `Context` into a carrier.\n *\n * OpenTelemetry defines a common set of format values (TextMapPropagator),\n * and each has an expected `carrier` type.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param setter an optional {@link TextMapSetter}. If undefined, values will be\n * set by direct object assignment.\n */\n inject(\n context: Context,\n carrier: Carrier,\n setter: TextMapSetter\n ): void;\n\n /**\n * Given a `Context` and a carrier, extract context values from a\n * carrier and return a new context, created from the old context, with the\n * extracted values.\n *\n * @param context the Context from which to extract values to transmit over\n * the wire.\n * @param carrier the carrier of propagation fields, such as http request\n * headers.\n * @param getter an optional {@link TextMapGetter}. If undefined, keys will be all\n * own properties, and keys will be accessed by direct object access.\n */\n extract(\n context: Context,\n carrier: Carrier,\n getter: TextMapGetter\n ): Context;\n\n /**\n * Return a list of all fields which may be used by the propagator.\n */\n fields(): string[];\n}\n\n/**\n * A setter is specified by the caller to define a specific method\n * to set key/value pairs on the carrier within a propagator.\n */\nexport interface TextMapSetter {\n /**\n * Callback used to set a key/value pair on an object.\n *\n * Should be called by the propagator each time a key/value pair\n * should be set, and should set that key/value pair on the propagator.\n *\n * @param carrier object or class which carries key/value pairs\n * @param key string key to modify\n * @param value value to be set to the key on the carrier\n */\n set(carrier: Carrier, key: string, value: string): void;\n}\n\n/**\n * A getter is specified by the caller to define a specific method\n * to get the value of a key from a carrier.\n */\nexport interface TextMapGetter {\n /**\n * Get a list of all keys available on the carrier.\n *\n * @param carrier\n */\n keys(carrier: Carrier): string[];\n\n /**\n * Get the value of a specific key from the carrier.\n *\n * @param carrier\n * @param key\n */\n get(carrier: Carrier, key: string): undefined | string | string[];\n}\n\nexport const defaultTextMapGetter: TextMapGetter = {\n get(carrier, key) {\n if (carrier == null) {\n return undefined;\n }\n return carrier[key];\n },\n\n keys(carrier) {\n if (carrier == null) {\n return [];\n }\n return Object.keys(carrier);\n },\n};\n\nexport const defaultTextMapSetter: TextMapSetter = {\n set(carrier, key, value) {\n if (carrier == null) {\n return;\n }\n\n carrier[key] = value;\n },\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts new 
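The propagation contract above is generic over an arbitrary carrier, and the compiled defaultTextMapGetter/defaultTextMapSetter simply read and write own properties of a plain object. As an illustrative sketch (not part of the vendored files), a getter/setter pair for Node-style header objects, whose values may be string arrays, can implement the same TextMapGetter/TextMapSetter interfaces; the Headers alias and the lower-casing policy are assumptions of the example, not something the API prescribes.

```ts
import {
  TextMapGetter,
  TextMapSetter,
  defaultTextMapGetter,
  defaultTextMapSetter,
} from '@opentelemetry/api';

// Node-style header map: a header may hold a string or an array of strings.
type Headers = Record<string, string | string[] | undefined>;

// Getter mirroring the null checks of defaultTextMapGetter (see the compiled
// TextMapPropagator.js above), but flattening array-valued headers to their first entry.
const headerGetter: TextMapGetter<Headers> = {
  keys(carrier) {
    return carrier == null ? [] : Object.keys(carrier);
  },
  get(carrier, key) {
    if (carrier == null) return undefined;
    const value = carrier[key.toLowerCase()];
    return Array.isArray(value) ? value[0] : value;
  },
};

// Setter that writes lower-cased keys by direct assignment, as the docs describe.
const headerSetter: TextMapSetter<Headers> = {
  set(carrier, key, value) {
    if (carrier == null) return;
    carrier[key.toLowerCase()] = value;
  },
};

// The defaults remain fine for plain string-valued objects…
const carrier: Headers = {};
defaultTextMapSetter.set(carrier, 'x-example', 'value');
console.log(defaultTextMapGetter.get(carrier, 'x-example')); // 'value'

// …while the custom pair normalizes key casing and array values.
headerSetter.set(carrier, 'X-Request-Id', 'abc123');
console.log(headerGetter.get(carrier, 'X-Request-Id'));      // 'abc123'
console.log(headerGetter.keys(carrier));                     // ['x-example', 'x-request-id']
```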
file mode 100644 index 0000000000..883cf91445 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.d.ts @@ -0,0 +1,25 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Span } from './span'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +export declare class NonRecordingSpan implements Span { + private readonly _spanContext; + constructor(_spanContext?: SpanContext); + spanContext(): SpanContext; + setAttribute(_key: string, _value: unknown): this; + setAttributes(_attributes: SpanAttributes): this; + addEvent(_name: string, _attributes?: SpanAttributes): this; + setStatus(_status: SpanStatus): this; + updateName(_name: string): this; + end(_endTime?: TimeInput): void; + isRecording(): boolean; + recordException(_exception: Exception, _time?: TimeInput): void; +} +//# sourceMappingURL=NonRecordingSpan.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js new file mode 100644 index 0000000000..d5f537a3f5 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js @@ -0,0 +1,65 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NonRecordingSpan = void 0; +var invalid_span_constants_1 = require("./invalid-span-constants"); +/** + * The NonRecordingSpan is the default {@link Span} that is used when no Span + * implementation is available. All operations are no-op including context + * propagation. + */ +var NonRecordingSpan = /** @class */ (function () { + function NonRecordingSpan(_spanContext) { + if (_spanContext === void 0) { _spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT; } + this._spanContext = _spanContext; + } + // Returns a SpanContext. 
+ NonRecordingSpan.prototype.spanContext = function () { + return this._spanContext; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttribute = function (_key, _value) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setAttributes = function (_attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.setStatus = function (_status) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.updateName = function (_name) { + return this; + }; + // By default does nothing + NonRecordingSpan.prototype.end = function (_endTime) { }; + // isRecording always returns false for NonRecordingSpan. + NonRecordingSpan.prototype.isRecording = function () { + return false; + }; + // By default does nothing + NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; + return NonRecordingSpan; +}()); +exports.NonRecordingSpan = NonRecordingSpan; +//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map new file mode 100644 index 0000000000..7a105a14f3 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NonRecordingSpan.js","sourceRoot":"","sources":["../../../src/trace/NonRecordingSpan.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAKH,mEAAgE;AAKhE;;;;GAIG;AACH;IACE,0BACmB,YAAgD;QAAhD,6BAAA,EAAA,eAA4B,6CAAoB;QAAhD,iBAAY,GAAZ,YAAY,CAAoC;IAChE,CAAC;IAEJ,yBAAyB;IACzB,sCAAW,GAAX;QACE,OAAO,IAAI,CAAC,YAAY,CAAC;IAC3B,CAAC;IAED,0BAA0B;IAC1B,uCAAY,GAAZ,UAAa,IAAY,EAAE,MAAe;QACxC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,wCAAa,GAAb,UAAc,WAA2B;QACvC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,mCAAQ,GAAR,UAAS,KAAa,EAAE,WAA4B;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,oCAAS,GAAT,UAAU,OAAmB;QAC3B,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,qCAAU,GAAV,UAAW,KAAa;QACtB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,0BAA0B;IAC1B,8BAAG,GAAH,UAAI,QAAoB,IAAS,CAAC;IAElC,yDAAyD;IACzD,sCAAW,GAAX;QACE,OAAO,KAAK,CAAC;IACf,CAAC;IAED,0BAA0B;IAC1B,0CAAe,GAAf,UAAgB,UAAqB,EAAE,KAAiB,IAAS,CAAC;IACpE,uBAAC;AAAD,CAAC,AA7CD,IA6CC;AA7CY,4CAAgB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { INVALID_SPAN_CONTEXT } from './invalid-span-constants';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * The NonRecordingSpan is the default {@link Span} that is used when no Span\n * implementation is available. 
All operations are no-op including context\n * propagation.\n */\nexport class NonRecordingSpan implements Span {\n constructor(\n private readonly _spanContext: SpanContext = INVALID_SPAN_CONTEXT\n ) {}\n\n // Returns a SpanContext.\n spanContext(): SpanContext {\n return this._spanContext;\n }\n\n // By default does nothing\n setAttribute(_key: string, _value: unknown): this {\n return this;\n }\n\n // By default does nothing\n setAttributes(_attributes: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n addEvent(_name: string, _attributes?: SpanAttributes): this {\n return this;\n }\n\n // By default does nothing\n setStatus(_status: SpanStatus): this {\n return this;\n }\n\n // By default does nothing\n updateName(_name: string): this {\n return this;\n }\n\n // By default does nothing\n end(_endTime?: TimeInput): void {}\n\n // isRecording always returns false for NonRecordingSpan.\n isRecording(): boolean {\n return false;\n }\n\n // By default does nothing\n recordException(_exception: Exception, _time?: TimeInput): void {}\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts new file mode 100644 index 0000000000..0e059c990c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.d.ts @@ -0,0 +1,14 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +/** + * No-op implementations of {@link Tracer}. + */ +export declare class NoopTracer implements Tracer { + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan ReturnType>(name: string, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, fn: F): ReturnType; + startActiveSpan ReturnType>(name: string, opts: SpanOptions | undefined, ctx: Context | undefined, fn: F): ReturnType; +} +//# sourceMappingURL=NoopTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js new file mode 100644 index 0000000000..5c1b26fa50 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js @@ -0,0 +1,78 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracer = void 0; +var context_1 = require("../api/context"); +var context_utils_1 = require("../trace/context-utils"); +var NonRecordingSpan_1 = require("./NonRecordingSpan"); +var spancontext_utils_1 = require("./spancontext-utils"); +var context = context_1.ContextAPI.getInstance(); +/** + * No-op implementations of {@link Tracer}. + */ +var NoopTracer = /** @class */ (function () { + function NoopTracer() { + } + // startSpan starts a noop span. 
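NonRecordingSpan is what ends up behind every span handle when no tracing SDK has been registered: all mutators return this, end() and recordException() do nothing, and only the wrapped SpanContext survives so that propagation keeps working. The class itself is not part of the package's public exports, so the sketch below observes the same behaviour through a tracer obtained from the global API (a no-op tracer, given no SDK), rather than constructing NonRecordingSpan directly.

```ts
import { SpanStatusCode, trace } from '@opentelemetry/api';

// With no SDK installed, trace.getTracer() hands back a proxy over the no-op tracer,
// so every span it starts behaves like the NonRecordingSpan above.
const tracer = trace.getTracer('example-lib');

const span = tracer.startSpan('noop-demo');

// Mutators are chainable no-ops; nothing is recorded or exported.
span
  .setAttribute('demo', true)
  .addEvent('ignored event')
  .setStatus({ code: SpanStatusCode.OK })
  .updateName('still-noop-demo');

console.log(span.isRecording()); // false
console.log(span.spanContext()); // the invalid placeholder trace/span ids

span.end(); // also a no-op
```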
+ NoopTracer.prototype.startSpan = function (name, options, context) { + var root = Boolean(options === null || options === void 0 ? void 0 : options.root); + if (root) { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + var parentFromContext = context && context_utils_1.getSpanContext(context); + if (isSpanContext(parentFromContext) && + spancontext_utils_1.isSpanContextValid(parentFromContext)) { + return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); + } + else { + return new NonRecordingSpan_1.NonRecordingSpan(); + } + }; + NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { + var opts; + var ctx; + var fn; + if (arguments.length < 2) { + return; + } + else if (arguments.length === 2) { + fn = arg2; + } + else if (arguments.length === 3) { + opts = arg2; + fn = arg3; + } + else { + opts = arg2; + ctx = arg3; + fn = arg4; + } + var parentContext = ctx !== null && ctx !== void 0 ? ctx : context.active(); + var span = this.startSpan(name, opts, parentContext); + var contextWithSpanSet = context_utils_1.setSpan(parentContext, span); + return context.with(contextWithSpanSet, fn, undefined, span); + }; + return NoopTracer; +}()); +exports.NoopTracer = NoopTracer; +function isSpanContext(spanContext) { + return (typeof spanContext === 'object' && + typeof spanContext['spanId'] === 'string' && + typeof spanContext['traceId'] === 'string' && + typeof spanContext['traceFlags'] === 'number'); +} +//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map new file mode 100644 index 0000000000..8559ee79de --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracer.js","sourceRoot":"","sources":["../../../src/trace/NoopTracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,0CAA4C;AAE5C,wDAAiE;AACjE,uDAAsD;AAEtD,yDAAyD;AAKzD,IAAM,OAAO,GAAG,oBAAU,CAAC,WAAW,EAAE,CAAC;AAEzC;;GAEG;AACH;IAAA;IAgEA,CAAC;IA/DC,gCAAgC;IAChC,8BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,IAAM,IAAI,GAAG,OAAO,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,CAAC;QACpC,IAAI,IAAI,EAAE;YACR,OAAO,IAAI,mCAAgB,EAAE,CAAC;SAC/B;QAED,IAAM,iBAAiB,GAAG,OAAO,IAAI,8BAAc,CAAC,OAAO,CAAC,CAAC;QAE7D,IACE,aAAa,CAAC,iBAAiB,CAAC;YAChC,sCAAkB,CAAC,iBAAiB,CAAC,EACrC;YACA,OAAO,IAAI,mCAAgB,CAAC,iBAAiB,CAAC,CAAC;SAChD;aAAM;YACL,OAAO,IAAI,mCAAgB,EAAE,CAAC;SAC/B;IACH,CAAC;IAiBD,oCAAe,GAAf,UACE,IAAY,EACZ,IAAsB,EACtB,IAAkB,EAClB,IAAQ;QAER,IAAI,IAA6B,CAAC;QAClC,IAAI,GAAwB,CAAC;QAC7B,IAAI,EAAK,CAAC;QAEV,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;YACxB,OAAO;SACR;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YACjC,IAAI,GAAG,IAA+B,CAAC;YACvC,EAAE,GAAG,IAAS,CAAC;SAChB;aAAM;YACL,IAAI,GAAG,IAA+B,CAAC;YACvC,GAAG,GAAG,IAA2B,CAAC;YAClC,EAAE,GAAG,IAAS,CAAC;SAChB;QAED,IAAM,aAAa,GAAG,GAAG,aAAH,GAAG,cAAH,GAAG,GAAI,OAAO,CAAC,MAAM,EAAE,CAAC;QAC9C,IAAM,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,EAAE,aAAa,CAAC,CAAC;QACvD,IAAM,kBAAkB,GAAG,uBAAO,CAAC,aAAa,EAAE,IAAI,CAAC,CAAC;QAExD,OAAO,OAAO,CAAC,IAAI,CAAC,kBAAkB,EAAE,EAAE,EAAE,SAAS,EAAE,IAAI,CAAC,CAAC;IAC/D,CAAC;IACH,iBAAC;AAAD,CAAC,AAhED,IAgEC;AAhEY,gCAAU;AAkEvB,SAAS,aAAa,CAAC,WAAgB;IACrC,OAAO,CACL,OAAO,WAAW,KAAK,QAAQ;QAC/B,OAAO,WAAW,CAAC,QAAQ,CAAC,KAAK,QAAQ;QACzC,OAAO,WAAW,CAAC,SAAS,CAAC,KAAK,QAAQ;QAC1C,OAAO,WAAW,CAAC,YAAY,CAAC,KAAK,QAAQ,CAC9C,CAAC;AACJ,CAAC","sourcesContent":["/*\n * 
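The compiled startActiveSpan above normalizes its optional (options, context) arguments, starts a span, binds it into a new context, and runs the callback inside that context. A usage sketch, again going through the public tracer handle rather than instantiating NoopTracer directly; with a real SDK registered, the same code produces recorded spans.

```ts
import { SpanKind, trace } from '@opentelemetry/api';

const tracer = trace.getTracer('example-lib');

// Callback form: startActiveSpan starts the span, makes it the active span in a new
// context for the duration of the callback (once an SDK has installed a context
// manager), and returns whatever the callback returns.
function handleRequest(): string {
  return tracer.startActiveSpan('handle-request', { kind: SpanKind.SERVER }, span => {
    try {
      // Work attributed to this span goes here; nested spans started from the
      // active context will use it as their parent.
      return 'done';
    } finally {
      // Ending the span is the caller's responsibility in every overload.
      span.end();
    }
  });
}

console.log(handleRequest()); // 'done'
```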
Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { ContextAPI } from '../api/context';\nimport { Context } from '../context/types';\nimport { getSpanContext, setSpan } from '../trace/context-utils';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { isSpanContextValid } from './spancontext-utils';\nimport { SpanOptions } from './SpanOptions';\nimport { SpanContext } from './span_context';\nimport { Tracer } from './tracer';\n\nconst context = ContextAPI.getInstance();\n\n/**\n * No-op implementations of {@link Tracer}.\n */\nexport class NoopTracer implements Tracer {\n // startSpan starts a noop span.\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n const root = Boolean(options?.root);\n if (root) {\n return new NonRecordingSpan();\n }\n\n const parentFromContext = context && getSpanContext(context);\n\n if (\n isSpanContext(parentFromContext) &&\n isSpanContextValid(parentFromContext)\n ) {\n return new NonRecordingSpan(parentFromContext);\n } else {\n return new NonRecordingSpan();\n }\n }\n\n startActiveSpan ReturnType>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n opts: SpanOptions | undefined,\n ctx: Context | undefined,\n fn: F\n ): ReturnType;\n startActiveSpan ReturnType>(\n name: string,\n arg2?: F | SpanOptions,\n arg3?: F | Context,\n arg4?: F\n ): ReturnType | undefined {\n let opts: SpanOptions | undefined;\n let ctx: Context | undefined;\n let fn: F;\n\n if (arguments.length < 2) {\n return;\n } else if (arguments.length === 2) {\n fn = arg2 as F;\n } else if (arguments.length === 3) {\n opts = arg2 as SpanOptions | undefined;\n fn = arg3 as F;\n } else {\n opts = arg2 as SpanOptions | undefined;\n ctx = arg3 as Context | undefined;\n fn = arg4 as F;\n }\n\n const parentContext = ctx ?? 
context.active();\n const span = this.startSpan(name, opts, parentContext);\n const contextWithSpanSet = setSpan(parentContext, span);\n\n return context.with(contextWithSpanSet, fn, undefined, span);\n }\n}\n\nfunction isSpanContext(spanContext: any): spanContext is SpanContext {\n return (\n typeof spanContext === 'object' &&\n typeof spanContext['spanId'] === 'string' &&\n typeof spanContext['traceId'] === 'string' &&\n typeof spanContext['traceFlags'] === 'number'\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts new file mode 100644 index 0000000000..ec0fe79201 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.d.ts @@ -0,0 +1,13 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +import { TracerProvider } from './tracer_provider'; +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. + */ +export declare class NoopTracerProvider implements TracerProvider { + getTracer(_name?: string, _version?: string, _options?: TracerOptions): Tracer; +} +//# sourceMappingURL=NoopTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js new file mode 100644 index 0000000000..d08649dd16 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js @@ -0,0 +1,35 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.NoopTracerProvider = void 0; +var NoopTracer_1 = require("./NoopTracer"); +/** + * An implementation of the {@link TracerProvider} which returns an impotent + * Tracer for all calls to `getTracer`. + * + * All operations are no-op. 
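Even without an SDK, NoopTracer.startSpan keeps trace identity flowing: if the supplied context holds a valid SpanContext, the returned non-recording span carries that exact SpanContext onward, while root: true (or no parent at all) falls back to the invalid placeholder. A sketch of that behaviour through the public trace API; the trace/span id literals are arbitrary example values.

```ts
import {
  ROOT_CONTEXT,
  SpanContext,
  TraceFlags,
  isSpanContextValid,
  trace,
} from '@opentelemetry/api';

const tracer = trace.getTracer('example-lib');

// A made-up remote parent, e.g. as a propagator would extract it from headers.
const parent: SpanContext = {
  traceId: '0af7651916cd43dd8448eb211c80319c',
  spanId: 'b7ad6b7169203331',
  traceFlags: TraceFlags.SAMPLED,
  isRemote: true,
};

// Store the parent SpanContext in a context (wrapped in a NonRecordingSpan).
const ctxWithParent = trace.setSpanContext(ROOT_CONTEXT, parent);

// The no-op tracer hands back a non-recording span that reuses the parent's ids…
const child = tracer.startSpan('child', undefined, ctxWithParent);
console.log(child.spanContext().traceId === parent.traceId); // true
console.log(child.isRecording());                            // false

// …while `root: true` ignores the parent and yields the invalid placeholder context.
const detached = tracer.startSpan('detached', { root: true }, ctxWithParent);
console.log(isSpanContextValid(detached.spanContext()));     // false
```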
+ */ +var NoopTracerProvider = /** @class */ (function () { + function NoopTracerProvider() { + } + NoopTracerProvider.prototype.getTracer = function (_name, _version, _options) { + return new NoopTracer_1.NoopTracer(); + }; + return NoopTracerProvider; +}()); +exports.NoopTracerProvider = NoopTracerProvider; +//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map new file mode 100644 index 0000000000..a379756d12 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"NoopTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/NoopTracerProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,2CAA0C;AAK1C;;;;;GAKG;AACH;IAAA;IAQA,CAAC;IAPC,sCAAS,GAAT,UACE,KAAc,EACd,QAAiB,EACjB,QAAwB;QAExB,OAAO,IAAI,uBAAU,EAAE,CAAC;IAC1B,CAAC;IACH,yBAAC;AAAD,CAAC,AARD,IAQC;AARY,gDAAkB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { NoopTracer } from './NoopTracer';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\nimport { TracerProvider } from './tracer_provider';\n\n/**\n * An implementation of the {@link TracerProvider} which returns an impotent\n * Tracer for all calls to `getTracer`.\n *\n * All operations are no-op.\n */\nexport class NoopTracerProvider implements TracerProvider {\n getTracer(\n _name?: string,\n _version?: string,\n _options?: TracerOptions\n ): Tracer {\n return new NoopTracer();\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts new file mode 100644 index 0000000000..116cc5c1f6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.d.ts @@ -0,0 +1,27 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * Proxy tracer provided by the proxy tracer provider + */ +export declare class ProxyTracer implements Tracer { + private _provider; + readonly name: string; + readonly version?: string | undefined; + readonly options?: TracerOptions | undefined; + private _delegate?; + constructor(_provider: TracerDelegator, name: string, version?: string | undefined, options?: TracerOptions | undefined); + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + startActiveSpan unknown>(_name: string, _options: F | SpanOptions, _context?: F | Context, _fn?: F): ReturnType; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + private _getTracer; +} +export interface TracerDelegator { + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js new file mode 100644 index 0000000000..530a379045 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js @@ -0,0 +1,56 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracer = void 0; +var NoopTracer_1 = require("./NoopTracer"); +var NOOP_TRACER = new NoopTracer_1.NoopTracer(); +/** + * Proxy tracer provided by the proxy tracer provider + */ +var ProxyTracer = /** @class */ (function () { + function ProxyTracer(_provider, name, version, options) { + this._provider = _provider; + this.name = name; + this.version = version; + this.options = options; + } + ProxyTracer.prototype.startSpan = function (name, options, context) { + return this._getTracer().startSpan(name, options, context); + }; + ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { + var tracer = this._getTracer(); + return Reflect.apply(tracer.startActiveSpan, tracer, arguments); + }; + /** + * Try to get a tracer from the proxy tracer provider. + * If the proxy tracer provider has no delegate, return a noop tracer. 
+ */ + ProxyTracer.prototype._getTracer = function () { + if (this._delegate) { + return this._delegate; + } + var tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); + if (!tracer) { + return NOOP_TRACER; + } + this._delegate = tracer; + return this._delegate; + }; + return ProxyTracer; +}()); +exports.ProxyTracer = ProxyTracer; +//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map new file mode 100644 index 0000000000..b8e4b53fe4 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracer.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,2CAA0C;AAM1C,IAAM,WAAW,GAAG,IAAI,uBAAU,EAAE,CAAC;AAErC;;GAEG;AACH;IAIE,qBACU,SAA0B,EAClB,IAAY,EACZ,OAAgB,EAChB,OAAuB;QAH/B,cAAS,GAAT,SAAS,CAAiB;QAClB,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAS;QAChB,YAAO,GAAP,OAAO,CAAgB;IACtC,CAAC;IAEJ,+BAAS,GAAT,UAAU,IAAY,EAAE,OAAqB,EAAE,OAAiB;QAC9D,OAAO,IAAI,CAAC,UAAU,EAAE,CAAC,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC7D,CAAC;IAED,qCAAe,GAAf,UACE,KAAa,EACb,QAAyB,EACzB,QAAsB,EACtB,GAAO;QAEP,IAAM,MAAM,GAAG,IAAI,CAAC,UAAU,EAAE,CAAC;QACjC,OAAO,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,eAAe,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC;IAClE,CAAC;IAED;;;OAGG;IACK,gCAAU,GAAlB;QACE,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,OAAO,IAAI,CAAC,SAAS,CAAC;SACvB;QAED,IAAM,MAAM,GAAG,IAAI,CAAC,SAAS,CAAC,iBAAiB,CAAC,IAAI,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QAEvF,IAAI,CAAC,MAAM,EAAE;YACX,OAAO,WAAW,CAAC;SACpB;QAED,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC;QACxB,OAAO,IAAI,CAAC,SAAS,CAAC;IACxB,CAAC;IACH,kBAAC;AAAD,CAAC,AA3CD,IA2CC;AA3CY,kCAAW","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { NoopTracer } from './NoopTracer';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER = new NoopTracer();\n\n/**\n * Proxy tracer provided by the proxy tracer provider\n */\nexport class ProxyTracer implements Tracer {\n // When a real implementation is provided, this will be it\n private _delegate?: Tracer;\n\n constructor(\n private _provider: TracerDelegator,\n public readonly name: string,\n public readonly version?: string,\n public readonly options?: TracerOptions\n ) {}\n\n startSpan(name: string, options?: SpanOptions, context?: Context): Span {\n return this._getTracer().startSpan(name, options, context);\n }\n\n startActiveSpan unknown>(\n _name: string,\n _options: F | SpanOptions,\n _context?: F | Context,\n _fn?: F\n ): ReturnType {\n const tracer = this._getTracer();\n return Reflect.apply(tracer.startActiveSpan, tracer, arguments);\n }\n\n /**\n * Try to get 
a tracer from the proxy tracer provider.\n * If the proxy tracer provider has no delegate, return a noop tracer.\n */\n private _getTracer() {\n if (this._delegate) {\n return this._delegate;\n }\n\n const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options);\n\n if (!tracer) {\n return NOOP_TRACER;\n }\n\n this._delegate = tracer;\n return this._delegate;\n }\n}\n\nexport interface TracerDelegator {\n getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts new file mode 100644 index 0000000000..ee7eafa94d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.d.ts @@ -0,0 +1,25 @@ +import { Tracer } from './tracer'; +import { TracerProvider } from './tracer_provider'; +import { TracerOptions } from './tracer_options'; +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. + */ +export declare class ProxyTracerProvider implements TracerProvider { + private _delegate?; + /** + * Get a {@link ProxyTracer} + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; + getDelegate(): TracerProvider; + /** + * Set the delegate tracer provider + */ + setDelegate(delegate: TracerProvider): void; + getDelegateTracer(name: string, version?: string, options?: TracerOptions): Tracer | undefined; +} +//# sourceMappingURL=ProxyTracerProvider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js new file mode 100644 index 0000000000..32debbba84 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js @@ -0,0 +1,57 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ProxyTracerProvider = void 0; +var ProxyTracer_1 = require("./ProxyTracer"); +var NoopTracerProvider_1 = require("./NoopTracerProvider"); +var NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); +/** + * Tracer provider which provides {@link ProxyTracer}s. + * + * Before a delegate is set, tracers provided are NoOp. + * When a delegate is set, traces are provided from the delegate. + * When a delegate is set after tracers have already been provided, + * all tracers already provided will use the provided delegate implementation. 
+ */ +var ProxyTracerProvider = /** @class */ (function () { + function ProxyTracerProvider() { + } + /** + * Get a {@link ProxyTracer} + */ + ProxyTracerProvider.prototype.getTracer = function (name, version, options) { + var _a; + return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options)); + }; + ProxyTracerProvider.prototype.getDelegate = function () { + var _a; + return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; + }; + /** + * Set the delegate tracer provider + */ + ProxyTracerProvider.prototype.setDelegate = function (delegate) { + this._delegate = delegate; + }; + ProxyTracerProvider.prototype.getDelegateTracer = function (name, version, options) { + var _a; + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); + }; + return ProxyTracerProvider; +}()); +exports.ProxyTracerProvider = ProxyTracerProvider; +//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map new file mode 100644 index 0000000000..4140662c6c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ProxyTracerProvider.js","sourceRoot":"","sources":["../../../src/trace/ProxyTracerProvider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH,6CAA4C;AAC5C,2DAA0D;AAG1D,IAAM,oBAAoB,GAAG,IAAI,uCAAkB,EAAE,CAAC;AAEtD;;;;;;;GAOG;AACH;IAAA;IA+BA,CAAC;IA5BC;;OAEG;IACH,uCAAS,GAAT,UAAU,IAAY,EAAE,OAAgB,EAAE,OAAuB;;QAC/D,OAAO,CACL,MAAA,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,mCAC9C,IAAI,yBAAW,CAAC,IAAI,EAAE,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAC9C,CAAC;IACJ,CAAC;IAED,yCAAW,GAAX;;QACE,OAAO,MAAA,IAAI,CAAC,SAAS,mCAAI,oBAAoB,CAAC;IAChD,CAAC;IAED;;OAEG;IACH,yCAAW,GAAX,UAAY,QAAwB;QAClC,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAC;IAC5B,CAAC;IAED,+CAAiB,GAAjB,UACE,IAAY,EACZ,OAAgB,EAChB,OAAuB;;QAEvB,OAAO,MAAA,IAAI,CAAC,SAAS,0CAAE,SAAS,CAAC,IAAI,EAAE,OAAO,EAAE,OAAO,CAAC,CAAC;IAC3D,CAAC;IACH,0BAAC;AAAD,CAAC,AA/BD,IA+BC;AA/BY,kDAAmB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerProvider } from './tracer_provider';\nimport { ProxyTracer } from './ProxyTracer';\nimport { NoopTracerProvider } from './NoopTracerProvider';\nimport { TracerOptions } from './tracer_options';\n\nconst NOOP_TRACER_PROVIDER = new NoopTracerProvider();\n\n/**\n * Tracer provider which provides {@link ProxyTracer}s.\n *\n * Before a delegate is set, tracers provided are NoOp.\n * When a delegate is set, traces are provided from the delegate.\n * When a delegate is set after tracers have already been provided,\n * all tracers already provided will use the provided delegate implementation.\n */\nexport class 
ProxyTracerProvider implements TracerProvider {\n private _delegate?: TracerProvider;\n\n /**\n * Get a {@link ProxyTracer}\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer {\n return (\n this.getDelegateTracer(name, version, options) ??\n new ProxyTracer(this, name, version, options)\n );\n }\n\n getDelegate(): TracerProvider {\n return this._delegate ?? NOOP_TRACER_PROVIDER;\n }\n\n /**\n * Set the delegate tracer provider\n */\n setDelegate(delegate: TracerProvider) {\n this._delegate = delegate;\n }\n\n getDelegateTracer(\n name: string,\n version?: string,\n options?: TracerOptions\n ): Tracer | undefined {\n return this._delegate?.getTracer(name, version, options);\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts b/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts new file mode 100644 index 0000000000..dd8dc811e6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.d.ts @@ -0,0 +1,30 @@ +import { Context } from '../context/types'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SamplingResult } from './SamplingResult'; +import { SpanKind } from './span_kind'; +/** + * This interface represent a sampler. Sampling is a mechanism to control the + * noise and overhead introduced by OpenTelemetry by reducing the number of + * samples of traces collected and sent to the backend. + */ +export interface Sampler { + /** + * Checks whether span needs to be created and tracked. + * + * @param context Parent Context which may contain a span. + * @param traceId of the span to be created. It can be different from the + * traceId in the {@link SpanContext}. Typically in situations when the + * span to be created starts a new trace. + * @param spanName of the span to be created. + * @param spanKind of the span to be created. + * @param attributes Initial set of SpanAttributes for the Span being constructed. + * @param links Collection of links that will be associated with the Span to + * be created. Typically useful for batch operations. + * @returns a {@link SamplingResult}. + */ + shouldSample(context: Context, traceId: string, spanName: string, spanKind: SpanKind, attributes: SpanAttributes, links: Link[]): SamplingResult; + /** Returns the sampler name or short description with the configuration. */ + toString(): string; +} +//# sourceMappingURL=Sampler.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js new file mode 100644 index 0000000000..6034482e4c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
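ProxyTracerProvider is what lets instrumentation grab tracers before any SDK exists: until setDelegate is called it serves ProxyTracers backed by the no-op implementations above, and once a delegate provider is installed, already-issued ProxyTracers resolve their real tracer lazily on next use and cache it. A small wiring sketch; the stubDelegate below is a hypothetical stand-in for an SDK's TracerProvider, used only to show the hand-off.

```ts
import { ProxyTracerProvider, Tracer, TracerProvider } from '@opentelemetry/api';

const proxyProvider = new ProxyTracerProvider();

// Handed out before any delegate exists: backed by the NoopTracer for now.
const earlyTracer = proxyProvider.getTracer('my-lib', '1.0.0');
console.log(earlyTracer.startSpan('warmup').isRecording()); // false

// Hypothetical delegate standing in for a real SDK provider. It records which
// tracers are requested and (for this sketch) still returns non-recording tracers.
const requested: string[] = [];
const stubDelegate: TracerProvider = {
  getTracer(name: string, version?: string): Tracer {
    requested.push(`${name}@${version ?? 'latest'}`);
    return new ProxyTracerProvider().getTracer(name, version);
  },
};

proxyProvider.setDelegate(stubDelegate);

// The *same* earlyTracer now resolves its delegate on next use and caches it.
earlyTracer.startSpan('after-delegate').end();
console.log(requested);                                      // ['my-lib@1.0.0']
console.log(proxyProvider.getDelegate() === stubDelegate);   // true
```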
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map new file mode 100644 index 0000000000..bb6528e1ea --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js.map @@ -0,0 +1 @@ +{"version":3,"file":"Sampler.js","sourceRoot":"","sources":["../../../src/trace/Sampler.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SamplingResult } from './SamplingResult';\nimport { SpanKind } from './span_kind';\n\n/**\n * This interface represent a sampler. Sampling is a mechanism to control the\n * noise and overhead introduced by OpenTelemetry by reducing the number of\n * samples of traces collected and sent to the backend.\n */\nexport interface Sampler {\n /**\n * Checks whether span needs to be created and tracked.\n *\n * @param context Parent Context which may contain a span.\n * @param traceId of the span to be created. It can be different from the\n * traceId in the {@link SpanContext}. Typically in situations when the\n * span to be created starts a new trace.\n * @param spanName of the span to be created.\n * @param spanKind of the span to be created.\n * @param attributes Initial set of SpanAttributes for the Span being constructed.\n * @param links Collection of links that will be associated with the Span to\n * be created. Typically useful for batch operations.\n * @returns a {@link SamplingResult}.\n */\n shouldSample(\n context: Context,\n traceId: string,\n spanName: string,\n spanKind: SpanKind,\n attributes: SpanAttributes,\n links: Link[]\n ): SamplingResult;\n\n /** Returns the sampler name or short description with the configuration. */\n toString(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts new file mode 100644 index 0000000000..9394846575 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.d.ts @@ -0,0 +1,39 @@ +import { SpanAttributes } from './attributes'; +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +export declare enum SamplingDecision { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + NOT_RECORD = 0, + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + RECORD = 1, + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
+ */ + RECORD_AND_SAMPLED = 2 +} +/** + * A sampling result contains a decision for a {@link Span} and additional + * attributes the sampler would like to added to the Span. + */ +export interface SamplingResult { + /** + * A sampling decision, refer to {@link SamplingDecision} for details. + */ + decision: SamplingDecision; + /** + * The list of attributes returned by SamplingResult MUST be immutable. + * Caller may call {@link Sampler}.shouldSample any number of times and + * can safely cache the returned value. + */ + attributes?: Readonly; +} +//# sourceMappingURL=SamplingResult.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js new file mode 100644 index 0000000000..d102e82b3d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js @@ -0,0 +1,41 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SamplingDecision = void 0; +/** + * A sampling decision that determines how a {@link Span} will be recorded + * and collected. + */ +var SamplingDecision; +(function (SamplingDecision) { + /** + * `Span.isRecording() === false`, span will not be recorded and all events + * and attributes will be dropped. + */ + SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; + /** + * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} + * MUST NOT be set. + */ + SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; + /** + * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} + * MUST be set. 
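The Sampler contract above (still exported from the API entry point in this version of the package) receives the parent context, the proposed trace id, the span name, kind, initial attributes and links, and must return a SamplingDecision plus optional immutable attributes. A sketch of a custom sampler that drops health-check spans and samples everything else; the name-matching rule and the 'sampler.rule' attribute are choices of the example, not anything the interface mandates.

```ts
import {
  Context,
  Link,
  Sampler,
  SamplingDecision,
  SamplingResult,
  SpanAttributes,
  SpanKind,
} from '@opentelemetry/api';

// Illustrative sampler: never record spans whose name looks like a health check,
// record and sample everything else.
class HealthCheckFilterSampler implements Sampler {
  shouldSample(
    _context: Context,
    _traceId: string,
    spanName: string,
    _spanKind: SpanKind,
    _attributes: SpanAttributes,
    _links: Link[]
  ): SamplingResult {
    if (/health/i.test(spanName)) {
      return { decision: SamplingDecision.NOT_RECORD };
    }
    return {
      decision: SamplingDecision.RECORD_AND_SAMPLED,
      attributes: { 'sampler.rule': 'default' }, // returned attributes must stay immutable
    };
  }

  toString(): string {
    return 'HealthCheckFilterSampler';
  }
}
```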
+ */ + SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; +})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); +//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map new file mode 100644 index 0000000000..7a850e996d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SamplingResult.js","sourceRoot":"","sources":["../../../src/trace/SamplingResult.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAIH;;;GAGG;AACH,IAAY,gBAgBX;AAhBD,WAAY,gBAAgB;IAC1B;;;OAGG;IACH,mEAAU,CAAA;IACV;;;OAGG;IACH,2DAAM,CAAA;IACN;;;OAGG;IACH,mFAAkB,CAAA;AACpB,CAAC,EAhBW,gBAAgB,GAAhB,wBAAgB,KAAhB,wBAAgB,QAgB3B","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\n\n/**\n * A sampling decision that determines how a {@link Span} will be recorded\n * and collected.\n */\nexport enum SamplingDecision {\n /**\n * `Span.isRecording() === false`, span will not be recorded and all events\n * and attributes will be dropped.\n */\n NOT_RECORD,\n /**\n * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags}\n * MUST NOT be set.\n */\n RECORD,\n /**\n * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags}\n * MUST be set.\n */\n RECORD_AND_SAMPLED,\n}\n\n/**\n * A sampling result contains a decision for a {@link Span} and additional\n * attributes the sampler would like to added to the Span.\n */\nexport interface SamplingResult {\n /**\n * A sampling decision, refer to {@link SamplingDecision} for details.\n */\n decision: SamplingDecision;\n /**\n * The list of attributes returned by SamplingResult MUST be immutable.\n * Caller may call {@link Sampler}.shouldSample any number of times and\n * can safely cache the returned value.\n */\n attributes?: Readonly;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts new file mode 100644 index 0000000000..c8045689b7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.d.ts @@ -0,0 +1,23 @@ +import { TimeInput } from '../common/Time'; +import { SpanAttributes } from './attributes'; +import { Link } from './link'; +import { SpanKind } from './span_kind'; +/** + * Options needed for span creation + */ +export interface SpanOptions { + /** + * The SpanKind of a span + * @default {@link SpanKind.INTERNAL} + */ + kind?: SpanKind; + /** A span's attributes */ + attributes?: SpanAttributes; + /** {@link Link}s span to other spans */ + links?: Link[]; + /** A manually specified start time for the created `Span` object. 
*/ + startTime?: TimeInput; + /** The new span should be a root span. (Ignore parent from context). */ + root?: boolean; +} +//# sourceMappingURL=SpanOptions.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js new file mode 100644 index 0000000000..cb582305bf --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map new file mode 100644 index 0000000000..049f685860 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js.map @@ -0,0 +1 @@ +{"version":3,"file":"SpanOptions.js","sourceRoot":"","sources":["../../../src/trace/SpanOptions.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes } from './attributes';\nimport { Link } from './link';\nimport { SpanKind } from './span_kind';\n\n/**\n * Options needed for span creation\n */\nexport interface SpanOptions {\n /**\n * The SpanKind of a span\n * @default {@link SpanKind.INTERNAL}\n */\n kind?: SpanKind;\n\n /** A span's attributes */\n attributes?: SpanAttributes;\n\n /** {@link Link}s span to other spans */\n links?: Link[];\n\n /** A manually specified start time for the created `Span` object. */\n startTime?: TimeInput;\n\n /** The new span should be a root span. (Ignore parent from context). 
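SpanOptions is the bag of optional knobs handed to Tracer.startSpan: kind defaults to SpanKind.INTERNAL, startTime accepts any TimeInput (epoch milliseconds, a Date, or an HrTime tuple), and root: true tells the tracer to ignore whatever parent the context carries. A usage sketch; the attribute keys and values are illustrative only.

```ts
import { SpanKind, SpanOptions, trace } from '@opentelemetry/api';

const tracer = trace.getTracer('example-lib'); // no-op unless an SDK is registered

// Every field is optional; omitted ones fall back to the defaults described above.
const options: SpanOptions = {
  kind: SpanKind.CLIENT,                     // defaults to SpanKind.INTERNAL
  attributes: { 'peer.service': 'billing' }, // illustrative attribute
  startTime: Date.now(),                     // TimeInput: millis, Date, or HrTime
  root: true,                                // ignore any parent carried by the context
};

const span = tracer.startSpan('charge-card', options);
// links could also be passed, e.g. links: [{ context: otherSpan.spanContext() }]
span.end();
```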
*/\n root?: boolean;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts b/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts new file mode 100644 index 0000000000..a2a5d2a2f9 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.d.ts @@ -0,0 +1,10 @@ +import { Attributes, AttributeValue } from '../common/Attributes'; +/** + * @deprecated please use {@link Attributes} + */ +export declare type SpanAttributes = Attributes; +/** + * @deprecated please use {@link AttributeValue} + */ +export declare type SpanAttributeValue = AttributeValue; +//# sourceMappingURL=attributes.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.js b/node_modules/@opentelemetry/api/build/src/trace/attributes.js new file mode 100644 index 0000000000..c6eb97a3c7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map b/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map new file mode 100644 index 0000000000..4de58c426b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/attributes.js.map @@ -0,0 +1 @@ +{"version":3,"file":"attributes.js","sourceRoot":"","sources":["../../../src/trace/attributes.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Attributes, AttributeValue } from '../common/Attributes';\n\n/**\n * @deprecated please use {@link Attributes}\n */\nexport type SpanAttributes = Attributes;\n\n/**\n * @deprecated please use {@link AttributeValue}\n */\nexport type SpanAttributeValue = AttributeValue;\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts new file mode 100644 index 0000000000..1d46619bea --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.d.ts @@ -0,0 +1,37 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { 
SpanContext } from './span_context'; +/** + * Return the span if one exists + * + * @param context context to get span from + */ +export declare function getSpan(context: Context): Span | undefined; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +export declare function setSpan(context: Context, span: Span): Context; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +export declare function deleteSpan(context: Context): Context; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +export declare function setSpanContext(context: Context, spanContext: SpanContext): Context; +/** + * Get the span context of the span if it exists. + * + * @param context context to get values from + */ +export declare function getSpanContext(context: Context): SpanContext | undefined; +//# sourceMappingURL=context-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js new file mode 100644 index 0000000000..a56c73d001 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js @@ -0,0 +1,74 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getSpan = void 0; +var context_1 = require("../context/context"); +var NonRecordingSpan_1 = require("./NonRecordingSpan"); +/** + * span key + */ +var SPAN_KEY = context_1.createContextKey('OpenTelemetry Context Key SPAN'); +/** + * Return the span if one exists + * + * @param context context to get span from + */ +function getSpan(context) { + return context.getValue(SPAN_KEY) || undefined; +} +exports.getSpan = getSpan; +/** + * Set the span on a context + * + * @param context context to use as parent + * @param span span to set active + */ +function setSpan(context, span) { + return context.setValue(SPAN_KEY, span); +} +exports.setSpan = setSpan; +/** + * Remove current span stored in the context + * + * @param context context to delete span from + */ +function deleteSpan(context) { + return context.deleteValue(SPAN_KEY); +} +exports.deleteSpan = deleteSpan; +/** + * Wrap span context in a NoopSpan and set as span in a new + * context + * + * @param context context to set active span on + * @param spanContext span context to be wrapped + */ +function setSpanContext(context, spanContext) { + return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); +} +exports.setSpanContext = setSpanContext; +/** + * Get the span context of the span if it exists. 
+ * + * @param context context to get values from + */ +function getSpanContext(context) { + var _a; + return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); +} +exports.getSpanContext = getSpanContext; +//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map new file mode 100644 index 0000000000..d977585524 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"context-utils.js","sourceRoot":"","sources":["../../../src/trace/context-utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,8CAAsD;AAItD,uDAAsD;AAEtD;;GAEG;AACH,IAAM,QAAQ,GAAG,0BAAgB,CAAC,gCAAgC,CAAC,CAAC;AAEpE;;;;GAIG;AACH,SAAgB,OAAO,CAAC,OAAgB;IACtC,OAAQ,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAU,IAAI,SAAS,CAAC;AAC3D,CAAC;AAFD,0BAEC;AAED;;;;;GAKG;AACH,SAAgB,OAAO,CAAC,OAAgB,EAAE,IAAU;IAClD,OAAO,OAAO,CAAC,QAAQ,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC1C,CAAC;AAFD,0BAEC;AAED;;;;GAIG;AACH,SAAgB,UAAU,CAAC,OAAgB;IACzC,OAAO,OAAO,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC;AACvC,CAAC;AAFD,gCAEC;AAED;;;;;;GAMG;AACH,SAAgB,cAAc,CAC5B,OAAgB,EAChB,WAAwB;IAExB,OAAO,OAAO,CAAC,OAAO,EAAE,IAAI,mCAAgB,CAAC,WAAW,CAAC,CAAC,CAAC;AAC7D,CAAC;AALD,wCAKC;AAED;;;;GAIG;AACH,SAAgB,cAAc,CAAC,OAAgB;;IAC7C,OAAO,MAAA,OAAO,CAAC,OAAO,CAAC,0CAAE,WAAW,EAAE,CAAC;AACzC,CAAC;AAFD,wCAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { createContextKey } from '../context/context';\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\nimport { NonRecordingSpan } from './NonRecordingSpan';\n\n/**\n * span key\n */\nconst SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN');\n\n/**\n * Return the span if one exists\n *\n * @param context context to get span from\n */\nexport function getSpan(context: Context): Span | undefined {\n return (context.getValue(SPAN_KEY) as Span) || undefined;\n}\n\n/**\n * Set the span on a context\n *\n * @param context context to use as parent\n * @param span span to set active\n */\nexport function setSpan(context: Context, span: Span): Context {\n return context.setValue(SPAN_KEY, span);\n}\n\n/**\n * Remove current span stored in the context\n *\n * @param context context to delete span from\n */\nexport function deleteSpan(context: Context): Context {\n return context.deleteValue(SPAN_KEY);\n}\n\n/**\n * Wrap span context in a NoopSpan and set as span in a new\n * context\n *\n * @param context context to set active span on\n * @param spanContext span context to be wrapped\n */\nexport function setSpanContext(\n context: Context,\n spanContext: SpanContext\n): Context {\n return setSpan(context, new NonRecordingSpan(spanContext));\n}\n\n/**\n * Get the span context of the span if it exists.\n *\n * @param context context to get 
values from\n */\nexport function getSpanContext(context: Context): SpanContext | undefined {\n return getSpan(context)?.spanContext();\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts new file mode 100644 index 0000000000..9ed5ecb710 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.d.ts @@ -0,0 +1,22 @@ +import { TraceState } from '../trace_state'; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +export declare class TraceStateImpl implements TraceState { + private _internalState; + constructor(rawTraceState?: string); + set(key: string, value: string): TraceStateImpl; + unset(key: string): TraceStateImpl; + get(key: string): string | undefined; + serialize(): string; + private _parse; + private _keys; + private _clone; +} +//# sourceMappingURL=tracestate-impl.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js new file mode 100644 index 0000000000..576cb89acb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js @@ -0,0 +1,105 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceStateImpl = void 0; +var tracestate_validators_1 = require("./tracestate-validators"); +var MAX_TRACE_STATE_ITEMS = 32; +var MAX_TRACE_STATE_LEN = 512; +var LIST_MEMBERS_SEPARATOR = ','; +var LIST_MEMBER_KEY_VALUE_SPLITTER = '='; +/** + * TraceState must be a class and not a simple object type because of the spec + * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). + * + * Here is the list of allowed mutations: + * - New key-value pair should be added into the beginning of the list + * - The value of any key can be updated. Modified keys MUST be moved to the + * beginning of the list. + */ +var TraceStateImpl = /** @class */ (function () { + function TraceStateImpl(rawTraceState) { + this._internalState = new Map(); + if (rawTraceState) + this._parse(rawTraceState); + } + TraceStateImpl.prototype.set = function (key, value) { + // TODO: Benchmark the different approaches(map vs list) and + // use the faster one. 
+ var traceState = this._clone(); + if (traceState._internalState.has(key)) { + traceState._internalState.delete(key); + } + traceState._internalState.set(key, value); + return traceState; + }; + TraceStateImpl.prototype.unset = function (key) { + var traceState = this._clone(); + traceState._internalState.delete(key); + return traceState; + }; + TraceStateImpl.prototype.get = function (key) { + return this._internalState.get(key); + }; + TraceStateImpl.prototype.serialize = function () { + var _this = this; + return this._keys() + .reduce(function (agg, key) { + agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + _this.get(key)); + return agg; + }, []) + .join(LIST_MEMBERS_SEPARATOR); + }; + TraceStateImpl.prototype._parse = function (rawTraceState) { + if (rawTraceState.length > MAX_TRACE_STATE_LEN) + return; + this._internalState = rawTraceState + .split(LIST_MEMBERS_SEPARATOR) + .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning + .reduce(function (agg, part) { + var listMember = part.trim(); // Optional Whitespace (OWS) handling + var i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); + if (i !== -1) { + var key = listMember.slice(0, i); + var value = listMember.slice(i + 1, part.length); + if (tracestate_validators_1.validateKey(key) && tracestate_validators_1.validateValue(value)) { + agg.set(key, value); + } + else { + // TODO: Consider to add warning log + } + } + return agg; + }, new Map()); + // Because of the reverse() requirement, trunc must be done after map is created + if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { + this._internalState = new Map(Array.from(this._internalState.entries()) + .reverse() // Use reverse same as original tracestate parse chain + .slice(0, MAX_TRACE_STATE_ITEMS)); + } + }; + TraceStateImpl.prototype._keys = function () { + return Array.from(this._internalState.keys()).reverse(); + }; + TraceStateImpl.prototype._clone = function () { + var traceState = new TraceStateImpl(); + traceState._internalState = new Map(this._internalState); + return traceState; + }; + return TraceStateImpl; +}()); +exports.TraceStateImpl = TraceStateImpl; +//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map new file mode 100644 index 0000000000..dccaa90819 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"tracestate-impl.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-impl.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,iEAAqE;AAErE,IAAM,qBAAqB,GAAG,EAAE,CAAC;AACjC,IAAM,mBAAmB,GAAG,GAAG,CAAC;AAChC,IAAM,sBAAsB,GAAG,GAAG,CAAC;AACnC,IAAM,8BAA8B,GAAG,GAAG,CAAC;AAE3C;;;;;;;;GAQG;AACH;IAGE,wBAAY,aAAsB;QAF1B,mBAAc,GAAwB,IAAI,GAAG,EAAE,CAAC;QAGtD,IAAI,aAAa;YAAE,IAAI,CAAC,MAAM,CAAC,aAAa,CAAC,CAAC;IAChD,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW,EAAE,KAAa;QAC5B,4DAA4D;QAC5D,sBAAsB;QACtB,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,IAAI,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE;YACtC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;SACvC;QACD,UAAU,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC1C,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,8BAAK,GAAL,UAAM,GAAW;QACf,IAAM,UAAU,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;QACjC,UAAU,CAAC,cAAc,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACtC,OAAO,UAAU,CAAC;IACpB,CAAC;IAED,4BAAG,GAAH,UAAI,GAAW;QACb,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;IACtC,CAAC;IAED,kCAAS,GAAT;QAAA,iBAOC;QANC,OAAO,IAAI,CAAC,KAAK,EAAE;aAChB,MAAM,CAAC,UAAC,GAAa,EAAE,GAAG;YACzB,GAAG,CAAC,IAAI,CAAC,GAAG,GAAG,8BAA8B,GAAG,KAAI,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC;YAC/D,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,EAAE,CAAC;aACL,IAAI,CAAC,sBAAsB,CAAC,CAAC;IAClC,CAAC;IAEO,+BAAM,GAAd,UAAe,aAAqB;QAClC,IAAI,aAAa,CAAC,MAAM,GAAG,mBAAmB;YAAE,OAAO;QACvD,IAAI,CAAC,cAAc,GAAG,aAAa;aAChC,KAAK,CAAC,sBAAsB,CAAC;aAC7B,OAAO,EAAE,CAAC,2EAA2E;aACrF,MAAM,CAAC,UAAC,GAAwB,EAAE,IAAY;YAC7C,IAAM,UAAU,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC,qCAAqC;YACrE,IAAM,CAAC,GAAG,UAAU,CAAC,OAAO,CAAC,8BAA8B,CAAC,CAAC;YAC7D,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE;gBACZ,IAAM,GAAG,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;gBACnC,IAAM,KAAK,GAAG,UAAU,CAAC,KAAK,CAAC,CAAC,GAAG,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;gBACnD,IAAI,mCAAW,CAAC,GAAG,CAAC,IAAI,qCAAa,CAAC,KAAK,CAAC,EAAE;oBAC5C,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;iBACrB;qBAAM;oBACL,oCAAoC;iBACrC;aACF;YACD,OAAO,GAAG,CAAC;QACb,CAAC,EAAE,IAAI,GAAG,EAAE,CAAC,CAAC;QAEhB,gFAAgF;QAChF,IAAI,IAAI,CAAC,cAAc,CAAC,IAAI,GAAG,qBAAqB,EAAE;YACpD,IAAI,CAAC,cAAc,GAAG,IAAI,GAAG,CAC3B,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC;iBACtC,OAAO,EAAE,CAAC,sDAAsD;iBAChE,KAAK,CAAC,CAAC,EAAE,qBAAqB,CAAC,CACnC,CAAC;SACH;IACH,CAAC;IAEO,8BAAK,GAAb;QACE,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC,OAAO,EAAE,CAAC;IAC1D,CAAC;IAEO,+BAAM,GAAd;QACE,IAAM,UAAU,GAAG,IAAI,cAAc,EAAE,CAAC;QACxC,UAAU,CAAC,cAAc,GAAG,IAAI,GAAG,CAAC,IAAI,CAAC,cAAc,CAAC,CAAC;QACzD,OAAO,UAAU,CAAC;IACpB,CAAC;IACH,qBAAC;AAAD,CAAC,AA5ED,IA4EC;AA5EY,wCAAc","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { validateKey, validateValue } from './tracestate-validators';\n\nconst MAX_TRACE_STATE_ITEMS = 32;\nconst MAX_TRACE_STATE_LEN = 512;\nconst LIST_MEMBERS_SEPARATOR = ',';\nconst LIST_MEMBER_KEY_VALUE_SPLITTER = '=';\n\n/**\n * TraceState must be a class and not a simple object type 
because of the spec\n * requirement (https://www.w3.org/TR/trace-context/#tracestate-field).\n *\n * Here is the list of allowed mutations:\n * - New key-value pair should be added into the beginning of the list\n * - The value of any key can be updated. Modified keys MUST be moved to the\n * beginning of the list.\n */\nexport class TraceStateImpl implements TraceState {\n private _internalState: Map = new Map();\n\n constructor(rawTraceState?: string) {\n if (rawTraceState) this._parse(rawTraceState);\n }\n\n set(key: string, value: string): TraceStateImpl {\n // TODO: Benchmark the different approaches(map vs list) and\n // use the faster one.\n const traceState = this._clone();\n if (traceState._internalState.has(key)) {\n traceState._internalState.delete(key);\n }\n traceState._internalState.set(key, value);\n return traceState;\n }\n\n unset(key: string): TraceStateImpl {\n const traceState = this._clone();\n traceState._internalState.delete(key);\n return traceState;\n }\n\n get(key: string): string | undefined {\n return this._internalState.get(key);\n }\n\n serialize(): string {\n return this._keys()\n .reduce((agg: string[], key) => {\n agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key));\n return agg;\n }, [])\n .join(LIST_MEMBERS_SEPARATOR);\n }\n\n private _parse(rawTraceState: string) {\n if (rawTraceState.length > MAX_TRACE_STATE_LEN) return;\n this._internalState = rawTraceState\n .split(LIST_MEMBERS_SEPARATOR)\n .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning\n .reduce((agg: Map, part: string) => {\n const listMember = part.trim(); // Optional Whitespace (OWS) handling\n const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER);\n if (i !== -1) {\n const key = listMember.slice(0, i);\n const value = listMember.slice(i + 1, part.length);\n if (validateKey(key) && validateValue(value)) {\n agg.set(key, value);\n } else {\n // TODO: Consider to add warning log\n }\n }\n return agg;\n }, new Map());\n\n // Because of the reverse() requirement, trunc must be done after map is created\n if (this._internalState.size > MAX_TRACE_STATE_ITEMS) {\n this._internalState = new Map(\n Array.from(this._internalState.entries())\n .reverse() // Use reverse same as original tracestate parse chain\n .slice(0, MAX_TRACE_STATE_ITEMS)\n );\n }\n }\n\n private _keys(): string[] {\n return Array.from(this._internalState.keys()).reverse();\n }\n\n private _clone(): TraceStateImpl {\n const traceState = new TraceStateImpl();\n traceState._internalState = new Map(this._internalState);\n return traceState;\n }\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts new file mode 100644 index 0000000000..4917f99d1e --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.d.ts @@ -0,0 +1,15 @@ +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
+ * see https://www.w3.org/TR/trace-context/#key + */ +export declare function validateKey(key: string): boolean; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. + */ +export declare function validateValue(value: string): boolean; +//# sourceMappingURL=tracestate-validators.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js new file mode 100644 index 0000000000..008a4fdeae --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js @@ -0,0 +1,46 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.validateValue = exports.validateKey = void 0; +var VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; +var VALID_KEY = "[a-z]" + VALID_KEY_CHAR_RANGE + "{0,255}"; +var VALID_VENDOR_KEY = "[a-z0-9]" + VALID_KEY_CHAR_RANGE + "{0,240}@[a-z]" + VALID_KEY_CHAR_RANGE + "{0,13}"; +var VALID_KEY_REGEX = new RegExp("^(?:" + VALID_KEY + "|" + VALID_VENDOR_KEY + ")$"); +var VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; +var INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; +/** + * Key is opaque string up to 256 characters printable. It MUST begin with a + * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, + * underscores _, dashes -, asterisks *, and forward slashes /. + * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the + * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. + * see https://www.w3.org/TR/trace-context/#key + */ +function validateKey(key) { + return VALID_KEY_REGEX.test(key); +} +exports.validateKey = validateKey; +/** + * Value is opaque string up to 256 characters printable ASCII RFC0020 + * characters (i.e., the range 0x20 to 0x7E) except comma , and =. 
+ */ +function validateValue(value) { + return (VALID_VALUE_BASE_REGEX.test(value) && + !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); +} +exports.validateValue = validateValue; +//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map new file mode 100644 index 0000000000..fc5cee189d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracestate-validators.js","sourceRoot":"","sources":["../../../../src/trace/internal/tracestate-validators.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,IAAM,oBAAoB,GAAG,cAAc,CAAC;AAC5C,IAAM,SAAS,GAAG,UAAQ,oBAAoB,YAAS,CAAC;AACxD,IAAM,gBAAgB,GAAG,aAAW,oBAAoB,qBAAgB,oBAAoB,WAAQ,CAAC;AACrG,IAAM,eAAe,GAAG,IAAI,MAAM,CAAC,SAAO,SAAS,SAAI,gBAAgB,OAAI,CAAC,CAAC;AAC7E,IAAM,sBAAsB,GAAG,qBAAqB,CAAC;AACrD,IAAM,+BAA+B,GAAG,KAAK,CAAC;AAE9C;;;;;;;GAOG;AACH,SAAgB,WAAW,CAAC,GAAW;IACrC,OAAO,eAAe,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AACnC,CAAC;AAFD,kCAEC;AAED;;;GAGG;AACH,SAAgB,aAAa,CAAC,KAAa;IACzC,OAAO,CACL,sBAAsB,CAAC,IAAI,CAAC,KAAK,CAAC;QAClC,CAAC,+BAA+B,CAAC,IAAI,CAAC,KAAK,CAAC,CAC7C,CAAC;AACJ,CAAC;AALD,sCAKC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nconst VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]';\nconst VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`;\nconst VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`;\nconst VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`);\nconst VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/;\nconst INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/;\n\n/**\n * Key is opaque string up to 256 characters printable. It MUST begin with a\n * lowercase letter, and can only contain lowercase letters a-z, digits 0-9,\n * underscores _, dashes -, asterisks *, and forward slashes /.\n * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the\n * vendor name. 
Vendors SHOULD set the tenant ID at the beginning of the key.\n * see https://www.w3.org/TR/trace-context/#key\n */\nexport function validateKey(key: string): boolean {\n return VALID_KEY_REGEX.test(key);\n}\n\n/**\n * Value is opaque string up to 256 characters printable ASCII RFC0020\n * characters (i.e., the range 0x20 to 0x7E) except comma , and =.\n */\nexport function validateValue(value: string): boolean {\n return (\n VALID_VALUE_BASE_REGEX.test(value) &&\n !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)\n );\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts new file mode 100644 index 0000000000..e3b51fe45b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.d.ts @@ -0,0 +1,3 @@ +import { TraceState } from '../trace_state'; +export declare function createTraceState(rawTraceState?: string): TraceState; +//# sourceMappingURL=utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js new file mode 100644 index 0000000000..93c9b31094 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js @@ -0,0 +1,24 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTraceState = void 0; +var tracestate_impl_1 = require("./tracestate-impl"); +function createTraceState(rawTraceState) { + return new tracestate_impl_1.TraceStateImpl(rawTraceState); +} +exports.createTraceState = createTraceState; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map new file mode 100644 index 0000000000..b543e48f8c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../../../src/trace/internal/utils.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,qDAAmD;AAGnD,SAAgB,gBAAgB,CAAC,aAAsB;IACrD,OAAO,IAAI,gCAAc,CAAC,aAAa,CAAC,CAAC;AAC3C,CAAC;AAFD,4CAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from '../trace_state';\nimport { TraceStateImpl } from './tracestate-impl';\n\n\nexport function createTraceState(rawTraceState?: string): TraceState {\n return new TraceStateImpl(rawTraceState);\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts new file mode 100644 index 0000000000..e32dab9d60 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.d.ts @@ -0,0 +1,5 @@ +import { SpanContext } from './span_context'; +export declare const INVALID_SPANID = "0000000000000000"; +export declare const INVALID_TRACEID = "00000000000000000000000000000000"; +export declare const INVALID_SPAN_CONTEXT: SpanContext; +//# sourceMappingURL=invalid-span-constants.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js new file mode 100644 index 0000000000..ad0ab17e06 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js @@ -0,0 +1,27 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; +var trace_flags_1 = require("./trace_flags"); +exports.INVALID_SPANID = '0000000000000000'; +exports.INVALID_TRACEID = '00000000000000000000000000000000'; +exports.INVALID_SPAN_CONTEXT = { + traceId: exports.INVALID_TRACEID, + spanId: exports.INVALID_SPANID, + traceFlags: trace_flags_1.TraceFlags.NONE, +}; +//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map new file mode 100644 index 0000000000..ecc6741031 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js.map @@ -0,0 +1 @@ +{"version":3,"file":"invalid-span-constants.js","sourceRoot":"","sources":["../../../src/trace/invalid-span-constants.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAGH,6CAA2C;AAE9B,QAAA,cAAc,GAAG,kBAAkB,CAAC;AACpC,QAAA,eAAe,GAAG,kCAAkC,CAAC;AACrD,QAAA,oBAAoB,GAAgB;IAC/C,OAAO,EAAE,uBAAe;IACxB,MAAM,EAAE,sBAAc;IACtB,UAAU,EAAE,wBAAU,CAAC,IAAI;CAC5B,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanContext } from './span_context';\nimport { TraceFlags } from './trace_flags';\n\nexport const INVALID_SPANID = '0000000000000000';\nexport const INVALID_TRACEID = '00000000000000000000000000000000';\nexport const INVALID_SPAN_CONTEXT: SpanContext = {\n traceId: INVALID_TRACEID,\n spanId: INVALID_SPANID,\n traceFlags: TraceFlags.NONE,\n};\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.d.ts b/node_modules/@opentelemetry/api/build/src/trace/link.d.ts new file mode 100644 index 0000000000..d1326f5156 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.d.ts @@ -0,0 +1,24 @@ +import { SpanAttributes } from './attributes'; +import { SpanContext } from './span_context'; +/** + * A pointer from the current {@link Span} to another span in the same trace or + * in a different trace. + * Few examples of Link usage. + * 1. Batch Processing: A batch of elements may contain elements associated + * with one or more traces/spans. Since there can only be one parent + * SpanContext, Link is used to keep reference to SpanContext of all + * elements in the batch. + * 2. Public Endpoint: A SpanContext in incoming client request on a public + * endpoint is untrusted from service provider perspective. In such case it + * is advisable to start a new trace with appropriate sampling decision. + * However, it is desirable to associate incoming SpanContext to new trace + * initiated on service provider side so two traces (from Client and from + * Service Provider) can be correlated. + */ +export interface Link { + /** The {@link SpanContext} of a linked span. 
*/ + context: SpanContext; + /** A set of {@link SpanAttributes} on the link. */ + attributes?: SpanAttributes; +} +//# sourceMappingURL=link.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.js b/node_modules/@opentelemetry/api/build/src/trace/link.js new file mode 100644 index 0000000000..8036a6346d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.js.map b/node_modules/@opentelemetry/api/build/src/trace/link.js.map new file mode 100644 index 0000000000..18c0a81308 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/link.js.map @@ -0,0 +1 @@ +{"version":3,"file":"link.js","sourceRoot":"","sources":["../../../src/trace/link.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { SpanAttributes } from './attributes';\nimport { SpanContext } from './span_context';\n\n/**\n * A pointer from the current {@link Span} to another span in the same trace or\n * in a different trace.\n * Few examples of Link usage.\n * 1. Batch Processing: A batch of elements may contain elements associated\n * with one or more traces/spans. Since there can only be one parent\n * SpanContext, Link is used to keep reference to SpanContext of all\n * elements in the batch.\n * 2. Public Endpoint: A SpanContext in incoming client request on a public\n * endpoint is untrusted from service provider perspective. In such case it\n * is advisable to start a new trace with appropriate sampling decision.\n * However, it is desirable to associate incoming SpanContext to new trace\n * initiated on service provider side so two traces (from Client and from\n * Service Provider) can be correlated.\n */\nexport interface Link {\n /** The {@link SpanContext} of a linked span. */\n context: SpanContext;\n /** A set of {@link SpanAttributes} on the link. 
*/\n attributes?: SpanAttributes;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span.d.ts new file mode 100644 index 0000000000..c5f28141c0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.d.ts @@ -0,0 +1,101 @@ +import { Exception } from '../common/Exception'; +import { TimeInput } from '../common/Time'; +import { SpanAttributes, SpanAttributeValue } from './attributes'; +import { SpanContext } from './span_context'; +import { SpanStatus } from './status'; +/** + * An interface that represents a span. A span represents a single operation + * within a trace. Examples of span might include remote procedure calls or a + * in-process function calls to sub-components. A Trace has a single, top-level + * "root" Span that in turn may have zero or more child Spans, which in turn + * may have children. + * + * Spans are created by the {@link Tracer.startSpan} method. + */ +export interface Span { + /** + * Returns the {@link SpanContext} object associated with this Span. + * + * Get an immutable, serializable identifier for this span that can be used + * to create new child spans. Returned SpanContext is usable even after the + * span ends. + * + * @returns the SpanContext object associated with this Span. + */ + spanContext(): SpanContext; + /** + * Sets an attribute to the span. + * + * Sets a single Attribute with the key and value passed as arguments. + * + * @param key the key for this attribute. + * @param value the value for this attribute. Setting a value null or + * undefined is invalid and will result in undefined behavior. + */ + setAttribute(key: string, value: SpanAttributeValue): this; + /** + * Sets attributes to the span. + * + * @param attributes the attributes that will be added. + * null or undefined attribute values + * are invalid and will result in undefined behavior. + */ + setAttributes(attributes: SpanAttributes): this; + /** + * Adds an event to the Span. + * + * @param name the name of the event. + * @param [attributesOrStartTime] the attributes that will be added; these are + * associated with this event. Can be also a start time + * if type is {@type TimeInput} and 3rd param is undefined + * @param [startTime] start time of the event. + */ + addEvent(name: string, attributesOrStartTime?: SpanAttributes | TimeInput, startTime?: TimeInput): this; + /** + * Sets a status to the span. If used, this will override the default Span + * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value + * of previous calls to SetStatus on the Span. + * + * @param status the SpanStatus to set. + */ + setStatus(status: SpanStatus): this; + /** + * Updates the Span name. + * + * This will override the name provided via {@link Tracer.startSpan}. + * + * Upon this update, any sampling behavior based on Span name will depend on + * the implementation. + * + * @param name the Span name. + */ + updateName(name: string): this; + /** + * Marks the end of Span execution. + * + * Call to End of a Span MUST not have any effects on child spans. Those may + * still be running and can be ended later. + * + * Do not return `this`. The Span generally should not be used after it + * is ended so chaining is not desired in this context. + * + * @param [endTime] the time to set as Span's end time. If not provided, + * use the current time as the span's end time. 
+ */ + end(endTime?: TimeInput): void; + /** + * Returns the flag whether this span will be recorded. + * + * @returns true if this Span is active and recording information like events + * with the `AddEvent` operation and attributes using `setAttributes`. + */ + isRecording(): boolean; + /** + * Sets exception as a span event + * @param exception the exception the only accepted values are string or Error + * @param [time] the time to set as Span's event time. If not provided, + * use the current time. + */ + recordException(exception: Exception, time?: TimeInput): void; +} +//# sourceMappingURL=span.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.js b/node_modules/@opentelemetry/api/build/src/trace/span.js new file mode 100644 index 0000000000..b50af46270 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.js.map b/node_modules/@opentelemetry/api/build/src/trace/span.js.map new file mode 100644 index 0000000000..f24165b769 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span.js","sourceRoot":"","sources":["../../../src/trace/span.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Exception } from '../common/Exception';\nimport { TimeInput } from '../common/Time';\nimport { SpanAttributes, SpanAttributeValue } from './attributes';\nimport { SpanContext } from './span_context';\nimport { SpanStatus } from './status';\n\n/**\n * An interface that represents a span. A span represents a single operation\n * within a trace. Examples of span might include remote procedure calls or a\n * in-process function calls to sub-components. 
A Trace has a single, top-level\n * \"root\" Span that in turn may have zero or more child Spans, which in turn\n * may have children.\n *\n * Spans are created by the {@link Tracer.startSpan} method.\n */\nexport interface Span {\n /**\n * Returns the {@link SpanContext} object associated with this Span.\n *\n * Get an immutable, serializable identifier for this span that can be used\n * to create new child spans. Returned SpanContext is usable even after the\n * span ends.\n *\n * @returns the SpanContext object associated with this Span.\n */\n spanContext(): SpanContext;\n\n /**\n * Sets an attribute to the span.\n *\n * Sets a single Attribute with the key and value passed as arguments.\n *\n * @param key the key for this attribute.\n * @param value the value for this attribute. Setting a value null or\n * undefined is invalid and will result in undefined behavior.\n */\n setAttribute(key: string, value: SpanAttributeValue): this;\n\n /**\n * Sets attributes to the span.\n *\n * @param attributes the attributes that will be added.\n * null or undefined attribute values\n * are invalid and will result in undefined behavior.\n */\n setAttributes(attributes: SpanAttributes): this;\n\n /**\n * Adds an event to the Span.\n *\n * @param name the name of the event.\n * @param [attributesOrStartTime] the attributes that will be added; these are\n * associated with this event. Can be also a start time\n * if type is {@type TimeInput} and 3rd param is undefined\n * @param [startTime] start time of the event.\n */\n addEvent(\n name: string,\n attributesOrStartTime?: SpanAttributes | TimeInput,\n startTime?: TimeInput\n ): this;\n\n /**\n * Sets a status to the span. If used, this will override the default Span\n * status. Default is {@link SpanStatusCode.UNSET}. SetStatus overrides the value\n * of previous calls to SetStatus on the Span.\n *\n * @param status the SpanStatus to set.\n */\n setStatus(status: SpanStatus): this;\n\n /**\n * Updates the Span name.\n *\n * This will override the name provided via {@link Tracer.startSpan}.\n *\n * Upon this update, any sampling behavior based on Span name will depend on\n * the implementation.\n *\n * @param name the Span name.\n */\n updateName(name: string): this;\n\n /**\n * Marks the end of Span execution.\n *\n * Call to End of a Span MUST not have any effects on child spans. Those may\n * still be running and can be ended later.\n *\n * Do not return `this`. The Span generally should not be used after it\n * is ended so chaining is not desired in this context.\n *\n * @param [endTime] the time to set as Span's end time. If not provided,\n * use the current time as the span's end time.\n */\n end(endTime?: TimeInput): void;\n\n /**\n * Returns the flag whether this span will be recorded.\n *\n * @returns true if this Span is active and recording information like events\n * with the `AddEvent` operation and attributes using `setAttributes`.\n */\n isRecording(): boolean;\n\n /**\n * Sets exception as a span event\n * @param exception the exception the only accepted values are string or Error\n * @param [time] the time to set as Span's event time. 
If not provided,\n * use the current time.\n */\n recordException(exception: Exception, time?: TimeInput): void;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts new file mode 100644 index 0000000000..f30933a1fb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.d.ts @@ -0,0 +1,53 @@ +import { TraceState } from './trace_state'; +/** + * A SpanContext represents the portion of a {@link Span} which must be + * serialized and propagated along side of a {@link Baggage}. + */ +export interface SpanContext { + /** + * The ID of the trace that this span belongs to. It is worldwide unique + * with practically sufficient probability by being made as 16 randomly + * generated bytes, encoded as a 32 lowercase hex characters corresponding to + * 128 bits. + */ + traceId: string; + /** + * The ID of the Span. It is globally unique with practically sufficient + * probability by being made as 8 randomly generated bytes, encoded as a 16 + * lowercase hex characters corresponding to 64 bits. + */ + spanId: string; + /** + * Only true if the SpanContext was propagated from a remote parent. + */ + isRemote?: boolean; + /** + * Trace flags to propagate. + * + * It is represented as 1 byte (bitmap). Bit to represent whether trace is + * sampled or not. When set, the least significant bit documents that the + * caller may have recorded trace data. A caller who does not record trace + * data out-of-band leaves this flag unset. + * + * see {@link TraceFlags} for valid flag values. + */ + traceFlags: number; + /** + * Tracing-system-specific info to propagate. + * + * The tracestate field value is a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * More Info: https://www.w3.org/TR/trace-context/#tracestate-field + * + * Examples: + * Single tracing system (generic format): + * tracestate: rojo=00f067aa0ba902b7 + * Multiple tracing systems (with different formatting): + * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE + */ + traceState?: TraceState; +} +//# sourceMappingURL=span_context.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.js b/node_modules/@opentelemetry/api/build/src/trace/span_context.js new file mode 100644 index 0000000000..4b7976ce70 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map b/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map new file mode 100644 index 0000000000..005386d689 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_context.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_context.js","sourceRoot":"","sources":["../../../src/trace/span_context.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { TraceState } from './trace_state';\n\n/**\n * A SpanContext represents the portion of a {@link Span} which must be\n * serialized and propagated along side of a {@link Baggage}.\n */\nexport interface SpanContext {\n /**\n * The ID of the trace that this span belongs to. It is worldwide unique\n * with practically sufficient probability by being made as 16 randomly\n * generated bytes, encoded as a 32 lowercase hex characters corresponding to\n * 128 bits.\n */\n traceId: string;\n /**\n * The ID of the Span. It is globally unique with practically sufficient\n * probability by being made as 8 randomly generated bytes, encoded as a 16\n * lowercase hex characters corresponding to 64 bits.\n */\n spanId: string;\n /**\n * Only true if the SpanContext was propagated from a remote parent.\n */\n isRemote?: boolean;\n /**\n * Trace flags to propagate.\n *\n * It is represented as 1 byte (bitmap). Bit to represent whether trace is\n * sampled or not. When set, the least significant bit documents that the\n * caller may have recorded trace data. A caller who does not record trace\n * data out-of-band leaves this flag unset.\n *\n * see {@link TraceFlags} for valid flag values.\n */\n traceFlags: number;\n /**\n * Tracing-system-specific info to propagate.\n *\n * The tracestate field value is a `list` as defined below. The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n * More Info: https://www.w3.org/TR/trace-context/#tracestate-field\n *\n * Examples:\n * Single tracing system (generic format):\n * tracestate: rojo=00f067aa0ba902b7\n * Multiple tracing systems (with different formatting):\n * tracestate: rojo=00f067aa0ba902b7,congo=t61rcWkgMzE\n */\n traceState?: TraceState;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts b/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts new file mode 100644 index 0000000000..a89846f656 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.d.ts @@ -0,0 +1,27 @@ +export declare enum SpanKind { + /** Default value. 
Indicates that the span is used internally. */ + INTERNAL = 0, + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SERVER = 1, + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + CLIENT = 2, + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + PRODUCER = 3, + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + CONSUMER = 4 +} +//# sourceMappingURL=span_kind.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js new file mode 100644 index 0000000000..9c06e2c255 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SpanKind = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var SpanKind; +(function (SpanKind) { + /** Default value. Indicates that the span is used internally. */ + SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; + /** + * Indicates that the span covers server-side handling of an RPC or other + * remote request. + */ + SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; + /** + * Indicates that the span covers the client-side wrapper around an RPC or + * other remote request. + */ + SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; + /** + * Indicates that the span describes producer sending a message to a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. + */ + SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; + /** + * Indicates that the span describes consumer receiving a message from a + * broker. Unlike client and server, there is no direct critical path latency + * relationship between producer and consumer spans. 
+ */ + SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; +})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); +//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map new file mode 100644 index 0000000000..c0ba360f14 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js.map @@ -0,0 +1 @@ +{"version":3,"file":"span_kind.js","sourceRoot":"","sources":["../../../src/trace/span_kind.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,IAAY,QA6BX;AA7BD,WAAY,QAAQ;IAClB,iEAAiE;IACjE,+CAAY,CAAA;IAEZ;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;OAGG;IACH,2CAAU,CAAA;IAEV;;;;OAIG;IACH,+CAAY,CAAA;IAEZ;;;;OAIG;IACH,+CAAY,CAAA;AACd,CAAC,EA7BW,QAAQ,GAAR,gBAAQ,KAAR,gBAAQ,QA6BnB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum SpanKind {\n /** Default value. Indicates that the span is used internally. */\n INTERNAL = 0,\n\n /**\n * Indicates that the span covers server-side handling of an RPC or other\n * remote request.\n */\n SERVER = 1,\n\n /**\n * Indicates that the span covers the client-side wrapper around an RPC or\n * other remote request.\n */\n CLIENT = 2,\n\n /**\n * Indicates that the span describes producer sending a message to a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n PRODUCER = 3,\n\n /**\n * Indicates that the span describes consumer receiving a message from a\n * broker. Unlike client and server, there is no direct critical path latency\n * relationship between producer and consumer spans.\n */\n CONSUMER = 4,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts new file mode 100644 index 0000000000..f191111487 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.d.ts @@ -0,0 +1,17 @@ +import { Span } from './span'; +import { SpanContext } from './span_context'; +export declare function isValidTraceId(traceId: string): boolean; +export declare function isValidSpanId(spanId: string): boolean; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +export declare function isSpanContextValid(spanContext: SpanContext): boolean; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +export declare function wrapSpanContext(spanContext: SpanContext): Span; +//# sourceMappingURL=spancontext-utils.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js new file mode 100644 index 0000000000..2c94969b5b --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var invalid_span_constants_1 = require("./invalid-span-constants"); +var NonRecordingSpan_1 = require("./NonRecordingSpan"); +var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; +var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; +function isValidTraceId(traceId) { + return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; +} +exports.isValidTraceId = isValidTraceId; +function isValidSpanId(spanId) { + return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; +} +exports.isValidSpanId = isValidSpanId; +/** + * Returns true if this {@link SpanContext} is valid. + * @return true if this {@link SpanContext} is valid. 
+ */ +function isSpanContextValid(spanContext) { + return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); +} +exports.isSpanContextValid = isSpanContextValid; +/** + * Wrap the given {@link SpanContext} in a new non-recording {@link Span} + * + * @param spanContext span context to be wrapped + * @returns a new non-recording {@link Span} with the provided context + */ +function wrapSpanContext(spanContext) { + return new NonRecordingSpan_1.NonRecordingSpan(spanContext); +} +exports.wrapSpanContext = wrapSpanContext; +//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map new file mode 100644 index 0000000000..45374a10b1 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js.map @@ -0,0 +1 @@ +{"version":3,"file":"spancontext-utils.js","sourceRoot":"","sources":["../../../src/trace/spancontext-utils.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,mEAA2E;AAC3E,uDAAsD;AAItD,IAAM,mBAAmB,GAAG,mBAAmB,CAAC;AAChD,IAAM,kBAAkB,GAAG,iBAAiB,CAAC;AAE7C,SAAgB,cAAc,CAAC,OAAe;IAC5C,OAAO,mBAAmB,CAAC,IAAI,CAAC,OAAO,CAAC,IAAI,OAAO,KAAK,wCAAe,CAAC;AAC1E,CAAC;AAFD,wCAEC;AAED,SAAgB,aAAa,CAAC,MAAc;IAC1C,OAAO,kBAAkB,CAAC,IAAI,CAAC,MAAM,CAAC,IAAI,MAAM,KAAK,uCAAc,CAAC;AACtE,CAAC;AAFD,sCAEC;AAED;;;GAGG;AACH,SAAgB,kBAAkB,CAAC,WAAwB;IACzD,OAAO,CACL,cAAc,CAAC,WAAW,CAAC,OAAO,CAAC,IAAI,aAAa,CAAC,WAAW,CAAC,MAAM,CAAC,CACzE,CAAC;AACJ,CAAC;AAJD,gDAIC;AAED;;;;;GAKG;AACH,SAAgB,eAAe,CAAC,WAAwB;IACtD,OAAO,IAAI,mCAAgB,CAAC,WAAW,CAAC,CAAC;AAC3C,CAAC;AAFD,0CAEC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants';\nimport { NonRecordingSpan } from './NonRecordingSpan';\nimport { Span } from './span';\nimport { SpanContext } from './span_context';\n\nconst VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i;\nconst VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i;\n\nexport function isValidTraceId(traceId: string): boolean {\n return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID;\n}\n\nexport function isValidSpanId(spanId: string): boolean {\n return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID;\n}\n\n/**\n * Returns true if this {@link SpanContext} is valid.\n * @return true if this {@link SpanContext} is valid.\n */\nexport function isSpanContextValid(spanContext: SpanContext): boolean {\n return (\n isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)\n );\n}\n\n/**\n * Wrap the given {@link SpanContext} in a new non-recording {@link Span}\n *\n * @param spanContext span context to be wrapped\n * @returns a new non-recording {@link Span} with the provided context\n */\nexport function wrapSpanContext(spanContext: SpanContext): Span {\n return new NonRecordingSpan(spanContext);\n}\n"]} \ No newline 
at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.d.ts b/node_modules/@opentelemetry/api/build/src/trace/status.d.ts new file mode 100644 index 0000000000..ab19a68f72 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.d.ts @@ -0,0 +1,25 @@ +export interface SpanStatus { + /** The status code of this message. */ + code: SpanStatusCode; + /** A developer-facing error message. */ + message?: string; +} +/** + * An enumeration of status codes. + */ +export declare enum SpanStatusCode { + /** + * The default status. + */ + UNSET = 0, + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + OK = 1, + /** + * The operation contains an error. + */ + ERROR = 2 +} +//# sourceMappingURL=status.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.js b/node_modules/@opentelemetry/api/build/src/trace/status.js new file mode 100644 index 0000000000..50cbdef8e0 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SpanStatusCode = void 0; +/** + * An enumeration of status codes. + */ +var SpanStatusCode; +(function (SpanStatusCode) { + /** + * The default status. + */ + SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; + /** + * The operation has been validated by an Application developer or + * Operator to have completed successfully. + */ + SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; + /** + * The operation contains an error. + */ + SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; +})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); +//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.js.map b/node_modules/@opentelemetry/api/build/src/trace/status.js.map new file mode 100644 index 0000000000..b921cae589 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/status.js.map @@ -0,0 +1 @@ +{"version":3,"file":"status.js","sourceRoot":"","sources":["../../../src/trace/status.ts"],"names":[],"mappings":";;;AAsBA;;GAEG;AACH,IAAY,cAcX;AAdD,WAAY,cAAc;IACxB;;OAEG;IACH,qDAAS,CAAA;IACT;;;OAGG;IACH,+CAAM,CAAA;IACN;;OAEG;IACH,qDAAS,CAAA;AACX,CAAC,EAdW,cAAc,GAAd,sBAAc,KAAd,sBAAc,QAczB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport interface SpanStatus {\n /** The status code of this message. */\n code: SpanStatusCode;\n /** A developer-facing error message. 
*/\n message?: string;\n}\n\n/**\n * An enumeration of status codes.\n */\nexport enum SpanStatusCode {\n /**\n * The default status.\n */\n UNSET = 0,\n /**\n * The operation has been validated by an Application developer or\n * Operator to have completed successfully.\n */\n OK = 1,\n /**\n * The operation contains an error.\n */\n ERROR = 2,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts new file mode 100644 index 0000000000..11288ba95a --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.d.ts @@ -0,0 +1,7 @@ +export declare enum TraceFlags { + /** Represents no flag set. */ + NONE = 0, + /** Bit to represent whether trace is sampled in trace flags. */ + SAMPLED = 1 +} +//# sourceMappingURL=trace_flags.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js new file mode 100644 index 0000000000..f8d4dd8a99 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TraceFlags = void 0; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +var TraceFlags; +(function (TraceFlags) { + /** Represents no flag set. */ + TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; + /** Bit to represent whether trace is sampled in trace flags. 
*/ + TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; +})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); +//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map new file mode 100644 index 0000000000..965ce7945d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_flags.js","sourceRoot":"","sources":["../../../src/trace/trace_flags.ts"],"names":[],"mappings":";;;AAAA;;;;;;;;;;;;;;GAcG;AACH,IAAY,UAKX;AALD,WAAY,UAAU;IACpB,8BAA8B;IAC9B,2CAAU,CAAA;IACV,gEAAgE;IAChE,iDAAkB,CAAA;AACpB,CAAC,EALW,UAAU,GAAV,kBAAU,KAAV,kBAAU,QAKrB","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport enum TraceFlags {\n /** Represents no flag set. */\n NONE = 0x0,\n /** Bit to represent whether trace is sampled in trace flags. */\n SAMPLED = 0x1 << 0,\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts b/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts new file mode 100644 index 0000000000..f275b8bed6 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.d.ts @@ -0,0 +1,38 @@ +export interface TraceState { + /** + * Create a new TraceState which inherits from this TraceState and has the + * given key set. + * The new entry will always be added in the front of the list of states. + * + * @param key key of the TraceState entry. + * @param value value of the TraceState entry. + */ + set(key: string, value: string): TraceState; + /** + * Return a new TraceState which inherits from this TraceState but does not + * contain the given key. + * + * @param key the key for the TraceState entry to be removed. + */ + unset(key: string): TraceState; + /** + * Returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + * + * @param key with which the specified value is to be associated. + * @returns the value to which the specified key is mapped, or `undefined` if + * this map contains no mapping for the key. + */ + get(key: string): string | undefined; + /** + * Serializes the TraceState to a `list` as defined below. The `list` is a + * series of `list-members` separated by commas `,`, and a list-member is a + * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs + * surrounding `list-members` are ignored. There can be a maximum of 32 + * `list-members` in a `list`. + * + * @returns the serialized string. 
+ */ + serialize(): string; +} +//# sourceMappingURL=trace_state.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js new file mode 100644 index 0000000000..139703815d --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map new file mode 100644 index 0000000000..267f2e8205 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js.map @@ -0,0 +1 @@ +{"version":3,"file":"trace_state.js","sourceRoot":"","sources":["../../../src/trace/trace_state.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nexport interface TraceState {\n /**\n * Create a new TraceState which inherits from this TraceState and has the\n * given key set.\n * The new entry will always be added in the front of the list of states.\n *\n * @param key key of the TraceState entry.\n * @param value value of the TraceState entry.\n */\n set(key: string, value: string): TraceState;\n\n /**\n * Return a new TraceState which inherits from this TraceState but does not\n * contain the given key.\n *\n * @param key the key for the TraceState entry to be removed.\n */\n unset(key: string): TraceState;\n\n /**\n * Returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n *\n * @param key with which the specified value is to be associated.\n * @returns the value to which the specified key is mapped, or `undefined` if\n * this map contains no mapping for the key.\n */\n get(key: string): string | undefined;\n\n // TODO: Consider to add support for merging an object as well by also\n // accepting a single internalTraceState argument similar to the constructor.\n\n /**\n * Serializes the TraceState to a `list` as defined below. 
The `list` is a\n * series of `list-members` separated by commas `,`, and a list-member is a\n * key/value pair separated by an equals sign `=`. Spaces and horizontal tabs\n * surrounding `list-members` are ignored. There can be a maximum of 32\n * `list-members` in a `list`.\n *\n * @returns the serialized string.\n */\n serialize(): string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts new file mode 100644 index 0000000000..25090899e8 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.d.ts @@ -0,0 +1,71 @@ +import { Context } from '../context/types'; +import { Span } from './span'; +import { SpanOptions } from './SpanOptions'; +/** + * Tracer provides an interface for creating {@link Span}s. + */ +export interface Tracer { + /** + * Starts a new {@link Span}. Start the span without setting it on context. + * + * This method do NOT modify the current Context. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @returns Span The newly created span + * @example + * const span = tracer.startSpan('op'); + * span.setAttribute('key', 'value'); + * span.end(); + */ + startSpan(name: string, options?: SpanOptions, context?: Context): Span; + /** + * Starts a new {@link Span} and calls the given function passing it the + * created span as first argument. + * Additionally the new span gets set in context and this context is activated + * for the duration of the function call. + * + * @param name The name of the span + * @param [options] SpanOptions used for span creation + * @param [context] Context to use to extract parent + * @param fn function called in the context of the span and receives the newly created span as an argument + * @returns return value of fn + * @example + * const something = tracer.startActiveSpan('op', span => { + * try { + * do some work + * span.setStatus({code: SpanStatusCode.OK}); + * return something; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } finally { + * span.end(); + * } + * }); + * + * @example + * const span = tracer.startActiveSpan('op', span => { + * try { + * do some work + * return span; + * } catch (err) { + * span.setStatus({ + * code: SpanStatusCode.ERROR, + * message: err.message, + * }); + * throw err; + * } + * }); + * do some more work + * span.end(); + */ + startActiveSpan unknown>(name: string, fn: F): ReturnType; + startActiveSpan unknown>(name: string, options: SpanOptions, fn: F): ReturnType; + startActiveSpan unknown>(name: string, options: SpanOptions, context: Context, fn: F): ReturnType; +} +//# sourceMappingURL=tracer.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.js b/node_modules/@opentelemetry/api/build/src/trace/tracer.js new file mode 100644 index 0000000000..d710ef9acb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map new file mode 100644 index 0000000000..3c3c591a70 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer.js","sourceRoot":"","sources":["../../../src/trace/tracer.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Context } from '../context/types';\nimport { Span } from './span';\nimport { SpanOptions } from './SpanOptions';\n\n/**\n * Tracer provides an interface for creating {@link Span}s.\n */\nexport interface Tracer {\n /**\n * Starts a new {@link Span}. 
Start the span without setting it on context.\n *\n * This method do NOT modify the current Context.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @returns Span The newly created span\n * @example\n * const span = tracer.startSpan('op');\n * span.setAttribute('key', 'value');\n * span.end();\n */\n startSpan(name: string, options?: SpanOptions, context?: Context): Span;\n\n /**\n * Starts a new {@link Span} and calls the given function passing it the\n * created span as first argument.\n * Additionally the new span gets set in context and this context is activated\n * for the duration of the function call.\n *\n * @param name The name of the span\n * @param [options] SpanOptions used for span creation\n * @param [context] Context to use to extract parent\n * @param fn function called in the context of the span and receives the newly created span as an argument\n * @returns return value of fn\n * @example\n * const something = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * span.setStatus({code: SpanStatusCode.OK});\n * return something;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * } finally {\n * span.end();\n * }\n * });\n *\n * @example\n * const span = tracer.startActiveSpan('op', span => {\n * try {\n * do some work\n * return span;\n * } catch (err) {\n * span.setStatus({\n * code: SpanStatusCode.ERROR,\n * message: err.message,\n * });\n * throw err;\n * }\n * });\n * do some more work\n * span.end();\n */\n startActiveSpan unknown>(\n name: string,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n fn: F\n ): ReturnType;\n startActiveSpan unknown>(\n name: string,\n options: SpanOptions,\n context: Context,\n fn: F\n ): ReturnType;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts new file mode 100644 index 0000000000..f3bbccfc25 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.d.ts @@ -0,0 +1,10 @@ +/** + * An interface describes additional metadata of a tracer. + */ +export interface TracerOptions { + /** + * The schemaUrl of the tracer or instrumentation library + */ + schemaUrl?: string; +} +//# sourceMappingURL=tracer_options.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js new file mode 100644 index 0000000000..3547251a27 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map new file mode 100644 index 0000000000..a743252796 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_options.js","sourceRoot":"","sources":["../../../src/trace/tracer_options.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n/**\n * An interface describes additional metadata of a tracer.\n */\nexport interface TracerOptions {\n /**\n * The schemaUrl of the tracer or instrumentation library\n */\n schemaUrl?: string;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts new file mode 100644 index 0000000000..9b2f7a954c --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.d.ts @@ -0,0 +1,21 @@ +import { Tracer } from './tracer'; +import { TracerOptions } from './tracer_options'; +/** + * A registry for creating named {@link Tracer}s. + */ +export interface TracerProvider { + /** + * Returns a Tracer, creating one if one with the given name and version is + * not already created. + * + * This function may return different Tracer types (e.g. + * {@link NoopTracerProvider} vs. a functional tracer). + * + * @param name The name of the tracer or instrumentation library. + * @param version The version of the tracer or instrumentation library. + * @param options The options of the tracer or instrumentation library. + * @returns Tracer A Tracer with the given name and version + */ + getTracer(name: string, version?: string, options?: TracerOptions): Tracer; +} +//# sourceMappingURL=tracer_provider.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js new file mode 100644 index 0000000000..4c511db953 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js @@ -0,0 +1,18 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map new file mode 100644 index 0000000000..31f334aae7 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js.map @@ -0,0 +1 @@ +{"version":3,"file":"tracer_provider.js","sourceRoot":"","sources":["../../../src/trace/tracer_provider.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Tracer } from './tracer';\nimport { TracerOptions } from './tracer_options';\n\n/**\n * A registry for creating named {@link Tracer}s.\n */\nexport interface TracerProvider {\n /**\n * Returns a Tracer, creating one if one with the given name and version is\n * not already created.\n *\n * This function may return different Tracer types (e.g.\n * {@link NoopTracerProvider} vs. a functional tracer).\n *\n * @param name The name of the tracer or instrumentation library.\n * @param version The version of the tracer or instrumentation library.\n * @param options The options of the tracer or instrumentation library.\n * @returns Tracer A Tracer with the given name and version\n */\n getTracer(name: string, version?: string, options?: TracerOptions): Tracer;\n}\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/version.d.ts b/node_modules/@opentelemetry/api/build/src/version.d.ts new file mode 100644 index 0000000000..28a7146cad --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/version.d.ts @@ -0,0 +1,2 @@ +export declare const VERSION = "1.1.0"; +//# sourceMappingURL=version.d.ts.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/version.js b/node_modules/@opentelemetry/api/build/src/version.js new file mode 100644 index 0000000000..ab2ae8e792 --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/version.js @@ -0,0 +1,21 @@ +"use strict"; +/* + * Copyright The OpenTelemetry Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.VERSION = void 0; +// this is autogenerated file, see scripts/version-update.js +exports.VERSION = '1.1.0'; +//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/version.js.map b/node_modules/@opentelemetry/api/build/src/version.js.map new file mode 100644 index 0000000000..6079687fbb --- /dev/null +++ b/node_modules/@opentelemetry/api/build/src/version.js.map @@ -0,0 +1 @@ +{"version":3,"file":"version.js","sourceRoot":"","sources":["../../src/version.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;GAcG;;;AAEH,4DAA4D;AAC/C,QAAA,OAAO,GAAG,OAAO,CAAC","sourcesContent":["/*\n * Copyright The OpenTelemetry Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * https://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// this is autogenerated file, see scripts/version-update.js\nexport const VERSION = '1.1.0';\n"]} \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/package.json b/node_modules/@opentelemetry/api/package.json new file mode 100644 index 0000000000..0bfdbc9a55 --- /dev/null +++ b/node_modules/@opentelemetry/api/package.json @@ -0,0 +1,90 @@ +{ + "name": "@opentelemetry/api", + "version": "1.1.0", + "description": "Public API for OpenTelemetry", + "main": "build/src/index.js", + "module": "build/esm/index.js", + "types": "build/src/index.d.ts", + "browser": { + "./src/platform/index.ts": "./src/platform/browser/index.ts", + "./build/esm/platform/index.js": "./build/esm/platform/browser/index.js", + "./build/src/platform/index.js": "./build/src/platform/browser/index.js" + }, + "repository": "https://github.com/open-telemetry/opentelemetry-js-api.git", + "scripts": { + "clean": "tsc --build --clean tsconfig.json tsconfig.esm.json", + "codecov:browser": "nyc report --reporter=json && codecov -f coverage/*.json -p .", + "codecov": "nyc report --reporter=json && codecov -f coverage/*.json -p .", + "precompile": "npm run version", + "compile": "tsc --build tsconfig.json tsconfig.esm.json", + "docs": "typedoc", + "docs:deploy": "gh-pages --dist docs/out", + "docs:test": "linkinator docs/out --silent && linkinator docs/*.md *.md --markdown --silent", + "lint:fix": "eslint src test --ext .ts --fix", + "lint": "eslint src test --ext .ts", + "test:browser": "nyc karma start --single-run", + "test": "nyc ts-mocha -p tsconfig.json test/**/*.test.ts", + "cycle-check": "dpdm --exit-code circular:1 src/index.ts", + "version": "node scripts/version-update.js", + "prewatch": "npm run version", + "watch": "tsc --build --watch" + }, + "keywords": [ + "opentelemetry", + "nodejs", + "browser", + "tracing", + "profiling", + "stats", + "monitoring" + ], + "author": "OpenTelemetry Authors", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + }, + "files": [ + "build/esm/**/*.js", + "build/esm/**/*.js.map", + "build/esm/**/*.d.ts", + "build/src/**/*.js", + "build/src/**/*.js.map", + "build/src/**/*.d.ts", + "LICENSE", + "README.md" + ], + "publishConfig": { + "access": 
"public" + }, + "devDependencies": { + "@types/mocha": "8.2.2", + "@types/node": "14.17.4", + "@types/sinon": "10.0.2", + "@types/webpack-env": "1.16.0", + "@typescript-eslint/eslint-plugin": "5.0.0", + "@typescript-eslint/parser": "5.0.0", + "codecov": "3.8.2", + "dpdm": "3.7.1", + "eslint": "7.32.0", + "eslint-plugin-header": "3.1.1", + "eslint-plugin-node": "11.1.0", + "gh-pages": "3.2.0", + "istanbul-instrumenter-loader": "3.0.1", + "karma": "5.2.3", + "karma-chrome-launcher": "3.1.0", + "karma-coverage-istanbul-reporter": "3.0.3", + "karma-mocha": "2.0.1", + "karma-spec-reporter": "0.0.32", + "karma-webpack": "4.0.2", + "lerna-changelog": "1.0.1", + "linkinator": "2.13.6", + "mocha": "7.2.0", + "nyc": "15.1.0", + "sinon": "11.1.1", + "ts-loader": "8.2.0", + "ts-mocha": "8.0.0", + "typedoc": "0.21.2", + "typescript": "4.3.5", + "webpack": "4.46.0" + } +} diff --git a/node_modules/@types/node-fetch/LICENSE b/node_modules/@types/node-fetch/LICENSE new file mode 100755 index 0000000000..9e841e7a26 --- /dev/null +++ b/node_modules/@types/node-fetch/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/node-fetch/README.md b/node_modules/@types/node-fetch/README.md new file mode 100755 index 0000000000..485e681f7d --- /dev/null +++ b/node_modules/@types/node-fetch/README.md @@ -0,0 +1,16 @@ +# Installation +> `npm install --save @types/node-fetch` + +# Summary +This package contains type definitions for node-fetch (https://github.com/bitinn/node-fetch). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch. 
+ +### Additional Details + * Last updated: Wed, 15 Jun 2022 20:31:35 GMT + * Dependencies: [@types/form-data](https://npmjs.com/package/@types/form-data), [@types/node](https://npmjs.com/package/@types/node) + * Global values: none + +# Credits +These definitions were written by [Torsten Werner](https://github.com/torstenwerner), [Niklas Lindgren](https://github.com/nikcorg), [Vinay Bedre](https://github.com/vinaybedre), [Antonio Román](https://github.com/kyranet), [Andrew Leedham](https://github.com/AndrewLeedham), [Jason Li](https://github.com/JasonLi914), [Steve Faulkner](https://github.com/southpolesteve), [ExE Boss](https://github.com/ExE-Boss), [Alex Savin](https://github.com/alexandrusavin), [Alexis Tyler](https://github.com/OmgImAlexis), [Jakub Kisielewski](https://github.com/kbkk), and [David Glasser](https://github.com/glasser). diff --git a/node_modules/@types/node-fetch/externals.d.ts b/node_modules/@types/node-fetch/externals.d.ts new file mode 100755 index 0000000000..8a1d0f8ed4 --- /dev/null +++ b/node_modules/@types/node-fetch/externals.d.ts @@ -0,0 +1,21 @@ +// `AbortSignal` is defined here to prevent a dependency on a particular +// implementation like the `abort-controller` package, and to avoid requiring +// the `dom` library in `tsconfig.json`. + +export interface AbortSignal { + aborted: boolean; + + addEventListener: (type: "abort", listener: ((this: AbortSignal, event: any) => any), options?: boolean | { + capture?: boolean | undefined, + once?: boolean | undefined, + passive?: boolean | undefined + }) => void; + + removeEventListener: (type: "abort", listener: ((this: AbortSignal, event: any) => any), options?: boolean | { + capture?: boolean | undefined + }) => void; + + dispatchEvent: (event: any) => boolean; + + onabort: null | ((this: AbortSignal, event: any) => any); +} diff --git a/node_modules/@types/node-fetch/index.d.ts b/node_modules/@types/node-fetch/index.d.ts new file mode 100755 index 0000000000..346d0b2d72 --- /dev/null +++ b/node_modules/@types/node-fetch/index.d.ts @@ -0,0 +1,224 @@ +// Type definitions for node-fetch 2.6 +// Project: https://github.com/bitinn/node-fetch +// Definitions by: Torsten Werner +// Niklas Lindgren +// Vinay Bedre +// Antonio Román +// Andrew Leedham +// Jason Li +// Steve Faulkner +// ExE Boss +// Alex Savin +// Alexis Tyler +// Jakub Kisielewski +// David Glasser +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// + +import FormData = require('form-data'); +import { RequestOptions } from "http"; +import { URLSearchParams, URL } from "url"; +import { AbortSignal } from "./externals"; + +export class Request extends Body { + constructor(input: RequestInfo, init?: RequestInit); + clone(): Request; + context: RequestContext; + headers: Headers; + method: string; + redirect: RequestRedirect; + referrer: string; + url: string; + + // node-fetch extensions to the whatwg/fetch spec + agent?: RequestOptions['agent'] | ((parsedUrl: URL) => RequestOptions['agent']); + compress: boolean; + counter: number; + follow: number; + hostname: string; + port?: number | undefined; + protocol: string; + size: number; + timeout: number; +} + +export interface RequestInit { + // whatwg/fetch standard options + body?: BodyInit | undefined; + headers?: HeadersInit | undefined; + method?: string | undefined; + redirect?: RequestRedirect | undefined; + signal?: AbortSignal | null | undefined; + + // node-fetch extensions + agent?: RequestOptions['agent'] | ((parsedUrl: URL) => RequestOptions['agent']); // =null http.Agent 
instance, allows custom proxy, certificate etc. + compress?: boolean | undefined; // =true support gzip/deflate content encoding. false to disable + follow?: number | undefined; // =20 maximum redirect count. 0 to not follow redirect + size?: number | undefined; // =0 maximum response body size in bytes. 0 to disable + timeout?: number | undefined; // =0 req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies) + + // node-fetch does not support mode, cache or credentials options +} + +export type RequestContext = + "audio" + | "beacon" + | "cspreport" + | "download" + | "embed" + | "eventsource" + | "favicon" + | "fetch" + | "font" + | "form" + | "frame" + | "hyperlink" + | "iframe" + | "image" + | "imageset" + | "import" + | "internal" + | "location" + | "manifest" + | "object" + | "ping" + | "plugin" + | "prefetch" + | "script" + | "serviceworker" + | "sharedworker" + | "style" + | "subresource" + | "track" + | "video" + | "worker" + | "xmlhttprequest" + | "xslt"; +export type RequestMode = "cors" | "no-cors" | "same-origin"; +export type RequestRedirect = "error" | "follow" | "manual"; +export type RequestCredentials = "omit" | "include" | "same-origin"; + +export type RequestCache = + "default" + | "force-cache" + | "no-cache" + | "no-store" + | "only-if-cached" + | "reload"; + +export class Headers implements Iterable<[string, string]> { + constructor(init?: HeadersInit); + forEach(callback: (value: string, name: string) => void): void; + append(name: string, value: string): void; + delete(name: string): void; + get(name: string): string | null; + has(name: string): boolean; + raw(): { [k: string]: string[] }; + set(name: string, value: string): void; + + // Iterable methods + entries(): IterableIterator<[string, string]>; + keys(): IterableIterator; + values(): IterableIterator; + [Symbol.iterator](): Iterator<[string, string]>; +} + +type BlobPart = ArrayBuffer | ArrayBufferView | Blob | string; + +interface BlobOptions { + type?: string | undefined; + endings?: "transparent" | "native" | undefined; +} + +export class Blob { + constructor(blobParts?: BlobPart[], options?: BlobOptions); + readonly type: string; + readonly size: number; + slice(start?: number, end?: number): Blob; + text(): Promise; +} + +export class Body { + constructor(body?: any, opts?: { size?: number | undefined; timeout?: number | undefined }); + arrayBuffer(): Promise; + blob(): Promise; + body: NodeJS.ReadableStream; + bodyUsed: boolean; + buffer(): Promise; + json(): Promise; + size: number; + text(): Promise; + textConverted(): Promise; + timeout: number; +} + +interface SystemError extends Error { + code?: string | undefined; +} + +export class FetchError extends Error { + name: "FetchError"; + constructor(message: string, type: string, systemError?: SystemError); + type: string; + code?: string | undefined; + errno?: string | undefined; +} + +export class Response extends Body { + constructor(body?: BodyInit, init?: ResponseInit); + static error(): Response; + static redirect(url: string, status: number): Response; + clone(): Response; + headers: Headers; + ok: boolean; + redirected: boolean; + status: number; + statusText: string; + type: ResponseType; + url: string; +} + +export type ResponseType = + "basic" + | "cors" + | "default" + | "error" + | "opaque" + | "opaqueredirect"; + +export interface ResponseInit { + headers?: HeadersInit | undefined; + size?: number | undefined; + status?: number | undefined; + statusText?: string | undefined; + timeout?: number | undefined; + url?: 
string | undefined; +} + +interface URLLike { + href: string; +} + +export type HeadersInit = Headers | string[][] | { [key: string]: string }; +// HeaderInit is exported to support backwards compatibility. See PR #34382 +export type HeaderInit = HeadersInit; +export type BodyInit = + ArrayBuffer + | ArrayBufferView + | NodeJS.ReadableStream + | string + | URLSearchParams + | FormData; +export type RequestInfo = string | URLLike | Request; + +declare function fetch( + url: RequestInfo, + init?: RequestInit +): Promise; + +declare namespace fetch { + function isRedirect(code: number): boolean; +} + +export default fetch; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/License b/node_modules/@types/node-fetch/node_modules/form-data/License new file mode 100644 index 0000000000..c7ff12a2f8 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak b/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak new file mode 100644 index 0000000000..f06d86cb31 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/README.md.bak @@ -0,0 +1,356 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v3.0.1.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/Readme.md b/node_modules/@types/node-fetch/node_modules/form-data/Readme.md new file mode 100644 index 0000000000..f06d86cb31 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/Readme.md @@ -0,0 +1,356 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=linux:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=macos:6.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v3.0.1.svg?label=windows:6.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v3.0.1.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
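+
+Since the callback receives a plain `http.IncomingMessage`, you can also collect the response body instead of discarding it. A small sketch (placeholder URL, not from the upstream README):
+
+``` javascript
+var FormData = require('form-data');
+
+var form = new FormData();
+form.append('my_field', 'my value');
+
+form.submit('http://example.org/', function(err, res) {
+  if (err) throw err;
+
+  // buffer the response chunks and print them once the stream ends
+  var chunks = [];
+  res.on('data', function(chunk) { chunks.push(chunk); });
+  res.on('end', function() {
+    console.log('server replied:', Buffer.concat(chunks).toString());
+  });
+});
+```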
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Headers_ getHeaders( [**Headers** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Void_ setBoundary()](https://github.com/form-data/form-data#void-setboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Headers_ getHeaders( [**Headers** _userHeaders_] ) +This method adds the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. By default, the boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` + +#### _Void_ setBoundary(String _boundary_) +Set the boundary string, overriding the default behavior described above. + +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... 
+}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. +```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. +- Starting version `3.x` FormData has dropped support for `node@4.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts b/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts new file mode 100644 index 0000000000..295e9e9bc5 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/index.d.ts @@ -0,0 +1,62 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +// Extracted because @types/node doesn't export interfaces. 
+interface ReadableOptions { + highWaterMark?: number; + encoding?: string; + objectMode?: boolean; + read?(this: stream.Readable, size: number): void; + destroy?(this: stream.Readable, error: Error | null, callback: (error: Error | null) => void): void; + autoDestroy?: boolean; +} + +interface Options extends ReadableOptions { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(userHeaders?: FormData.Headers): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + setBoundary(boundary: string): void; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000000..09e7c70e6e --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000000..cf836b0b65 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/form_data.js @@ -0,0 +1,498 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(options); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.setBoundary = function(boundary) { + this._boundary = boundary; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + this.pipe(request); + if (cb) { + var onResponse; + + var callback = function (error, responce) { + request.removeListener('error', callback); + request.removeListener('response', onResponse); + + return cb.call(this, error, responce); + }; + + onResponse = callback.bind(this, null); + + request.on('error', callback); + request.on('response', onResponse); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js b/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000000..4d35738dd5 --- /dev/null +++ b/node_modules/@types/node-fetch/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/@types/node-fetch/node_modules/form-data/package.json b/node_modules/@types/node-fetch/node_modules/form-data/package.json new file mode 100644 index 0000000000..a2fcb88d55 --- /dev/null +++ 
b/node_modules/@types/node-fetch/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. Can be used to submit forms and file uploads to other web applications.", + "version": "3.0.1", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 6" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "puppeteer": "^1.19.0", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/@types/node-fetch/package.json b/node_modules/@types/node-fetch/package.json new file mode 100755 index 0000000000..3ef5b0231a --- /dev/null +++ b/node_modules/@types/node-fetch/package.json @@ -0,0 +1,83 @@ +{ + "name": "@types/node-fetch", + "version": "2.6.2", + "description": "TypeScript definitions for node-fetch", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node-fetch", + "license": "MIT", + "contributors": [ + { + "name": "Torsten Werner", + "url": "https://github.com/torstenwerner", + "githubUsername": "torstenwerner" + }, + { + "name": "Niklas Lindgren", + "url": "https://github.com/nikcorg", + "githubUsername": "nikcorg" + }, + { + "name": "Vinay Bedre", + "url": "https://github.com/vinaybedre", + "githubUsername": "vinaybedre" + }, + { + "name": "Antonio Román", + "url": "https://github.com/kyranet", + "githubUsername": "kyranet" + }, + { + "name": "Andrew Leedham", + "url": "https://github.com/AndrewLeedham", + "githubUsername": "AndrewLeedham" + }, + { + "name": "Jason Li", + "url": "https://github.com/JasonLi914", + "githubUsername": "JasonLi914" + }, + { + "name": "Steve Faulkner", + "url": "https://github.com/southpolesteve", + "githubUsername": "southpolesteve" + }, + { + 
"name": "ExE Boss", + "url": "https://github.com/ExE-Boss", + "githubUsername": "ExE-Boss" + }, + { + "name": "Alex Savin", + "url": "https://github.com/alexandrusavin", + "githubUsername": "alexandrusavin" + }, + { + "name": "Alexis Tyler", + "url": "https://github.com/OmgImAlexis", + "githubUsername": "OmgImAlexis" + }, + { + "name": "Jakub Kisielewski", + "url": "https://github.com/kbkk", + "githubUsername": "kbkk" + }, + { + "name": "David Glasser", + "url": "https://github.com/glasser", + "githubUsername": "glasser" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/node-fetch" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*", + "form-data": "^3.0.0" + }, + "typesPublisherContentHash": "f539217db3abcaeb4bea994aff274ddc22cfba7cf3758c120545a305f7437942", + "typeScriptVersion": "4.0" +} \ No newline at end of file diff --git a/node_modules/@types/tunnel/LICENSE b/node_modules/@types/tunnel/LICENSE new file mode 100755 index 0000000000..9e841e7a26 --- /dev/null +++ b/node_modules/@types/tunnel/LICENSE @@ -0,0 +1,21 @@ + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/node_modules/@types/tunnel/README.md b/node_modules/@types/tunnel/README.md new file mode 100755 index 0000000000..a2d8b61899 --- /dev/null +++ b/node_modules/@types/tunnel/README.md @@ -0,0 +1,67 @@ +# Installation +> `npm install --save @types/tunnel` + +# Summary +This package contains type definitions for tunnel (https://github.com/koichik/node-tunnel/). + +# Details +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel. 
+## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel/index.d.ts) +````ts +// Type definitions for tunnel 0.0 +// Project: https://github.com/koichik/node-tunnel/ +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// +import { Agent } from 'http'; +import { Agent as HttpsAgent } from 'https'; + +export function httpOverHttp(options?: HttpOptions): Agent; +export function httpsOverHttp(options?: HttpsOverHttpOptions): Agent; +export function httpOverHttps(options?: HttpOverHttpsOptions): HttpsAgent; +export function httpsOverHttps(options?: HttpsOverHttpsOptions): HttpsAgent; + +export interface HttpOptions { + maxSockets?: number | undefined; + proxy?: ProxyOptions | undefined; +} + +export interface HttpsOverHttpOptions extends HttpOptions { + ca?: Buffer[] | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +export interface HttpOverHttpsOptions extends HttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface HttpsOverHttpsOptions extends HttpsOverHttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface ProxyOptions { + host: string; + port: number; + localAddress?: string | undefined; + proxyAuth?: string | undefined; + headers?: { [key: string]: any } | undefined; +} + +export interface HttpsProxyOptions extends ProxyOptions { + ca?: Buffer[] | undefined; + servername?: string | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +```` + +### Additional Details + * Last updated: Fri, 02 Jul 2021 19:37:21 GMT + * Dependencies: [@types/node](https://npmjs.com/package/@types/node) + * Global values: none + +# Credits +These definitions were written by [BendingBender](https://github.com/BendingBender). 
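+
+A quick usage sketch of the API these typings describe (the proxy host, port, and credentials below are placeholders, and the `tunnel` package itself must be installed separately):
+
+``` javascript
+var https = require('https');
+var tunnel = require('tunnel');
+
+// httpsOverHttp tunnels HTTPS requests through a plain HTTP proxy;
+// `proxy.host` and `proxy.port` are the required ProxyOptions fields.
+var agent = tunnel.httpsOverHttp({
+  proxy: {
+    host: 'proxy.internal.example',
+    port: 3128,
+    proxyAuth: 'user:password' // optional, see ProxyOptions above
+  }
+});
+
+https.get({ host: 'example.com', path: '/', agent: agent }, function(res) {
+  console.log(res.statusCode);
+  res.resume();
+});
+```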
diff --git a/node_modules/@types/tunnel/index.d.ts b/node_modules/@types/tunnel/index.d.ts new file mode 100755 index 0000000000..591eb2168d --- /dev/null +++ b/node_modules/@types/tunnel/index.d.ts @@ -0,0 +1,47 @@ +// Type definitions for tunnel 0.0 +// Project: https://github.com/koichik/node-tunnel/ +// Definitions by: BendingBender +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped + +/// +import { Agent } from 'http'; +import { Agent as HttpsAgent } from 'https'; + +export function httpOverHttp(options?: HttpOptions): Agent; +export function httpsOverHttp(options?: HttpsOverHttpOptions): Agent; +export function httpOverHttps(options?: HttpOverHttpsOptions): HttpsAgent; +export function httpsOverHttps(options?: HttpsOverHttpsOptions): HttpsAgent; + +export interface HttpOptions { + maxSockets?: number | undefined; + proxy?: ProxyOptions | undefined; +} + +export interface HttpsOverHttpOptions extends HttpOptions { + ca?: Buffer[] | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} + +export interface HttpOverHttpsOptions extends HttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface HttpsOverHttpsOptions extends HttpsOverHttpOptions { + proxy?: HttpsProxyOptions | undefined; +} + +export interface ProxyOptions { + host: string; + port: number; + localAddress?: string | undefined; + proxyAuth?: string | undefined; + headers?: { [key: string]: any } | undefined; +} + +export interface HttpsProxyOptions extends ProxyOptions { + ca?: Buffer[] | undefined; + servername?: string | undefined; + key?: Buffer | undefined; + cert?: Buffer | undefined; +} diff --git a/node_modules/@types/tunnel/package.json b/node_modules/@types/tunnel/package.json new file mode 100755 index 0000000000..d84dbaa44e --- /dev/null +++ b/node_modules/@types/tunnel/package.json @@ -0,0 +1,27 @@ +{ + "name": "@types/tunnel", + "version": "0.0.3", + "description": "TypeScript definitions for tunnel", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/tunnel", + "license": "MIT", + "contributors": [ + { + "name": "BendingBender", + "url": "https://github.com/BendingBender", + "githubUsername": "BendingBender" + } + ], + "main": "", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/tunnel" + }, + "scripts": {}, + "dependencies": { + "@types/node": "*" + }, + "typesPublisherContentHash": "049a52929c6badcca87fc0ebc590bc49744a75f883bf18df77393ba05ddb4e7b", + "typeScriptVersion": "3.6" +} \ No newline at end of file diff --git a/node_modules/abort-controller/LICENSE b/node_modules/abort-controller/LICENSE new file mode 100644 index 0000000000..c914149a6f --- /dev/null +++ b/node_modules/abort-controller/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Toru Nagashima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/abort-controller/README.md b/node_modules/abort-controller/README.md new file mode 100644 index 0000000000..9de3e45fcf --- /dev/null +++ b/node_modules/abort-controller/README.md @@ -0,0 +1,98 @@ +# abort-controller + +[![npm version](https://img.shields.io/npm/v/abort-controller.svg)](https://www.npmjs.com/package/abort-controller) +[![Downloads/month](https://img.shields.io/npm/dm/abort-controller.svg)](http://www.npmtrends.com/abort-controller) +[![Build Status](https://travis-ci.org/mysticatea/abort-controller.svg?branch=master)](https://travis-ci.org/mysticatea/abort-controller) +[![Coverage Status](https://codecov.io/gh/mysticatea/abort-controller/branch/master/graph/badge.svg)](https://codecov.io/gh/mysticatea/abort-controller) +[![Dependency Status](https://david-dm.org/mysticatea/abort-controller.svg)](https://david-dm.org/mysticatea/abort-controller) + +An implementation of [WHATWG AbortController interface](https://dom.spec.whatwg.org/#interface-abortcontroller). + +```js +import AbortController from "abort-controller" + +const controller = new AbortController() +const signal = controller.signal + +signal.addEventListener("abort", () => { + console.log("aborted!") +}) + +controller.abort() +``` + +> https://jsfiddle.net/1r2994qp/1/ + +## 💿 Installation + +Use [npm](https://www.npmjs.com/) to install then use a bundler. + +``` +npm install abort-controller +``` + +Or download from [`dist` directory](./dist). + +- [dist/abort-controller.mjs](dist/abort-controller.mjs) ... ES modules version. +- [dist/abort-controller.js](dist/abort-controller.js) ... Common JS version. +- [dist/abort-controller.umd.js](dist/abort-controller.umd.js) ... UMD (Universal Module Definition) version. This is transpiled by [Babel](https://babeljs.io/) for IE 11. + +## 📖 Usage + +### Basic + +```js +import AbortController from "abort-controller" +// or +const AbortController = require("abort-controller") + +// or UMD version defines a global variable: +const AbortController = window.AbortControllerShim +``` + +If your bundler recognizes `browser` field of `package.json`, the imported `AbortController` is the native one and it doesn't contain shim (even if the native implementation was nothing). +If you wanted to polyfill `AbortController` for IE, use `abort-controller/polyfill`. + +### Polyfilling + +Importing `abort-controller/polyfill` assigns the `AbortController` shim to the `AbortController` global variable if the native implementation was nothing. + +```js +import "abort-controller/polyfill" +// or +require("abort-controller/polyfill") +``` + +### API + +#### AbortController + +> https://dom.spec.whatwg.org/#interface-abortcontroller + +##### controller.signal + +The [AbortSignal](https://dom.spec.whatwg.org/#interface-AbortSignal) object which is associated to this controller. + +##### controller.abort() + +Notify `abort` event to listeners that the `signal` has. + +## 📰 Changelog + +- See [GitHub releases](https://github.com/mysticatea/abort-controller/releases). 
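+
+As a small end-to-end sketch (not from the upstream README; the URL is a placeholder), the controller can cancel a `node-fetch` request via its `signal`:
+
+```js
+const AbortController = require("abort-controller")
+const fetch = require("node-fetch")
+
+const controller = new AbortController()
+
+// give the request five seconds before aborting it
+const timer = setTimeout(() => controller.abort(), 5000)
+
+fetch("https://example.com/slow-endpoint", { signal: controller.signal })
+    .then(res => res.text())
+    .then(body => console.log("got", body.length, "bytes"))
+    .catch(error => {
+        if (error.name === "AbortError") {
+            console.error("request was aborted")
+        } else {
+            console.error(error)
+        }
+    })
+    .then(() => clearTimeout(timer))
+```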
+ +## 🍻 Contributing + +Contributing is welcome ❤️ + +Please use GitHub issues/PRs. + +### Development tools + +- `npm install` installs dependencies for development. +- `npm test` runs tests and measures code coverage. +- `npm run clean` removes temporary files of tests. +- `npm run coverage` opens code coverage of the previous test with your default browser. +- `npm run lint` runs ESLint. +- `npm run build` generates `dist` codes. +- `npm run watch` runs tests on each file change. diff --git a/node_modules/abort-controller/browser.js b/node_modules/abort-controller/browser.js new file mode 100644 index 0000000000..b0c5ec37d9 --- /dev/null +++ b/node_modules/abort-controller/browser.js @@ -0,0 +1,13 @@ +/*globals self, window */ +"use strict" + +/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +module.exports = AbortController +module.exports.AbortSignal = AbortSignal +module.exports.default = AbortController diff --git a/node_modules/abort-controller/browser.mjs b/node_modules/abort-controller/browser.mjs new file mode 100644 index 0000000000..a8f321afed --- /dev/null +++ b/node_modules/abort-controller/browser.mjs @@ -0,0 +1,11 @@ +/*globals self, window */ + +/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +export default AbortController +export { AbortController, AbortSignal } diff --git a/node_modules/abort-controller/dist/abort-controller.d.ts b/node_modules/abort-controller/dist/abort-controller.d.ts new file mode 100644 index 0000000000..75852fb599 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.d.ts @@ -0,0 +1,43 @@ +import { EventTarget } from "event-target-shim" + +type Events = { + abort: any +} +type EventAttributes = { + onabort: any +} +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +declare class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() + /** + * Returns `true` if this `AbortSignal`"s `AbortController` has signaled to abort, and `false` otherwise. + */ + readonly aborted: boolean +} +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +declare class AbortController { + /** + * Initialize this controller. + */ + constructor() + /** + * Returns the `AbortSignal` object associated with this object. + */ + readonly signal: AbortSignal + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort(): void +} + +export default AbortController +export { AbortController, AbortSignal } diff --git a/node_modules/abort-controller/dist/abort-controller.js b/node_modules/abort-controller/dist/abort-controller.js new file mode 100644 index 0000000000..49af739558 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.js @@ -0,0 +1,127 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var eventTargetShim = require('event-target-shim'); + +/** + * The signal class. 
+ * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports.default = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map diff --git a/node_modules/abort-controller/dist/abort-controller.js.map b/node_modules/abort-controller/dist/abort-controller.js.map new file mode 100644 index 0000000000..cfdcafdc61 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.js.map @@ -0,0 +1 @@ +{"version":3,"file":"abort-controller.js","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["EventTarget","defineEventAttribute"],"mappings":";;;;;;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQA,2BAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACDC,oCAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnDD,2BAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/abort-controller/dist/abort-controller.mjs b/node_modules/abort-controller/dist/abort-controller.mjs new file mode 100644 index 0000000000..88ba22d557 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.mjs @@ -0,0 +1,118 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +import { EventTarget, defineEventAttribute } from 'event-target-shim'; + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. 
+ */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +export default AbortController; +export { AbortController, AbortSignal }; +//# sourceMappingURL=abort-controller.mjs.map diff --git a/node_modules/abort-controller/dist/abort-controller.mjs.map b/node_modules/abort-controller/dist/abort-controller.mjs.map new file mode 100644 index 0000000000..1e8fa6b00f --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"abort-controller.mjs","sources":["../src/abort-signal.ts","../src/abort-controller.ts"],"sourcesContent":["import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":[],"mappings":";;;;;;AAkBA;;;;AAIA,MAAqB,WAAY,SAAQ,WAAoC;;;;IAIzE;QACI,KAAK,EAAE,CAAA;QACP,MAAM,IAAI,SAAS,CAAC,4CAA4C,CAAC,CAAA;KACpE;;;;IAKD,IAAW,OAAO;QACd,MAAM,OAAO,GAAG,YAAY,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;QACtC,IAAI,OAAO,OAAO,KAAK,SAAS,EAAE;YAC9B,MAAM,IAAI,SAAS,CACf,0DACI,IAAI,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,IACpC,EAAE,CACL,CAAA;SACJ;QACD,OAAO,OAAO,CAAA;KACjB;CACJ;AACD,oBAAoB,CAAC,WAAW,CAAC,SAAS,EAAE,OAAO,CAAC,CAAA;;;;AAKpD,SAAgB,iBAAiB;IAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,CAAC,CAAA;IACnD,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;IACxB,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC/B,OAAO,MAAM,CAAA;CAChB;;;;AAKD,SAAgB,WAAW,CAAC,MAAmB;IAC3C,IAAI,YAAY,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,KAAK,EAAE;QACpC,OAAM;KACT;IAED,YAAY,CAAC,GAAG,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IAC9B,MAAM,CAAC,aAAa,CAAU,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC,CAAA;CACnD;;;;AAKD,MAAM,YAAY,GAAG,IAAI,OAAO,EAAwB,CAAA;;AAGxD,MAAM,CAAC,gBAAgB,CAAC,WAAW,CAAC,SAAS,EAAE;IAC3C,OAAO,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAChC,CAAC,CAAA;;AAGF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QAC7D,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,aAAa;KACvB,CAAC,CAAA;CACL;;ACpFD;;;;AAIA,MAAqB,eAAe;;;;IAIhC;QACI,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,iBAAiB,EAAE,CAAC,CAAA;KACzC;;;;IAKD,IAAW,MAAM;QACb,OAAO,SAAS,CAAC,IAAI,CAAC,CAAA;KACzB;;;;IAKM,KAAK;QACR,WAAW,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;KAC/B;CACJ;;;;AAKD,MAAM,OAAO,GAAG,IAAI,OAAO,EAAgC,CAAA;;;;AAK3D,SAAS,SAAS,CAAC,UAA2B;IAC1C,MAAM,MAAM,GAAG,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAA;IACtC,IAAI,MAAM,IAAI,IAAI,EAAE;QAChB,MAAM,IAAI,SAAS,CACf,8DACI,UAAU,KAAK,IAAI,GAAG,MAAM,GAAG,OAAO,UAC1C,EAAE,CACL,CAAA;KACJ;IACD,OAAO,MAAM,CAAA;CAChB;;AAGD,MAAM,CAAC,gBAAgB,CAAC,eAAe,CAAC,SAAS,EAAE;IAC/C,MAAM,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;IAC5B,KAAK,EAAE,EAAE,UAAU,EAAE,IAAI,EAAE;CAC9B,CAAC,CAAA;AAEF,IAAI,OAAO,MAAM,KAAK,UAAU,IAAI,OAAO,MAAM,CAAC,WAAW,KAAK,QAAQ,EAAE;IACxE,MAAM,CAAC,cAAc,CAAC,eAAe,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,EAAE;QACjE,YAAY,EAAE,IAAI;QAClB,KAAK,EAAE,iBAAiB;KAC3B,CAAC,CAAA;CACL;;;;;"} \ No newline at end of file diff --git a/node_modules/abort-controller/dist/abort-controller.umd.js b/node_modules/abort-controller/dist/abort-controller.umd.js new file mode 100644 index 0000000000..f643cfd6b6 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.umd.js @@ -0,0 +1,5 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.AbortControllerShim={}))})(this,function(a){'use strict';function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function d(a,b){for(var c,d=0;d\n * @copyright 2015 Toru Nagashima. 
All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n 
return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexport default EventTarget;\nexport { defineEventAttribute, EventTarget };\n//# sourceMappingURL=event-target-shim.mjs.map\n","import {\n // Event,\n EventTarget,\n // Type,\n defineEventAttribute,\n} from \"event-target-shim\"\n\n// Known Limitation\n// Use `any` because the type of `AbortSignal` in `lib.dom.d.ts` is wrong and\n// to make assignable our `AbortSignal` into that.\n// https://github.com/Microsoft/TSJS-lib-generator/pull/623\ntype Events = {\n abort: any // Event & Type<\"abort\">\n}\ntype EventAttributes = {\n onabort: any // Event & Type<\"abort\">\n}\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nexport default class AbortSignal extends EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n public constructor() {\n super()\n throw new TypeError(\"AbortSignal cannot be constructed directly\")\n }\n\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n public get aborted(): boolean {\n const aborted = abortedFlags.get(this)\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(\n `Expected 'this' to be an 'AbortSignal' object, but got ${\n this === null ? 
\"null\" : typeof this\n }`,\n )\n }\n return aborted\n }\n}\ndefineEventAttribute(AbortSignal.prototype, \"abort\")\n\n/**\n * Create an AbortSignal object.\n */\nexport function createAbortSignal(): AbortSignal {\n const signal = Object.create(AbortSignal.prototype)\n EventTarget.call(signal)\n abortedFlags.set(signal, false)\n return signal\n}\n\n/**\n * Abort a given signal.\n */\nexport function abortSignal(signal: AbortSignal): void {\n if (abortedFlags.get(signal) !== false) {\n return\n }\n\n abortedFlags.set(signal, true)\n signal.dispatchEvent<\"abort\">({ type: \"abort\" })\n}\n\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap()\n\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n})\n\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n })\n}\n","import AbortSignal, { abortSignal, createAbortSignal } from \"./abort-signal\"\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nexport default class AbortController {\n /**\n * Initialize this controller.\n */\n public constructor() {\n signals.set(this, createAbortSignal())\n }\n\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n public get signal(): AbortSignal {\n return getSignal(this)\n }\n\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n public abort(): void {\n abortSignal(getSignal(this))\n }\n}\n\n/**\n * Associated signals.\n */\nconst signals = new WeakMap()\n\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller: AbortController): AbortSignal {\n const signal = signals.get(controller)\n if (signal == null) {\n throw new TypeError(\n `Expected 'this' to be an 'AbortController' object, but got ${\n controller === null ? 
\"null\" : typeof controller\n }`,\n )\n }\n return signal\n}\n\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n})\n\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n })\n}\n\nexport { AbortController, AbortSignal }\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","createAbortSignal","signal","AbortSignal","EventTarget","abortedFlags","abortSignal","dispatchEvent","type","getSignal","controller","signals","TypeError","WeakMap","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","returnValue","initEvent","window","setPrototypeOf","aborted","defineEventAttribute","defineProperties","Symbol","_typeof","toStringTag","AbortController","abort"],"mappings":";;;+3CAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZ,CAAgBH,CAAhB,QACbI,CAAAA,OAAO,CAACC,MAAR,CACY,IAAR,EAAAJ,CADJ,CAEI,6CAFJ,CAGID,CAHJ,EAKOC,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxB,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAAL,CAAWS,UAbS,GAiBzBF,CAAI,CAACG,QAAL,GAjByB,CAkBgB,UAArC,QAAOH,CAAAA,CAAI,CAACP,KAAL,CAAWW,cAlBG,EAmBrBJ,CAAI,CAACP,KAAL,CAAWW,cAAX,EAnBqB,QAGE,WAAnB,QAAOP,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAAR,CACI,oEADJ,CAEIL,CAAI,CAACC,eAFT,CANiB,EAiC7B,QAASK,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZ,CAAgB,IAAhB,CAAsB,CAClBD,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,SAAN,EAAmBC,IAAI,CAACC,GAAL,EATZ,CAAtB,CAD+B,CAc/BC,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4B,WAA5B,CAAyC,CAAEC,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzC,CAd+B,QAmBrBC,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAY5B,CAAZ,EACJ6B,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAP,CAAsB,IAAtB,CAA4BG,CAA5B,CAAiCI,CAAwB,CAACJ,CAAD,CAAzD,EAyOZ,QAASI,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,CAFR,CAAA,CAIHZ,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe2B,CAAf,EAAsBF,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAF,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL,CAAWO,KAAX,CAAiBlC,CAAjB,CAAwBmC,SAAxB,CAHR,CAAA,CAKHH,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAV,CAA
e,IAAf,CAAqB1B,CAArB,CAAkCd,CAAlC,KAPE4B,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAP,CAAYU,CAAZ,KACO,CAAhB,GAAAV,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZ,CAAwBlB,MAAM,CAACmB,MAAP,CAAcL,CAAS,CAACI,SAAxB,CAAmC,CACvDE,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnC,CAXa,KAgBhC,GACKjB,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAP,CAAgCR,CAAhC,CAAuCX,CAAvC,CADY,CAEzBoB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAP,CACIe,CAAW,CAACE,SADhB,CAEId,CAFJ,CAGIoB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlC,QAUDY,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAAT,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT,CAAamC,CAAb,QACC,KAAX,EAAAW,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBb,CAAtB,CAAD,CAAX,CAA2CA,CAA3C,EACvBY,CAAQ,CAACnC,GAAT,CAAauB,CAAb,CAAoBW,CAApB,GAEGA,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP,CAAsBnD,CAAtB,CAAD,QACnB,IAAIqD,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAF,CAAUmB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAF,CAAUgB,UAAV,CAAuBA,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAF,CAAUiB,aAAV,CAA0BA,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAF,CAAUQ,eAAV,CAA4BA,ysCC1ahBkD,CAAAA,OACNC,CAAAA,CAAM,CAAGpC,MAAM,CAACmB,MAAPnB,CAAcqC,CAAW,CAACnB,SAA1BlB,QACfsC,CAAAA,CAAW,CAACrB,IAAZqB,CAAiBF,CAAjBE,EACAC,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACOH,UAMKI,CAAAA,EAAYJ,GACpBG,KAAAA,CAAY,CAAC3D,GAAb2D,CAAiBH,CAAjBG,IAIJA,CAAY,CAAC/C,GAAb+C,CAAiBH,CAAjBG,KACAH,CAAM,CAACK,aAAPL,CAA8B,CAAEM,IAAI,CAAE,OAAR,CAA9BN,GC9BJ,QAASO,CAAAA,CAAT,CAAmBC,CAAnB,KACUR,CAAAA,CAAM,CAAGS,CAAO,CAACjE,GAARiE,CAAYD,CAAZC,KACD,IAAVT,EAAAA,OACM,IAAIU,CAAAA,SAAJ,sEAEiB,IAAfF,GAAAA,CAAU,CAAY,MAAZ,GAA4BA,GAFxC,QAMHR,CAAAA,KF3BLzD,CAAAA,CAAW,CAAG,GAAIoE,CAAAA,QAOlBpB,CAAQ,CAAG,GAAIoB,CAAAA,QAkFrBzD,CAAK,CAAC4B,SAAN,CAAkB,IAKVwB,CAAAA,MAAO,OACAlE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeiE,IANZ,CAAA,IAaVM,CAAAA,QAAS,OACFxE,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASkB,aAtBN,CAAA,CA4BduD,YA5Bc,WA4BC,IACLvD,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAF,CAASkB,cADpB,MAEU,KAAjB,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVwD,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV5D,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASiB,UAzEN,CAAA,CAgFd6D,eAhFc,WAgFI,IACRtE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHc,CAI4B,UAAtC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAW6E,eAJR,EAKVtE,CAAI,CAACP,KAAL,CAAW6E,eAAX,EArFM,CAAA,CA6FdC,wBA7Fc,WA6Fa,IACjBvE,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,GAHuB,CAIvBX,CAAI,CAACY,gBAAL,GAJuB,CAK4B,UAA/C,QAAOZ,CAAAA,CAAI,CAACP,KAAL,CAAW8E,wBALC,EAMnBvE,CAAI,CAACP,KAAL,CAAW8E,wBAAX,EAnGM,CAAA,IA2GVC,CAAAA,SAAU,SACKhF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAe+E,OA5GpB,CAAA,IAmHVtE,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT,CAAeS,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIViF,CAAAA,kBAAmB,OACZjF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASW,QApIN,CAAA,IA2IVuE,CAAAA,UAAW,SACIlF,CAAE,CAAC,IAAD,CAAF,CAASC,KAAT
,CAAeiF,QA5IpB,CAAA,IAmJV7D,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASqB,SApJN,CAAA,IA4JV8D,CAAAA,YAAa,OACNnF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASe,WA7JN,CAAA,IAqKVqE,CAAAA,cAAe,OACRpF,CAAAA,CAAE,CAAC,IAAD,CAAF,CAASmB,OAtKN,CAAA,IAwKViE,CAAAA,aAAa1D,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAAL,IACuC,SAAnC,QAAOX,CAAAA,CAAI,CAACP,KAAL,CAAWmF,eAClB5E,CAAI,CAACP,KAAL,CAAWmF,YAAX,KAhLM,CAAA,IAyLVC,CAAAA,aAAc,OACP,CAACrF,CAAE,CAAC,IAAD,CAAF,CAASW,QA1LP,CAAA,IA4LV0E,CAAAA,YAAY3D,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMdsF,SAzMc,WAyMF,EAzME,EA+MlB9D,MAAM,CAACC,cAAP,CAAsBX,CAAK,CAAC4B,SAA5B,CAAuC,aAAvC,CAAsD,CAClDhB,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtD,EAOsB,WAAlB,QAAO0C,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAACzE,QAC/CU,MAAM,CAACgE,cAAP,CAAsB1E,CAAK,CAAC4B,SAA5B,CAAuC6C,MAAM,CAACzE,KAAP,CAAa4B,SAApD,EAGAS,CAAQ,CAACnC,GAAT,CAAauE,MAAM,CAACzE,KAAP,CAAa4B,SAA1B,CAAqC5B,CAArC,wiDChTiB+C,CAAAA,2EAMP,GAAIS,CAAAA,SAAJ,CAAc,4CAAd,sDAOAmB,CAAAA,CAAO,CAAG1B,CAAY,CAAC3D,GAAb2D,CAAiB,IAAjBA,KACO,SAAnB,QAAO0B,CAAAA,OACD,IAAInB,CAAAA,SAAJ,kEAEW,IAAT,QAAgB,MAAhB,GAAgC,MAFlC,QAMHmB,CAAAA,SArB0B3B,GAwBzC4B,CAAoB,CAAC7B,CAAW,CAACnB,SAAb,CAAwB,OAAxB,EA2BpB,GAAMqB,CAAAA,CAAY,CAAG,GAAIQ,CAAAA,OAAzB,CAGA/C,MAAM,CAACmE,gBAAPnE,CAAwBqC,CAAW,CAACnB,SAApClB,CAA+C,CAC3CiE,OAAO,CAAE,CAAE9D,UAAU,GAAZ,CADkC,CAA/CH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBqC,CAAW,CAACnB,SAAlClB,CAA6CoE,MAAM,CAACE,WAApDtE,CAAiE,CAC7DS,YAAY,GADiD,CAE7DP,KAAK,CAAE,aAFsD,CAAjEF,KC5EiBuE,CAAAA,oCAKb1B,CAAO,CAACrD,GAARqD,CAAY,IAAZA,CAAkBV,CAAiB,EAAnCU,4CAcAL,CAAW,CAACG,CAAS,CAAC,IAAD,CAAV,uCAPJA,CAAAA,CAAS,CAAC,IAAD,WAclBE,CAAO,CAAG,GAAIE,CAAAA,WAkBpB/C,MAAM,CAACmE,gBAAPnE,CAAwBuE,CAAe,CAACrD,SAAxClB,CAAmD,CAC/CoC,MAAM,CAAE,CAAEjC,UAAU,GAAZ,CADuC,CAE/CqE,KAAK,CAAE,CAAErE,UAAU,GAAZ,CAFwC,CAAnDH,EAKsB,UAAlB,QAAOoE,CAAAA,MAAP,EAA8D,QAA9B,GAAAC,EAAOD,MAAM,CAACE,cAC9CtE,MAAM,CAACC,cAAPD,CAAsBuE,CAAe,CAACrD,SAAtClB,CAAiDoE,MAAM,CAACE,WAAxDtE,CAAqE,CACjES,YAAY,GADqD,CAEjEP,KAAK,CAAE,iBAF0D,CAArEF"} \ No newline at end of file diff --git a/node_modules/abort-controller/package.json b/node_modules/abort-controller/package.json new file mode 100644 index 0000000000..fc705e03f3 --- /dev/null +++ b/node_modules/abort-controller/package.json @@ -0,0 +1,97 @@ +{ + "name": "abort-controller", + "version": "3.0.0", + "description": "An implementation of WHATWG AbortController interface.", + "main": "dist/abort-controller", + "files": [ + "dist", + "polyfill.*", + "browser.*" + ], + "engines": { + "node": ">=6.5" + }, + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "browser": "./browser.js", + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.3.0", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "@types/mocha": "^5.2.5", + "@types/node": "^10.12.18", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "dts-bundle-generator": "^2.0.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-firefox-launcher": "^1.1.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.2", + "rollup-plugin-babel": "^4.3.2", + 
"rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-typescript": "^1.0.0", + "rollup-watch": "^4.3.1", + "ts-node": "^8.0.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run -s build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "opener coverage/lcov-report/index.html", + "lint": "eslint . --ext .ts", + "build": "run-s -s build:*", + "build:rollup": "rollup -c", + "build:dts": "dts-bundle-generator -o dist/abort-controller.d.ts src/abort-controller.ts && ts-node scripts/fix-dts", + "test": "run-s -s lint test:*", + "test:mocha": "nyc mocha test/*.ts", + "test:karma": "karma start --single-run", + "watch": "run-p -s watch:*", + "watch:mocha": "mocha test/*.ts --require ts-node/register --watch-extensions ts --watch --growl", + "watch:karma": "karma start --watch", + "codecov": "codecov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/mysticatea/abort-controller.git" + }, + "keywords": [ + "w3c", + "whatwg", + "event", + "events", + "abort", + "cancel", + "abortcontroller", + "abortsignal", + "controller", + "signal", + "shim" + ], + "author": "Toru Nagashima (https://github.com/mysticatea)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/abort-controller/issues" + }, + "homepage": "https://github.com/mysticatea/abort-controller#readme" +} diff --git a/node_modules/abort-controller/polyfill.js b/node_modules/abort-controller/polyfill.js new file mode 100644 index 0000000000..3ca892330b --- /dev/null +++ b/node_modules/abort-controller/polyfill.js @@ -0,0 +1,21 @@ +/*globals require, self, window */ +"use strict" + +const ac = require("./dist/abort-controller") + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/abort-controller/polyfill.mjs b/node_modules/abort-controller/polyfill.mjs new file mode 100644 index 0000000000..0602a64ddd --- /dev/null +++ b/node_modules/abort-controller/polyfill.mjs @@ -0,0 +1,19 @@ +/*globals self, window */ +import * as ac from "./dist/abort-controller" + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? 
global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/asynckit/LICENSE b/node_modules/asynckit/LICENSE new file mode 100644 index 0000000000..c9eca5dd99 --- /dev/null +++ b/node_modules/asynckit/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Alex Indigo + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/asynckit/README.md b/node_modules/asynckit/README.md new file mode 100644 index 0000000000..ddcc7e6b95 --- /dev/null +++ b/node_modules/asynckit/README.md @@ -0,0 +1,233 @@ +# asynckit [![NPM Module](https://img.shields.io/npm/v/asynckit.svg?style=flat)](https://www.npmjs.com/package/asynckit) + +Minimal async jobs utility library, with streams support. + +[![PhantomJS Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=browser&style=flat)](https://travis-ci.org/alexindigo/asynckit) +[![Linux Build](https://img.shields.io/travis/alexindigo/asynckit/v0.4.0.svg?label=linux:0.12-6.x&style=flat)](https://travis-ci.org/alexindigo/asynckit) +[![Windows Build](https://img.shields.io/appveyor/ci/alexindigo/asynckit/v0.4.0.svg?label=windows:0.12-6.x&style=flat)](https://ci.appveyor.com/project/alexindigo/asynckit) + +[![Coverage Status](https://img.shields.io/coveralls/alexindigo/asynckit/v0.4.0.svg?label=code+coverage&style=flat)](https://coveralls.io/github/alexindigo/asynckit?branch=master) +[![Dependency Status](https://img.shields.io/david/alexindigo/asynckit/v0.4.0.svg?style=flat)](https://david-dm.org/alexindigo/asynckit) +[![bitHound Overall Score](https://www.bithound.io/github/alexindigo/asynckit/badges/score.svg)](https://www.bithound.io/github/alexindigo/asynckit) + + + +AsyncKit provides harness for `parallel` and `serial` iterators over list of items represented by arrays or objects. +Optionally it accepts abort function (should be synchronously return by iterator for each item), and terminates left over jobs upon an error event. For specific iteration order built-in (`ascending` and `descending`) and custom sort helpers also supported, via `asynckit.serialOrdered` method. + +It ensures async operations to keep behavior more stable and prevent `Maximum call stack size exceeded` errors, from sync iterators. 
+ +| compression | size | +| :----------------- | -------: | +| asynckit.js | 12.34 kB | +| asynckit.min.js | 4.11 kB | +| asynckit.min.js.gz | 1.47 kB | + + +## Install + +```sh +$ npm install --save asynckit +``` + +## Examples + +### Parallel Jobs + +Runs iterator over provided array in parallel. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will terminate rest of the active jobs (if abort function is provided) +and return error along with salvaged data to the main callback function. + +#### Input Array + +```javascript +var parallel = require('asynckit').parallel + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , target = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// async job accepts one element from the array +// and a callback function +function asyncJob(item, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-array.js](test/test-parallel-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. + +```javascript +var parallel = require('asynckit/parallel') + , assert = require('assert') + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 2, 4, 8, 16, 32, 64 ] + , expectedKeys = [ 'first', 'one', 'two', 'four', 'eight', 'sixteen', 'thirtyTwo', 'sixtyFour' ] + , target = [] + , keys = [] + ; + +parallel(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); + assert.deepEqual(keys, expectedKeys); +}); + +// supports full value, key, callback (shortcut) interface +function asyncJob(item, key, cb) +{ + // different delays (in ms) per item + var delay = item * 25; + + // pretend different jobs take different time to finish + // and not in consequential order + var timeoutId = setTimeout(function() { + keys.push(key); + target.push(item); + cb(null, item * 2); + }, delay); + + // allow to cancel "leftover" jobs upon error + // return function, invoking of which will abort this job + return clearTimeout.bind(null, timeoutId); +} +``` + +More examples could be found in [test/test-parallel-object.js](test/test-parallel-object.js). + +### Serial Jobs + +Runs iterator over provided array sequentially. Stores output in the `result` array, +on the matching positions. In unlikely event of an error from one of the jobs, +will not proceed to the rest of the items in the list +and return error along with salvaged data to the main callback function. 
+ +#### Input Array + +```javascript +var serial = require('asynckit/serial') + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// extended interface (item, key, callback) +// also supported for arrays +function asyncJob(item, key, cb) +{ + target.push(key); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-array.js](test/test-serial-array.js). + +#### Input Object + +Also it supports named jobs, listed via object. + +```javascript +var serial = require('asynckit').serial + , assert = require('assert') + ; + +var source = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , expectedResult = [ 2, 2, 8, 32, 128, 64, 16, 4 ] + , expectedTarget = [ 0, 1, 2, 3, 4, 5, 6, 7 ] + , target = [] + ; + +var source = { first: 1, one: 1, four: 4, sixteen: 16, sixtyFour: 64, thirtyTwo: 32, eight: 8, two: 2 } + , expectedResult = { first: 2, one: 2, four: 8, sixteen: 32, sixtyFour: 128, thirtyTwo: 64, eight: 16, two: 4 } + , expectedTarget = [ 1, 1, 4, 16, 64, 32, 8, 2 ] + , target = [] + ; + + +serial(source, asyncJob, function(err, result) +{ + assert.deepEqual(result, expectedResult); + assert.deepEqual(target, expectedTarget); +}); + +// shortcut interface (item, callback) +// works for object as well as for the arrays +function asyncJob(item, cb) +{ + target.push(item); + + // it will be automatically made async + // even it iterator "returns" in the same event loop + cb(null, item * 2); +} +``` + +More examples could be found in [test/test-serial-object.js](test/test-serial-object.js). + +_Note: Since _object_ is an _unordered_ collection of properties, +it may produce unexpected results with sequential iterations. +Whenever order of the jobs' execution is important please use `serialOrdered` method._ + +### Ordered Serial Iterations + +TBD + +For example [compare-property](compare-property) package. + +### Streaming interface + +TBD + +## Want to Know More? + +More examples can be found in [test folder](test/). + +Or open an [issue](https://github.com/alexindigo/asynckit/issues) with questions and/or suggestions. + +## License + +AsyncKit is licensed under the MIT license. 
diff --git a/node_modules/asynckit/bench.js b/node_modules/asynckit/bench.js new file mode 100644 index 0000000000..c612f1a55f --- /dev/null +++ b/node_modules/asynckit/bench.js @@ -0,0 +1,76 @@ +/* eslint no-console: "off" */ + +var asynckit = require('./') + , async = require('async') + , assert = require('assert') + , expected = 0 + ; + +var Benchmark = require('benchmark'); +var suite = new Benchmark.Suite; + +var source = []; +for (var z = 1; z < 100; z++) +{ + source.push(z); + expected += z; +} + +suite +// add tests + +.add('async.map', function(deferred) +{ + var total = 0; + + async.map(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +.add('asynckit.parallel', function(deferred) +{ + var total = 0; + + asynckit.parallel(source, + function(i, cb) + { + setImmediate(function() + { + total += i; + cb(null, total); + }); + }, + function(err, result) + { + assert.ifError(err); + assert.equal(result[result.length - 1], expected); + deferred.resolve(); + }); +}, {'defer': true}) + + +// add listeners +.on('cycle', function(ev) +{ + console.log(String(ev.target)); +}) +.on('complete', function() +{ + console.log('Fastest is ' + this.filter('fastest').map('name')); +}) +// run async +.run({ 'async': true }); diff --git a/node_modules/asynckit/index.js b/node_modules/asynckit/index.js new file mode 100644 index 0000000000..455f9454ee --- /dev/null +++ b/node_modules/asynckit/index.js @@ -0,0 +1,6 @@ +module.exports = +{ + parallel : require('./parallel.js'), + serial : require('./serial.js'), + serialOrdered : require('./serialOrdered.js') +}; diff --git a/node_modules/asynckit/lib/abort.js b/node_modules/asynckit/lib/abort.js new file mode 100644 index 0000000000..114367e5fb --- /dev/null +++ b/node_modules/asynckit/lib/abort.js @@ -0,0 +1,29 @@ +// API +module.exports = abort; + +/** + * Aborts leftover active jobs + * + * @param {object} state - current state object + */ +function abort(state) +{ + Object.keys(state.jobs).forEach(clean.bind(state)); + + // reset leftover jobs + state.jobs = {}; +} + +/** + * Cleans up leftover job by invoking abort function for the provided job id + * + * @this state + * @param {string|number} key - job id to abort + */ +function clean(key) +{ + if (typeof this.jobs[key] == 'function') + { + this.jobs[key](); + } +} diff --git a/node_modules/asynckit/lib/async.js b/node_modules/asynckit/lib/async.js new file mode 100644 index 0000000000..7f1288a4ce --- /dev/null +++ b/node_modules/asynckit/lib/async.js @@ -0,0 +1,34 @@ +var defer = require('./defer.js'); + +// API +module.exports = async; + +/** + * Runs provided callback asynchronously + * even if callback itself is not + * + * @param {function} callback - callback to invoke + * @returns {function} - augmented callback + */ +function async(callback) +{ + var isAsync = false; + + // check if async happened + defer(function() { isAsync = true; }); + + return function async_callback(err, result) + { + if (isAsync) + { + callback(err, result); + } + else + { + defer(function nextTick_callback() + { + callback(err, result); + }); + } + }; +} diff --git a/node_modules/asynckit/lib/defer.js b/node_modules/asynckit/lib/defer.js new file mode 100644 index 0000000000..b67110c7ad --- /dev/null +++ b/node_modules/asynckit/lib/defer.js @@ -0,0 +1,26 @@ +module.exports = defer; + +/** + * Runs provided 
function on next iteration of the event loop + * + * @param {function} fn - function to run + */ +function defer(fn) +{ + var nextTick = typeof setImmediate == 'function' + ? setImmediate + : ( + typeof process == 'object' && typeof process.nextTick == 'function' + ? process.nextTick + : null + ); + + if (nextTick) + { + nextTick(fn); + } + else + { + setTimeout(fn, 0); + } +} diff --git a/node_modules/asynckit/lib/iterate.js b/node_modules/asynckit/lib/iterate.js new file mode 100644 index 0000000000..5d2839a590 --- /dev/null +++ b/node_modules/asynckit/lib/iterate.js @@ -0,0 +1,75 @@ +var async = require('./async.js') + , abort = require('./abort.js') + ; + +// API +module.exports = iterate; + +/** + * Iterates over each job object + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {object} state - current job status + * @param {function} callback - invoked when all elements processed + */ +function iterate(list, iterator, state, callback) +{ + // store current index + var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; + + state.jobs[key] = runJob(iterator, key, list[key], function(error, output) + { + // don't repeat yourself + // skip secondary callbacks + if (!(key in state.jobs)) + { + return; + } + + // clean up jobs + delete state.jobs[key]; + + if (error) + { + // don't process rest of the results + // stop still active jobs + // and reset the list + abort(state); + } + else + { + state.results[key] = output; + } + + // return salvaged results + callback(error, state.results); + }); +} + +/** + * Runs iterator over provided job element + * + * @param {function} iterator - iterator to invoke + * @param {string|number} key - key/index of the element in the list of jobs + * @param {mixed} item - job description + * @param {function} callback - invoked after iterator is done with the job + * @returns {function|mixed} - job abort function or something else + */ +function runJob(iterator, key, item, callback) +{ + var aborter; + + // allow shortcut if iterator expects only two arguments + if (iterator.length == 2) + { + aborter = iterator(item, async(callback)); + } + // otherwise go with full three arguments + else + { + aborter = iterator(item, key, async(callback)); + } + + return aborter; +} diff --git a/node_modules/asynckit/lib/readable_asynckit.js b/node_modules/asynckit/lib/readable_asynckit.js new file mode 100644 index 0000000000..78ad240f0a --- /dev/null +++ b/node_modules/asynckit/lib/readable_asynckit.js @@ -0,0 +1,91 @@ +var streamify = require('./streamify.js') + , defer = require('./defer.js') + ; + +// API +module.exports = ReadableAsyncKit; + +/** + * Base constructor for all streams + * used to hold properties/methods + */ +function ReadableAsyncKit() +{ + ReadableAsyncKit.super_.apply(this, arguments); + + // list of active jobs + this.jobs = {}; + + // add stream methods + this.destroy = destroy; + this._start = _start; + this._read = _read; +} + +/** + * Destroys readable stream, + * by aborting outstanding jobs + * + * @returns {void} + */ +function destroy() +{ + if (this.destroyed) + { + return; + } + + this.destroyed = true; + + if (typeof this.terminator == 'function') + { + this.terminator(); + } +} + +/** + * Starts provided jobs in async manner + * + * @private + */ +function _start() +{ + // first argument – runner function + var runner = arguments[0] + // take away first argument + , args = Array.prototype.slice.call(arguments, 1) + // 
second argument - input data + , input = args[0] + // last argument - result callback + , endCb = streamify.callback.call(this, args[args.length - 1]) + ; + + args[args.length - 1] = endCb; + // third argument - iterator + args[1] = streamify.iterator.call(this, args[1]); + + // allow time for proper setup + defer(function() + { + if (!this.destroyed) + { + this.terminator = runner.apply(null, args); + } + else + { + endCb(null, Array.isArray(input) ? [] : {}); + } + }.bind(this)); +} + + +/** + * Implement _read to comply with Readable streams + * Doesn't really make sense for flowing object mode + * + * @private + */ +function _read() +{ + +} diff --git a/node_modules/asynckit/lib/readable_parallel.js b/node_modules/asynckit/lib/readable_parallel.js new file mode 100644 index 0000000000..5d2929f7a6 --- /dev/null +++ b/node_modules/asynckit/lib/readable_parallel.js @@ -0,0 +1,25 @@ +var parallel = require('../parallel.js'); + +// API +module.exports = ReadableParallel; + +/** + * Streaming wrapper to `asynckit.parallel` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableParallel(list, iterator, callback) +{ + if (!(this instanceof ReadableParallel)) + { + return new ReadableParallel(list, iterator, callback); + } + + // turn on object mode + ReadableParallel.super_.call(this, {objectMode: true}); + + this._start(parallel, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial.js b/node_modules/asynckit/lib/readable_serial.js new file mode 100644 index 0000000000..7822698204 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial.js @@ -0,0 +1,25 @@ +var serial = require('../serial.js'); + +// API +module.exports = ReadableSerial; + +/** + * Streaming wrapper to `asynckit.serial` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerial(list, iterator, callback) +{ + if (!(this instanceof ReadableSerial)) + { + return new ReadableSerial(list, iterator, callback); + } + + // turn on object mode + ReadableSerial.super_.call(this, {objectMode: true}); + + this._start(serial, list, iterator, callback); +} diff --git a/node_modules/asynckit/lib/readable_serial_ordered.js b/node_modules/asynckit/lib/readable_serial_ordered.js new file mode 100644 index 0000000000..3de89c4729 --- /dev/null +++ b/node_modules/asynckit/lib/readable_serial_ordered.js @@ -0,0 +1,29 @@ +var serialOrdered = require('../serialOrdered.js'); + +// API +module.exports = ReadableSerialOrdered; +// expose sort helpers +module.exports.ascending = serialOrdered.ascending; +module.exports.descending = serialOrdered.descending; + +/** + * Streaming wrapper to `asynckit.serialOrdered` + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {stream.Readable#} + */ +function ReadableSerialOrdered(list, iterator, sortMethod, callback) +{ + if (!(this instanceof ReadableSerialOrdered)) + { + return new ReadableSerialOrdered(list, iterator, sortMethod, callback); + } + + // turn on object 
mode + ReadableSerialOrdered.super_.call(this, {objectMode: true}); + + this._start(serialOrdered, list, iterator, sortMethod, callback); +} diff --git a/node_modules/asynckit/lib/state.js b/node_modules/asynckit/lib/state.js new file mode 100644 index 0000000000..cbea7ad8f6 --- /dev/null +++ b/node_modules/asynckit/lib/state.js @@ -0,0 +1,37 @@ +// API +module.exports = state; + +/** + * Creates initial state object + * for iteration over list + * + * @param {array|object} list - list to iterate over + * @param {function|null} sortMethod - function to use for keys sort, + * or `null` to keep them as is + * @returns {object} - initial state object + */ +function state(list, sortMethod) +{ + var isNamedList = !Array.isArray(list) + , initState = + { + index : 0, + keyedList: isNamedList || sortMethod ? Object.keys(list) : null, + jobs : {}, + results : isNamedList ? {} : [], + size : isNamedList ? Object.keys(list).length : list.length + } + ; + + if (sortMethod) + { + // sort array keys based on it's values + // sort object's keys just on own merit + initState.keyedList.sort(isNamedList ? sortMethod : function(a, b) + { + return sortMethod(list[a], list[b]); + }); + } + + return initState; +} diff --git a/node_modules/asynckit/lib/streamify.js b/node_modules/asynckit/lib/streamify.js new file mode 100644 index 0000000000..f56a1c92bf --- /dev/null +++ b/node_modules/asynckit/lib/streamify.js @@ -0,0 +1,141 @@ +var async = require('./async.js'); + +// API +module.exports = { + iterator: wrapIterator, + callback: wrapCallback +}; + +/** + * Wraps iterators with long signature + * + * @this ReadableAsyncKit# + * @param {function} iterator - function to wrap + * @returns {function} - wrapped function + */ +function wrapIterator(iterator) +{ + var stream = this; + + return function(item, key, cb) + { + var aborter + , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key)) + ; + + stream.jobs[key] = wrappedCb; + + // it's either shortcut (item, cb) + if (iterator.length == 2) + { + aborter = iterator(item, wrappedCb); + } + // or long format (item, key, cb) + else + { + aborter = iterator(item, key, wrappedCb); + } + + return aborter; + }; +} + +/** + * Wraps provided callback function + * allowing to execute snitch function before + * real callback + * + * @this ReadableAsyncKit# + * @param {function} callback - function to wrap + * @returns {function} - wrapped function + */ +function wrapCallback(callback) +{ + var stream = this; + + var wrapped = function(error, result) + { + return finisher.call(stream, error, result, callback); + }; + + return wrapped; +} + +/** + * Wraps provided iterator callback function + * makes sure snitch only called once, + * but passes secondary calls to the original callback + * + * @this ReadableAsyncKit# + * @param {function} callback - callback to wrap + * @param {number|string} key - iteration key + * @returns {function} wrapped callback + */ +function wrapIteratorCallback(callback, key) +{ + var stream = this; + + return function(error, output) + { + // don't repeat yourself + if (!(key in stream.jobs)) + { + callback(error, output); + return; + } + + // clean up jobs + delete stream.jobs[key]; + + return streamer.call(stream, error, {key: key, value: output}, callback); + }; +} + +/** + * Stream wrapper for iterator callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects iterator results + */ +function streamer(error, output, 
callback) +{ + if (error && !this.error) + { + this.error = error; + this.pause(); + this.emit('error', error); + // send back value only, as expected + callback(error, output && output.value); + return; + } + + // stream stuff + this.push(output); + + // back to original track + // send back value only, as expected + callback(error, output && output.value); +} + +/** + * Stream wrapper for finishing callback + * + * @this ReadableAsyncKit# + * @param {mixed} error - error response + * @param {mixed} output - iterator output + * @param {function} callback - callback that expects final results + */ +function finisher(error, output, callback) +{ + // signal end of the stream + // only for successfully finished streams + if (!error) + { + this.push(null); + } + + // back to original track + callback(error, output); +} diff --git a/node_modules/asynckit/lib/terminator.js b/node_modules/asynckit/lib/terminator.js new file mode 100644 index 0000000000..d6eb99219f --- /dev/null +++ b/node_modules/asynckit/lib/terminator.js @@ -0,0 +1,29 @@ +var abort = require('./abort.js') + , async = require('./async.js') + ; + +// API +module.exports = terminator; + +/** + * Terminates jobs in the attached state context + * + * @this AsyncKitState# + * @param {function} callback - final callback to invoke after termination + */ +function terminator(callback) +{ + if (!Object.keys(this.jobs).length) + { + return; + } + + // fast forward iteration index + this.index = this.size; + + // abort jobs + abort(this); + + // send back results we have so far + async(callback)(null, this.results); +} diff --git a/node_modules/asynckit/package.json b/node_modules/asynckit/package.json new file mode 100644 index 0000000000..51147d6569 --- /dev/null +++ b/node_modules/asynckit/package.json @@ -0,0 +1,63 @@ +{ + "name": "asynckit", + "version": "0.4.0", + "description": "Minimal async jobs utility library, with streams support", + "main": "index.js", + "scripts": { + "clean": "rimraf coverage", + "lint": "eslint *.js lib/*.js test/*.js", + "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec", + "win-test": "tape test/test-*.js", + "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec", + "report": "istanbul report", + "size": "browserify index.js | size-table asynckit", + "debug": "tape test/test-*.js" + }, + "pre-commit": [ + "clean", + "lint", + "test", + "browser", + "report", + "size" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/alexindigo/asynckit.git" + }, + "keywords": [ + "async", + "jobs", + "parallel", + "serial", + "iterator", + "array", + "object", + "stream", + "destroy", + "terminate", + "abort" + ], + "author": "Alex Indigo ", + "license": "MIT", + "bugs": { + "url": "https://github.com/alexindigo/asynckit/issues" + }, + "homepage": "https://github.com/alexindigo/asynckit#readme", + "devDependencies": { + "browserify": "^13.0.0", + "browserify-istanbul": "^2.0.0", + "coveralls": "^2.11.9", + "eslint": "^2.9.0", + "istanbul": "^0.4.3", + "obake": "^0.1.2", + "phantomjs-prebuilt": "^2.1.7", + "pre-commit": "^1.1.3", + "reamde": "^1.1.0", + "rimraf": "^2.5.2", + "size-table": "^0.2.0", + "tap-spec": "^4.1.1", + "tape": "^4.5.1" + }, + "dependencies": {} +} diff --git a/node_modules/asynckit/parallel.js b/node_modules/asynckit/parallel.js new file mode 100644 index 0000000000..3c50344d85 --- /dev/null +++ b/node_modules/asynckit/parallel.js @@ -0,0 +1,43 @@ +var iterate = 
require('./lib/iterate.js') + , initState = require('./lib/state.js') + , terminator = require('./lib/terminator.js') + ; + +// Public API +module.exports = parallel; + +/** + * Runs iterator over provided array elements in parallel + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function parallel(list, iterator, callback) +{ + var state = initState(list); + + while (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, function(error, result) + { + if (error) + { + callback(error, result); + return; + } + + // looks like it's the last one + if (Object.keys(state.jobs).length === 0) + { + callback(null, state.results); + return; + } + }); + + state.index++; + } + + return terminator.bind(state, callback); +} diff --git a/node_modules/asynckit/serial.js b/node_modules/asynckit/serial.js new file mode 100644 index 0000000000..6cd949a677 --- /dev/null +++ b/node_modules/asynckit/serial.js @@ -0,0 +1,17 @@ +var serialOrdered = require('./serialOrdered.js'); + +// Public API +module.exports = serial; + +/** + * Runs iterator over provided array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serial(list, iterator, callback) +{ + return serialOrdered(list, iterator, null, callback); +} diff --git a/node_modules/asynckit/serialOrdered.js b/node_modules/asynckit/serialOrdered.js new file mode 100644 index 0000000000..607eafea56 --- /dev/null +++ b/node_modules/asynckit/serialOrdered.js @@ -0,0 +1,75 @@ +var iterate = require('./lib/iterate.js') + , initState = require('./lib/state.js') + , terminator = require('./lib/terminator.js') + ; + +// Public API +module.exports = serialOrdered; +// sorting helpers +module.exports.ascending = ascending; +module.exports.descending = descending; + +/** + * Runs iterator over provided sorted array elements in series + * + * @param {array|object} list - array or object (named list) to iterate over + * @param {function} iterator - iterator to run + * @param {function} sortMethod - custom sort function + * @param {function} callback - invoked when all elements processed + * @returns {function} - jobs terminator + */ +function serialOrdered(list, iterator, sortMethod, callback) +{ + var state = initState(list, sortMethod); + + iterate(list, iterator, state, function iteratorHandler(error, result) + { + if (error) + { + callback(error, result); + return; + } + + state.index++; + + // are we there yet? + if (state.index < (state['keyedList'] || list).length) + { + iterate(list, iterator, state, iteratorHandler); + return; + } + + // done here + callback(null, state.results); + }); + + return terminator.bind(state, callback); +} + +/* + * -- Sort methods + */ + +/** + * sort helper to sort array elements in ascending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function ascending(a, b) +{ + return a < b ? -1 : a > b ? 
1 : 0; +} + +/** + * sort helper to sort array elements in descending order + * + * @param {mixed} a - an item to compare + * @param {mixed} b - an item to compare + * @returns {number} - comparison result + */ +function descending(a, b) +{ + return -1 * ascending(a, b); +} diff --git a/node_modules/asynckit/stream.js b/node_modules/asynckit/stream.js new file mode 100644 index 0000000000..d43465f903 --- /dev/null +++ b/node_modules/asynckit/stream.js @@ -0,0 +1,21 @@ +var inherits = require('util').inherits + , Readable = require('stream').Readable + , ReadableAsyncKit = require('./lib/readable_asynckit.js') + , ReadableParallel = require('./lib/readable_parallel.js') + , ReadableSerial = require('./lib/readable_serial.js') + , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js') + ; + +// API +module.exports = +{ + parallel : ReadableParallel, + serial : ReadableSerial, + serialOrdered : ReadableSerialOrdered, +}; + +inherits(ReadableAsyncKit, Readable); + +inherits(ReadableParallel, ReadableAsyncKit); +inherits(ReadableSerial, ReadableAsyncKit); +inherits(ReadableSerialOrdered, ReadableAsyncKit); diff --git a/node_modules/combined-stream/License b/node_modules/combined-stream/License new file mode 100644 index 0000000000..4804b7ab41 --- /dev/null +++ b/node_modules/combined-stream/License @@ -0,0 +1,19 @@ +Copyright (c) 2011 Debuggable Limited + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/combined-stream/Readme.md b/node_modules/combined-stream/Readme.md new file mode 100644 index 0000000000..9e367b5bc5 --- /dev/null +++ b/node_modules/combined-stream/Readme.md @@ -0,0 +1,138 @@ +# combined-stream + +A stream that emits multiple other streams one after another. + +**NB** Currently `combined-stream` works with streams version 1 only. There is ongoing effort to switch this library to streams version 2. Any help is welcome. :) Meanwhile you can explore other libraries that provide streams2 support with more or less compatibility with `combined-stream`. + +- [combined-stream2](https://www.npmjs.com/package/combined-stream2): A drop-in streams2-compatible replacement for the combined-stream module. + +- [multistream](https://www.npmjs.com/package/multistream): A stream that emits multiple other streams one after another. 
+ +## Installation + +``` bash +npm install combined-stream +``` + +## Usage + +Here is a simple example that shows how you can use combined-stream to combine +two files into one: + +``` javascript +var CombinedStream = require('combined-stream'); +var fs = require('fs'); + +var combinedStream = CombinedStream.create(); +combinedStream.append(fs.createReadStream('file1.txt')); +combinedStream.append(fs.createReadStream('file2.txt')); + +combinedStream.pipe(fs.createWriteStream('combined.txt')); +``` + +While the example above works great, it will pause all source streams until +they are needed. If you don't want that to happen, you can set `pauseStreams` +to `false`: + +``` javascript +var CombinedStream = require('combined-stream'); +var fs = require('fs'); + +var combinedStream = CombinedStream.create({pauseStreams: false}); +combinedStream.append(fs.createReadStream('file1.txt')); +combinedStream.append(fs.createReadStream('file2.txt')); + +combinedStream.pipe(fs.createWriteStream('combined.txt')); +``` + +However, what if you don't have all the source streams yet, or you don't want +to allocate the resources (file descriptors, memory, etc.) for them right away? +Well, in that case you can simply provide a callback that supplies the stream +by calling a `next()` function: + +``` javascript +var CombinedStream = require('combined-stream'); +var fs = require('fs'); + +var combinedStream = CombinedStream.create(); +combinedStream.append(function(next) { + next(fs.createReadStream('file1.txt')); +}); +combinedStream.append(function(next) { + next(fs.createReadStream('file2.txt')); +}); + +combinedStream.pipe(fs.createWriteStream('combined.txt')); +``` + +## API + +### CombinedStream.create([options]) + +Returns a new combined stream object. Available options are: + +* `maxDataSize` +* `pauseStreams` + +The effect of those options is described below. + +### combinedStream.pauseStreams = `true` + +Whether to apply back pressure to the underlaying streams. If set to `false`, +the underlaying streams will never be paused. If set to `true`, the +underlaying streams will be paused right after being appended, as well as when +`delayedStream.pipe()` wants to throttle. + +### combinedStream.maxDataSize = `2 * 1024 * 1024` + +The maximum amount of bytes (or characters) to buffer for all source streams. +If this value is exceeded, `combinedStream` emits an `'error'` event. + +### combinedStream.dataSize = `0` + +The amount of bytes (or characters) currently buffered by `combinedStream`. + +### combinedStream.append(stream) + +Appends the given `stream` to the combinedStream object. If `pauseStreams` is +set to `true, this stream will also be paused right away. + +`streams` can also be a function that takes one parameter called `next`. `next` +is a function that must be invoked in order to provide the `next` stream, see +example above. + +Regardless of how the `stream` is appended, combined-stream always attaches an +`'error'` listener to it, so you don't have to do that manually. + +Special case: `stream` can also be a String or Buffer. + +### combinedStream.write(data) + +You should not call this, `combinedStream` takes care of piping the appended +streams into itself for you. + +### combinedStream.resume() + +Causes `combinedStream` to start drain the streams it manages. The function is +idempotent, and also emits a `'resume'` event each time which usually goes to +the stream that is currently being drained. 
+ +### combinedStream.pause(); + +If `combinedStream.pauseStreams` is set to `false`, this does nothing. +Otherwise a `'pause'` event is emitted, this goes to the stream that is +currently being drained, so you can use it to apply back pressure. + +### combinedStream.end(); + +Sets `combinedStream.writable` to false, emits an `'end'` event, and removes +all streams from the queue. + +### combinedStream.destroy(); + +Same as `combinedStream.end()`, except it emits a `'close'` event instead of +`'end'`. + +## License + +combined-stream is licensed under the MIT license. diff --git a/node_modules/combined-stream/lib/combined_stream.js b/node_modules/combined-stream/lib/combined_stream.js new file mode 100644 index 0000000000..125f097f35 --- /dev/null +++ b/node_modules/combined-stream/lib/combined_stream.js @@ -0,0 +1,208 @@ +var util = require('util'); +var Stream = require('stream').Stream; +var DelayedStream = require('delayed-stream'); + +module.exports = CombinedStream; +function CombinedStream() { + this.writable = false; + this.readable = true; + this.dataSize = 0; + this.maxDataSize = 2 * 1024 * 1024; + this.pauseStreams = true; + + this._released = false; + this._streams = []; + this._currentStream = null; + this._insideLoop = false; + this._pendingNext = false; +} +util.inherits(CombinedStream, Stream); + +CombinedStream.create = function(options) { + var combinedStream = new this(); + + options = options || {}; + for (var option in options) { + combinedStream[option] = options[option]; + } + + return combinedStream; +}; + +CombinedStream.isStreamLike = function(stream) { + return (typeof stream !== 'function') + && (typeof stream !== 'string') + && (typeof stream !== 'boolean') + && (typeof stream !== 'number') + && (!Buffer.isBuffer(stream)); +}; + +CombinedStream.prototype.append = function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + + if (isStreamLike) { + if (!(stream instanceof DelayedStream)) { + var newStream = DelayedStream.create(stream, { + maxDataSize: Infinity, + pauseStream: this.pauseStreams, + }); + stream.on('data', this._checkDataSize.bind(this)); + stream = newStream; + } + + this._handleErrors(stream); + + if (this.pauseStreams) { + stream.pause(); + } + } + + this._streams.push(stream); + return this; +}; + +CombinedStream.prototype.pipe = function(dest, options) { + Stream.prototype.pipe.call(this, dest, options); + this.resume(); + return dest; +}; + +CombinedStream.prototype._getNext = function() { + this._currentStream = null; + + if (this._insideLoop) { + this._pendingNext = true; + return; // defer call + } + + this._insideLoop = true; + try { + do { + this._pendingNext = false; + this._realGetNext(); + } while (this._pendingNext); + } finally { + this._insideLoop = false; + } +}; + +CombinedStream.prototype._realGetNext = function() { + var stream = this._streams.shift(); + + + if (typeof stream == 'undefined') { + this.end(); + return; + } + + if (typeof stream !== 'function') { + this._pipeNext(stream); + return; + } + + var getStream = stream; + getStream(function(stream) { + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('data', this._checkDataSize.bind(this)); + this._handleErrors(stream); + } + + this._pipeNext(stream); + }.bind(this)); +}; + +CombinedStream.prototype._pipeNext = function(stream) { + this._currentStream = stream; + + var isStreamLike = CombinedStream.isStreamLike(stream); + if (isStreamLike) { + stream.on('end', this._getNext.bind(this)); + stream.pipe(this, {end: 
false}); + return; + } + + var value = stream; + this.write(value); + this._getNext(); +}; + +CombinedStream.prototype._handleErrors = function(stream) { + var self = this; + stream.on('error', function(err) { + self._emitError(err); + }); +}; + +CombinedStream.prototype.write = function(data) { + this.emit('data', data); +}; + +CombinedStream.prototype.pause = function() { + if (!this.pauseStreams) { + return; + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); + this.emit('pause'); +}; + +CombinedStream.prototype.resume = function() { + if (!this._released) { + this._released = true; + this.writable = true; + this._getNext(); + } + + if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); + this.emit('resume'); +}; + +CombinedStream.prototype.end = function() { + this._reset(); + this.emit('end'); +}; + +CombinedStream.prototype.destroy = function() { + this._reset(); + this.emit('close'); +}; + +CombinedStream.prototype._reset = function() { + this.writable = false; + this._streams = []; + this._currentStream = null; +}; + +CombinedStream.prototype._checkDataSize = function() { + this._updateDataSize(); + if (this.dataSize <= this.maxDataSize) { + return; + } + + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; + this._emitError(new Error(message)); +}; + +CombinedStream.prototype._updateDataSize = function() { + this.dataSize = 0; + + var self = this; + this._streams.forEach(function(stream) { + if (!stream.dataSize) { + return; + } + + self.dataSize += stream.dataSize; + }); + + if (this._currentStream && this._currentStream.dataSize) { + this.dataSize += this._currentStream.dataSize; + } +}; + +CombinedStream.prototype._emitError = function(err) { + this._reset(); + this.emit('error', err); +}; diff --git a/node_modules/combined-stream/package.json b/node_modules/combined-stream/package.json new file mode 100644 index 0000000000..6982b6da17 --- /dev/null +++ b/node_modules/combined-stream/package.json @@ -0,0 +1,25 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "combined-stream", + "description": "A stream that emits multiple other streams one after another.", + "version": "1.0.8", + "homepage": "https://github.com/felixge/node-combined-stream", + "repository": { + "type": "git", + "url": "git://github.com/felixge/node-combined-stream.git" + }, + "main": "./lib/combined_stream", + "scripts": { + "test": "node test/run.js" + }, + "engines": { + "node": ">= 0.8" + }, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "devDependencies": { + "far": "~0.0.7" + }, + "license": "MIT" +} diff --git a/node_modules/combined-stream/yarn.lock b/node_modules/combined-stream/yarn.lock new file mode 100644 index 0000000000..7edf41840c --- /dev/null +++ b/node_modules/combined-stream/yarn.lock @@ -0,0 +1,17 @@ +# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
+# yarn lockfile v1 + + +delayed-stream@~1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" + +far@~0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/far/-/far-0.0.7.tgz#01c1fd362bcd26ce9cf161af3938aa34619f79a7" + dependencies: + oop "0.0.3" + +oop@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/oop/-/oop-0.0.3.tgz#70fa405a5650891a194fdc82ca68dad6dabf4401" diff --git a/node_modules/delayed-stream/.npmignore b/node_modules/delayed-stream/.npmignore new file mode 100644 index 0000000000..9daeafb986 --- /dev/null +++ b/node_modules/delayed-stream/.npmignore @@ -0,0 +1 @@ +test diff --git a/node_modules/delayed-stream/License b/node_modules/delayed-stream/License new file mode 100644 index 0000000000..4804b7ab41 --- /dev/null +++ b/node_modules/delayed-stream/License @@ -0,0 +1,19 @@ +Copyright (c) 2011 Debuggable Limited + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/delayed-stream/Makefile b/node_modules/delayed-stream/Makefile new file mode 100644 index 0000000000..b4ff85a33b --- /dev/null +++ b/node_modules/delayed-stream/Makefile @@ -0,0 +1,7 @@ +SHELL := /bin/bash + +test: + @./test/run.js + +.PHONY: test + diff --git a/node_modules/delayed-stream/Readme.md b/node_modules/delayed-stream/Readme.md new file mode 100644 index 0000000000..aca36f9f0b --- /dev/null +++ b/node_modules/delayed-stream/Readme.md @@ -0,0 +1,141 @@ +# delayed-stream + +Buffers events from a stream until you are ready to handle them. + +## Installation + +``` bash +npm install delayed-stream +``` + +## Usage + +The following example shows how to write a http echo server that delays its +response by 1000 ms. + +``` javascript +var DelayedStream = require('delayed-stream'); +var http = require('http'); + +http.createServer(function(req, res) { + var delayed = DelayedStream.create(req); + + setTimeout(function() { + res.writeHead(200); + delayed.pipe(res); + }, 1000); +}); +``` + +If you are not using `Stream#pipe`, you can also manually release the buffered +events by calling `delayedStream.resume()`: + +``` javascript +var delayed = DelayedStream.create(req); + +setTimeout(function() { + // Emit all buffered events and resume underlaying source + delayed.resume(); +}, 1000); +``` + +## Implementation + +In order to use this meta stream properly, here are a few things you should +know about the implementation. 
+ +### Event Buffering / Proxying + +All events of the `source` stream are hijacked by overwriting the `source.emit` +method. Until node implements a catch-all event listener, this is the only way. + +However, delayed-stream still continues to emit all events it captures on the +`source`, regardless of whether you have released the delayed stream yet or +not. + +Upon creation, delayed-stream captures all `source` events and stores them in +an internal event buffer. Once `delayedStream.release()` is called, all +buffered events are emitted on the `delayedStream`, and the event buffer is +cleared. After that, delayed-stream merely acts as a proxy for the underlaying +source. + +### Error handling + +Error events on `source` are buffered / proxied just like any other events. +However, `delayedStream.create` attaches a no-op `'error'` listener to the +`source`. This way you only have to handle errors on the `delayedStream` +object, rather than in two places. + +### Buffer limits + +delayed-stream provides a `maxDataSize` property that can be used to limit +the amount of data being buffered. In order to protect you from bad `source` +streams that don't react to `source.pause()`, this feature is enabled by +default. + +## API + +### DelayedStream.create(source, [options]) + +Returns a new `delayedStream`. Available options are: + +* `pauseStream` +* `maxDataSize` + +The description for those properties can be found below. + +### delayedStream.source + +The `source` stream managed by this object. This is useful if you are +passing your `delayedStream` around, and you still want to access properties +on the `source` object. + +### delayedStream.pauseStream = true + +Whether to pause the underlaying `source` when calling +`DelayedStream.create()`. Modifying this property afterwards has no effect. + +### delayedStream.maxDataSize = 1024 * 1024 + +The amount of data to buffer before emitting an `error`. + +If the underlaying source is emitting `Buffer` objects, the `maxDataSize` +refers to bytes. + +If the underlaying source is emitting JavaScript strings, the size refers to +characters. + +If you know what you are doing, you can set this property to `Infinity` to +disable this feature. You can also modify this property during runtime. + +### delayedStream.dataSize = 0 + +The amount of data buffered so far. + +### delayedStream.readable + +An ECMA5 getter that returns the value of `source.readable`. + +### delayedStream.resume() + +If the `delayedStream` has not been released so far, `delayedStream.release()` +is called. + +In either case, `source.resume()` is called. + +### delayedStream.pause() + +Calls `source.pause()`. + +### delayedStream.pipe(dest) + +Calls `delayedStream.resume()` and then proxies the arguments to `source.pipe`. + +### delayedStream.release() + +Emits and clears all events that have been buffered up so far. This does not +resume the underlaying source, use `delayedStream.resume()` instead. + +## License + +delayed-stream is licensed under the MIT license. 
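To make the buffering/proxying behaviour described in the Implementation section concrete, here is a minimal hand-driven sketch; the bare `Stream` source and the string payloads are illustrative assumptions. Events emitted on the source before `release()` are captured in the internal buffer and replayed on the delayed stream, while events emitted afterwards are simply proxied through.

``` javascript
var DelayedStream = require('delayed-stream');
var Stream        = require('stream').Stream;

// A bare, hand-driven source; pauseStream is disabled because this
// source has no pause() method for DelayedStream.create() to call.
var source  = new Stream();
var delayed = DelayedStream.create(source, {pauseStream: false});

delayed.on('data', function(chunk) {
  console.log('got:', chunk);
});

// Emitted before release(): captured in the internal event buffer.
source.emit('data', 'hello');
source.emit('data', 'world');

// Replays the buffered events, then acts as a plain proxy.
delayed.release();

// Emitted after release(): forwarded immediately.
source.emit('data', '!');
// => got: hello, got: world, got: !
```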
diff --git a/node_modules/delayed-stream/lib/delayed_stream.js b/node_modules/delayed-stream/lib/delayed_stream.js new file mode 100644 index 0000000000..b38fc85ff4 --- /dev/null +++ b/node_modules/delayed-stream/lib/delayed_stream.js @@ -0,0 +1,107 @@ +var Stream = require('stream').Stream; +var util = require('util'); + +module.exports = DelayedStream; +function DelayedStream() { + this.source = null; + this.dataSize = 0; + this.maxDataSize = 1024 * 1024; + this.pauseStream = true; + + this._maxDataSizeExceeded = false; + this._released = false; + this._bufferedEvents = []; +} +util.inherits(DelayedStream, Stream); + +DelayedStream.create = function(source, options) { + var delayedStream = new this(); + + options = options || {}; + for (var option in options) { + delayedStream[option] = options[option]; + } + + delayedStream.source = source; + + var realEmit = source.emit; + source.emit = function() { + delayedStream._handleEmit(arguments); + return realEmit.apply(source, arguments); + }; + + source.on('error', function() {}); + if (delayedStream.pauseStream) { + source.pause(); + } + + return delayedStream; +}; + +Object.defineProperty(DelayedStream.prototype, 'readable', { + configurable: true, + enumerable: true, + get: function() { + return this.source.readable; + } +}); + +DelayedStream.prototype.setEncoding = function() { + return this.source.setEncoding.apply(this.source, arguments); +}; + +DelayedStream.prototype.resume = function() { + if (!this._released) { + this.release(); + } + + this.source.resume(); +}; + +DelayedStream.prototype.pause = function() { + this.source.pause(); +}; + +DelayedStream.prototype.release = function() { + this._released = true; + + this._bufferedEvents.forEach(function(args) { + this.emit.apply(this, args); + }.bind(this)); + this._bufferedEvents = []; +}; + +DelayedStream.prototype.pipe = function() { + var r = Stream.prototype.pipe.apply(this, arguments); + this.resume(); + return r; +}; + +DelayedStream.prototype._handleEmit = function(args) { + if (this._released) { + this.emit.apply(this, args); + return; + } + + if (args[0] === 'data') { + this.dataSize += args[1].length; + this._checkIfMaxDataSizeExceeded(); + } + + this._bufferedEvents.push(args); +}; + +DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { + if (this._maxDataSizeExceeded) { + return; + } + + if (this.dataSize <= this.maxDataSize) { + return; + } + + this._maxDataSizeExceeded = true; + var message = + 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' 
+ this.emit('error', new Error(message)); +}; diff --git a/node_modules/delayed-stream/package.json b/node_modules/delayed-stream/package.json new file mode 100644 index 0000000000..eea3291c54 --- /dev/null +++ b/node_modules/delayed-stream/package.json @@ -0,0 +1,27 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "contributors": [ + "Mike Atkins " + ], + "name": "delayed-stream", + "description": "Buffers events from a stream until you are ready to handle them.", + "license": "MIT", + "version": "1.0.0", + "homepage": "https://github.com/felixge/node-delayed-stream", + "repository": { + "type": "git", + "url": "git://github.com/felixge/node-delayed-stream.git" + }, + "main": "./lib/delayed_stream", + "engines": { + "node": ">=0.4.0" + }, + "scripts": { + "test": "make test" + }, + "dependencies": {}, + "devDependencies": { + "fake": "0.2.0", + "far": "0.0.1" + } +} diff --git a/node_modules/event-target-shim/LICENSE b/node_modules/event-target-shim/LICENSE new file mode 100644 index 0000000000..c39e6949ed --- /dev/null +++ b/node_modules/event-target-shim/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Toru Nagashima + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/node_modules/event-target-shim/README.md b/node_modules/event-target-shim/README.md new file mode 100644 index 0000000000..a4f9c1b7ef --- /dev/null +++ b/node_modules/event-target-shim/README.md @@ -0,0 +1,293 @@ +# event-target-shim + +[![npm version](https://img.shields.io/npm/v/event-target-shim.svg)](https://www.npmjs.com/package/event-target-shim) +[![Downloads/month](https://img.shields.io/npm/dm/event-target-shim.svg)](http://www.npmtrends.com/event-target-shim) +[![Build Status](https://travis-ci.org/mysticatea/event-target-shim.svg?branch=master)](https://travis-ci.org/mysticatea/event-target-shim) +[![Coverage Status](https://codecov.io/gh/mysticatea/event-target-shim/branch/master/graph/badge.svg)](https://codecov.io/gh/mysticatea/event-target-shim) +[![Dependency Status](https://david-dm.org/mysticatea/event-target-shim.svg)](https://david-dm.org/mysticatea/event-target-shim) + +An implementation of [WHATWG EventTarget interface](https://dom.spec.whatwg.org/#interface-eventtarget), plus few extensions. + +- This provides `EventTarget` constructor that can inherit for your custom object. +- This provides an utility that defines properties of attribute listeners (e.g. `obj.onclick`). 
+ +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" + +class Foo extends EventTarget { + // ... +} + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Use +const foo = new Foo() +foo.addEventListener("hello", e => console.log("hello", e)) +foo.onhello = e => console.log("onhello:", e) +foo.dispatchEvent(new CustomEvent("hello")) +``` + +## 💿 Installation + +Use [npm](https://www.npmjs.com/) to install then use a bundler. + +``` +npm install event-target-shim +``` + +Or download from [`dist` directory](./dist). + +- [dist/event-target-shim.mjs](dist/event-target-shim.mjs) ... ES modules version. +- [dist/event-target-shim.js](dist/event-target-shim.js) ... Common JS version. +- [dist/event-target-shim.umd.js](dist/event-target-shim.umd.js) ... UMD (Universal Module Definition) version. This is transpiled by [Babel](https://babeljs.io/) for IE 11. + +## 📖 Usage + +```js +import {EventTarget, defineEventAttribute} from "event-target-shim" +// or +const {EventTarget, defineEventAttribute} = require("event-target-shim") + +// or UMD version defines a global variable: +const {EventTarget, defineEventAttribute} = window.EventTargetShim +``` + +### EventTarget + +> https://dom.spec.whatwg.org/#interface-eventtarget + +#### eventTarget.addEventListener(type, callback, options) + +Register an event listener. + +- `type` is a string. This is the event name to register. +- `callback` is a function. This is the event listener to register. +- `options` is a boolean or an object `{ capture?: boolean, passive?: boolean, once?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + - `passive` is the flag to ignore `event.preventDefault()` method in the event listener. + - `once` is the flag to remove the event listener automatically after the first call. + +#### eventTarget.removeEventListener(type, callback, options) + +Unregister an event listener. + +- `type` is a string. This is the event name to unregister. +- `callback` is a function. This is the event listener to unregister. +- `options` is a boolean or an object `{ capture?: boolean }`. If this is a boolean, it's same meaning as `{ capture: options }`. + - `capture` is the flag to register the event listener for capture phase. + +#### eventTarget.dispatchEvent(event) + +Dispatch an event. + +- `event` is a [Event](https://dom.spec.whatwg.org/#event) object or an object `{ type: string, [key: string]: any }`. The latter is non-standard but useful. In both cases, listeners receive the event as implementing [Event](https://dom.spec.whatwg.org/#event) interface. + +### defineEventAttribute(proto, type) + +Define an event attribute (e.g. `onclick`) to `proto`. This is non-standard. + +- `proto` is an object (assuming it's a prototype object). This function defines a getter/setter pair for the event attribute. +- `type` is a string. This is the event name to define. + +For example: + +```js +class AbortSignal extends EventTarget { + constructor() { + this.aborted = false + } +} +// Define `onabort` property. +defineEventAttribute(AbortSignal.prototype, "abort") +``` + +### EventTarget(types) + +Define a custom `EventTarget` class with event attributes. This is non-standard. + +- `types` is a string or an array of strings. This is the event name to define. + +For example: + +```js +// This has `onabort` property. 
+class AbortSignal extends EventTarget("abort") { + constructor() { + this.aborted = false + } +} +``` + +## 📚 Examples + +### ES2015 and later + +> https://jsfiddle.net/636vea92/ + +```js +const {EventTarget, defineEventAttribute} = EventTargetShim + +// Define a derived class. +class Foo extends EventTarget { + // ... +} + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e) +}) +foo.onhello = (e) => { + console.log("onhello", e) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +### Typescript + +```ts +import { EventTarget, defineEventAttribute } from "event-target-shim"; + +// Define events +type FooEvents = { + hello: CustomEvent +} +type FooEventAttributes = { + onhello: CustomEvent +} + +// Define a derived class. +class Foo extends EventTarget { + // ... +} +// Define `foo.onhello` property's implementation. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +const foo = new Foo() +foo.addEventListener("hello", (e) => { + console.log("hello", e.detail) +}) +foo.onhello = (e) => { + console.log("onhello", e.detail) +} + +// Dispatching events +foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +``` + +Unfortunately, both `FooEvents` and `FooEventAttributes` are needed because TypeScript doesn't allow the mutation of string literal types. If TypeScript allowed us to compute `"onhello"` from `"hello"` in types, `FooEventAttributes` will be optional. + +This `EventTarget` type is compatible with `EventTarget` interface of `lib.dom.d.ts`. + +#### To disallow unknown events + +By default, methods such as `addEventListener` accept unknown events. You can disallow unknown events by the third type parameter `"strict"`. + +```ts +type FooEvents = { + hello: CustomEvent +} +class Foo extends EventTarget { + // ... +} + +// OK because `hello` is defined in FooEvents. +foo.addEventListener("hello", (e) => { +}) +// Error because `unknown` is not defined in FooEvents. +foo.addEventListener("unknown", (e) => { +}) +``` + +However, if you use `"strict"` parameter, it loses compatibility with `EventTarget` interface of `lib.dom.d.ts`. + +#### To infer the type of `dispatchEvent()` method + +TypeScript cannot infer the event type of `dispatchEvent()` method properly from the argument in most cases. You can improve this behavior with the following steps: + +1. Use the third type parameter `"strict"`. This prevents inferring to `dispatchEvent()`. +2. Make the `type` property of event definitions stricter. + +```ts +type FooEvents = { + hello: CustomEvent & { type: "hello" } + hey: Event & { type: "hey" } +} +class Foo extends EventTarget { + // ... +} + +// Error because `detail` property is lacking. +foo.dispatchEvent({ type: "hello" }) +``` + +### ES5 + +> https://jsfiddle.net/522zc9de/ + +```js +// Define a derived class. +function Foo() { + EventTarget.call(this) +} +Foo.prototype = Object.create(EventTarget.prototype, { + constructor: { value: Foo, configurable: true, writable: true } + // ... +}) + +// Define `foo.onhello` property. +defineEventAttribute(Foo.prototype, "hello") + +// Register event listeners. +var foo = new Foo() +foo.addEventListener("hello", function(e) { + console.log("hello", e) +}) +foo.onhello = function(e) { + console.log("onhello", e) +} + +// Dispatching events +function isSupportEventConstrucor() { // IE does not support. 
+ try { + new CusomEvent("hello") + return true + } catch (_err) { + return false + } +} +if (isSupportEventConstrucor()) { + foo.dispatchEvent(new CustomEvent("hello", { detail: "detail" })) +} else { + var e = document.createEvent("CustomEvent") + e.initCustomEvent("hello", false, false, "detail") + foo.dispatchEvent(e) +} +``` + +## 📰 Changelog + +- See [GitHub releases](https://github.com/mysticatea/event-target-shim/releases). + +## 🍻 Contributing + +Contributing is welcome ❤️ + +Please use GitHub issues/PRs. + +### Development tools + +- `npm install` installs dependencies for development. +- `npm test` runs tests and measures code coverage. +- `npm run clean` removes temporary files of tests. +- `npm run coverage` opens code coverage of the previous test with your default browser. +- `npm run lint` runs ESLint. +- `npm run build` generates `dist` codes. +- `npm run watch` runs tests on each file change. diff --git a/node_modules/event-target-shim/dist/event-target-shim.js b/node_modules/event-target-shim/dist/event-target-shim.js new file mode 100644 index 0000000000..53ce22036e --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js @@ -0,0 +1,871 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. 
+ */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. + */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. 
+ * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. 
+ * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. + * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. 
+ * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. + * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. 
+ * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? 
node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. +if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +exports.defineEventAttribute = defineEventAttribute; +exports.EventTarget = EventTarget; +exports.default = EventTarget; + +module.exports = EventTarget +module.exports.EventTarget = module.exports["default"] = EventTarget +module.exports.defineEventAttribute = defineEventAttribute +//# sourceMappingURL=event-target-shim.js.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.js.map b/node_modules/event-target-shim/dist/event-target-shim.js.map new file mode 100644 index 0000000000..83c5f626aa --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.js.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.js","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event 
bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns 
{Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * 
@returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. 
`eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? 
CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB
,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,A
AAO,SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAA
G,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;;;;;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs b/node_modules/event-target-shim/dist/event-target-shim.mjs new file mode 100644 index 0000000000..114f3a1711 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.mjs @@ -0,0 +1,862 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */ +/** + * @typedef {object} PrivateData + * @property {EventTarget} eventTarget The event target. + * @property {{type:string}} event The original event object. + * @property {number} eventPhase The current event phase. + * @property {EventTarget|null} currentTarget The current event target. + * @property {boolean} canceled The flag to prevent default. + * @property {boolean} stopped The flag to stop propagation. + * @property {boolean} immediateStopped The flag to stop propagation immediately. + * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. + * @property {number} timeStamp The unix time. + * @private + */ + +/** + * Private data for event wrappers. + * @type {WeakMap} + * @private + */ +const privateData = new WeakMap(); + +/** + * Cache for wrapper classes. + * @type {WeakMap} + * @private + */ +const wrappers = new WeakMap(); + +/** + * Get private data. + * @param {Event} event The event object to get private data. + * @returns {PrivateData} The private data of the event. + * @private + */ +function pd(event) { + const retv = privateData.get(event); + console.assert( + retv != null, + "'this' is expected an Event object, but got", + event + ); + return retv +} + +/** + * https://dom.spec.whatwg.org/#set-the-canceled-flag + * @param data {PrivateData} private data. + */ +function setCancelFlag(data) { + if (data.passiveListener != null) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error( + "Unable to preventDefault inside passive event listener invocation.", + data.passiveListener + ); + } + return + } + if (!data.event.cancelable) { + return + } + + data.canceled = true; + if (typeof data.event.preventDefault === "function") { + data.event.preventDefault(); + } +} + +/** + * @see https://dom.spec.whatwg.org/#interface-event + * @private + */ +/** + * The event wrapper. + * @constructor + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Event|{type:string}} event The original event to wrap. + */ +function Event(eventTarget, event) { + privateData.set(this, { + eventTarget, + event, + eventPhase: 2, + currentTarget: eventTarget, + canceled: false, + stopped: false, + immediateStopped: false, + passiveListener: null, + timeStamp: event.timeStamp || Date.now(), + }); + + // https://heycam.github.io/webidl/#Unforgeable + Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); + + // Define accessors + const keys = Object.keys(event); + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in this)) { + Object.defineProperty(this, key, defineRedirectDescriptor(key)); + } + } +} + +// Should be enumerable, but class methods are not enumerable. +Event.prototype = { + /** + * The type of this event. + * @type {string} + */ + get type() { + return pd(this).event.type + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get target() { + return pd(this).eventTarget + }, + + /** + * The target of this event. + * @type {EventTarget} + */ + get currentTarget() { + return pd(this).currentTarget + }, + + /** + * @returns {EventTarget[]} The composed path of this event. 
+ */ + composedPath() { + const currentTarget = pd(this).currentTarget; + if (currentTarget == null) { + return [] + } + return [currentTarget] + }, + + /** + * Constant of NONE. + * @type {number} + */ + get NONE() { + return 0 + }, + + /** + * Constant of CAPTURING_PHASE. + * @type {number} + */ + get CAPTURING_PHASE() { + return 1 + }, + + /** + * Constant of AT_TARGET. + * @type {number} + */ + get AT_TARGET() { + return 2 + }, + + /** + * Constant of BUBBLING_PHASE. + * @type {number} + */ + get BUBBLING_PHASE() { + return 3 + }, + + /** + * The target of this event. + * @type {number} + */ + get eventPhase() { + return pd(this).eventPhase + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopPropagation() { + const data = pd(this); + + data.stopped = true; + if (typeof data.event.stopPropagation === "function") { + data.event.stopPropagation(); + } + }, + + /** + * Stop event bubbling. + * @returns {void} + */ + stopImmediatePropagation() { + const data = pd(this); + + data.stopped = true; + data.immediateStopped = true; + if (typeof data.event.stopImmediatePropagation === "function") { + data.event.stopImmediatePropagation(); + } + }, + + /** + * The flag to be bubbling. + * @type {boolean} + */ + get bubbles() { + return Boolean(pd(this).event.bubbles) + }, + + /** + * The flag to be cancelable. + * @type {boolean} + */ + get cancelable() { + return Boolean(pd(this).event.cancelable) + }, + + /** + * Cancel this event. + * @returns {void} + */ + preventDefault() { + setCancelFlag(pd(this)); + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + */ + get defaultPrevented() { + return pd(this).canceled + }, + + /** + * The flag to be composed. + * @type {boolean} + */ + get composed() { + return Boolean(pd(this).event.composed) + }, + + /** + * The unix time of this event. + * @type {number} + */ + get timeStamp() { + return pd(this).timeStamp + }, + + /** + * The target of this event. + * @type {EventTarget} + * @deprecated + */ + get srcElement() { + return pd(this).eventTarget + }, + + /** + * The flag to stop event bubbling. + * @type {boolean} + * @deprecated + */ + get cancelBubble() { + return pd(this).stopped + }, + set cancelBubble(value) { + if (!value) { + return + } + const data = pd(this); + + data.stopped = true; + if (typeof data.event.cancelBubble === "boolean") { + data.event.cancelBubble = true; + } + }, + + /** + * The flag to indicate cancellation state. + * @type {boolean} + * @deprecated + */ + get returnValue() { + return !pd(this).canceled + }, + set returnValue(value) { + if (!value) { + setCancelFlag(pd(this)); + } + }, + + /** + * Initialize this event object. But do nothing under event dispatching. + * @param {string} type The event type. + * @param {boolean} [bubbles=false] The flag to be possible to bubble up. + * @param {boolean} [cancelable=false] The flag to be possible to cancel. + * @deprecated + */ + initEvent() { + // Do nothing. + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(Event.prototype, "constructor", { + value: Event, + configurable: true, + writable: true, +}); + +// Ensure `event instanceof window.Event` is `true`. +if (typeof window !== "undefined" && typeof window.Event !== "undefined") { + Object.setPrototypeOf(Event.prototype, window.Event.prototype); + + // Make association for wrappers. + wrappers.set(window.Event.prototype, Event); +} + +/** + * Get the property descriptor to redirect a given property. 
+ * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to redirect the property. + * @private + */ +function defineRedirectDescriptor(key) { + return { + get() { + return pd(this).event[key] + }, + set(value) { + pd(this).event[key] = value; + }, + configurable: true, + enumerable: true, + } +} + +/** + * Get the property descriptor to call a given method property. + * @param {string} key Property name to define property descriptor. + * @returns {PropertyDescriptor} The property descriptor to call the method property. + * @private + */ +function defineCallDescriptor(key) { + return { + value() { + const event = pd(this).event; + return event[key].apply(event, arguments) + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define new wrapper class. + * @param {Function} BaseEvent The base wrapper class. + * @param {Object} proto The prototype of the original event. + * @returns {Function} The defined wrapper class. + * @private + */ +function defineWrapper(BaseEvent, proto) { + const keys = Object.keys(proto); + if (keys.length === 0) { + return BaseEvent + } + + /** CustomEvent */ + function CustomEvent(eventTarget, event) { + BaseEvent.call(this, eventTarget, event); + } + + CustomEvent.prototype = Object.create(BaseEvent.prototype, { + constructor: { value: CustomEvent, configurable: true, writable: true }, + }); + + // Define accessors. + for (let i = 0; i < keys.length; ++i) { + const key = keys[i]; + if (!(key in BaseEvent.prototype)) { + const descriptor = Object.getOwnPropertyDescriptor(proto, key); + const isFunc = typeof descriptor.value === "function"; + Object.defineProperty( + CustomEvent.prototype, + key, + isFunc + ? defineCallDescriptor(key) + : defineRedirectDescriptor(key) + ); + } + } + + return CustomEvent +} + +/** + * Get the wrapper class of a given prototype. + * @param {Object} proto The prototype of the original event to get its wrapper. + * @returns {Function} The wrapper class. + * @private + */ +function getWrapper(proto) { + if (proto == null || proto === Object.prototype) { + return Event + } + + let wrapper = wrappers.get(proto); + if (wrapper == null) { + wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); + wrappers.set(proto, wrapper); + } + return wrapper +} + +/** + * Wrap a given event to management a dispatching. + * @param {EventTarget} eventTarget The event target of this dispatching. + * @param {Object} event The event to wrap. + * @returns {Event} The wrapper instance. + * @private + */ +function wrapEvent(eventTarget, event) { + const Wrapper = getWrapper(Object.getPrototypeOf(event)); + return new Wrapper(eventTarget, event) +} + +/** + * Get the immediateStopped flag of a given event. + * @param {Event} event The event to get. + * @returns {boolean} The flag to stop propagation immediately. + * @private + */ +function isStopped(event) { + return pd(event).immediateStopped +} + +/** + * Set the current event phase of a given event. + * @param {Event} event The event to set current target. + * @param {number} eventPhase New event phase. + * @returns {void} + * @private + */ +function setEventPhase(event, eventPhase) { + pd(event).eventPhase = eventPhase; +} + +/** + * Set the current target of a given event. + * @param {Event} event The event to set current target. + * @param {EventTarget|null} currentTarget New current target. 
+ * @returns {void} + * @private + */ +function setCurrentTarget(event, currentTarget) { + pd(event).currentTarget = currentTarget; +} + +/** + * Set a passive listener of a given event. + * @param {Event} event The event to set current target. + * @param {Function|null} passiveListener New passive listener. + * @returns {void} + * @private + */ +function setPassiveListener(event, passiveListener) { + pd(event).passiveListener = passiveListener; +} + +/** + * @typedef {object} ListenerNode + * @property {Function} listener + * @property {1|2|3} listenerType + * @property {boolean} passive + * @property {boolean} once + * @property {ListenerNode|null} next + * @private + */ + +/** + * @type {WeakMap>} + * @private + */ +const listenersMap = new WeakMap(); + +// Listener types +const CAPTURE = 1; +const BUBBLE = 2; +const ATTRIBUTE = 3; + +/** + * Check whether a given value is an object or not. + * @param {any} x The value to check. + * @returns {boolean} `true` if the value is an object. + */ +function isObject(x) { + return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax +} + +/** + * Get listeners. + * @param {EventTarget} eventTarget The event target to get. + * @returns {Map} The listeners. + * @private + */ +function getListeners(eventTarget) { + const listeners = listenersMap.get(eventTarget); + if (listeners == null) { + throw new TypeError( + "'this' is expected an EventTarget object, but got another value." + ) + } + return listeners +} + +/** + * Get the property descriptor for the event attribute of a given event. + * @param {string} eventName The event name to get property descriptor. + * @returns {PropertyDescriptor} The property descriptor. + * @private + */ +function defineEventAttributeDescriptor(eventName) { + return { + get() { + const listeners = getListeners(this); + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + return node.listener + } + node = node.next; + } + return null + }, + + set(listener) { + if (typeof listener !== "function" && !isObject(listener)) { + listener = null; // eslint-disable-line no-param-reassign + } + const listeners = getListeners(this); + + // Traverse to the tail while removing old value. + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if (node.listenerType === ATTRIBUTE) { + // Remove old value. + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + node = node.next; + } + + // Add new value. + if (listener !== null) { + const newNode = { + listener, + listenerType: ATTRIBUTE, + passive: false, + once: false, + next: null, + }; + if (prev === null) { + listeners.set(eventName, newNode); + } else { + prev.next = newNode; + } + } + }, + configurable: true, + enumerable: true, + } +} + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. + * @param {string} eventName The event name to define. + * @returns {void} + */ +function defineEventAttribute(eventTargetPrototype, eventName) { + Object.defineProperty( + eventTargetPrototype, + `on${eventName}`, + defineEventAttributeDescriptor(eventName) + ); +} + +/** + * Define a custom EventTarget with event attributes. + * @param {string[]} eventNames Event names for event attributes. 
+ * @returns {EventTarget} The custom EventTarget. + * @private + */ +function defineCustomEventTarget(eventNames) { + /** CustomEventTarget */ + function CustomEventTarget() { + EventTarget.call(this); + } + + CustomEventTarget.prototype = Object.create(EventTarget.prototype, { + constructor: { + value: CustomEventTarget, + configurable: true, + writable: true, + }, + }); + + for (let i = 0; i < eventNames.length; ++i) { + defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); + } + + return CustomEventTarget +} + +/** + * EventTarget. + * + * - This is constructor if no arguments. + * - This is a function which returns a CustomEventTarget constructor if there are arguments. + * + * For example: + * + * class A extends EventTarget {} + * class B extends EventTarget("message") {} + * class C extends EventTarget("message", "error") {} + * class D extends EventTarget(["message", "error"]) {} + */ +function EventTarget() { + /*eslint-disable consistent-return */ + if (this instanceof EventTarget) { + listenersMap.set(this, new Map()); + return + } + if (arguments.length === 1 && Array.isArray(arguments[0])) { + return defineCustomEventTarget(arguments[0]) + } + if (arguments.length > 0) { + const types = new Array(arguments.length); + for (let i = 0; i < arguments.length; ++i) { + types[i] = arguments[i]; + } + return defineCustomEventTarget(types) + } + throw new TypeError("Cannot call a class as a function") + /*eslint-enable consistent-return */ +} + +// Should be enumerable, but class methods are not enumerable. +EventTarget.prototype = { + /** + * Add a given listener to this event target. + * @param {string} eventName The event name to add. + * @param {Function} listener The listener to add. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + addEventListener(eventName, listener, options) { + if (listener == null) { + return + } + if (typeof listener !== "function" && !isObject(listener)) { + throw new TypeError("'listener' should be a function or an object.") + } + + const listeners = getListeners(this); + const optionsIsObj = isObject(options); + const capture = optionsIsObj + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? CAPTURE : BUBBLE; + const newNode = { + listener, + listenerType, + passive: optionsIsObj && Boolean(options.passive), + once: optionsIsObj && Boolean(options.once), + next: null, + }; + + // Set it as the first node if the first node is null. + let node = listeners.get(eventName); + if (node === undefined) { + listeners.set(eventName, newNode); + return + } + + // Traverse to the tail while checking duplication.. + let prev = null; + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + // Should ignore duplication. + return + } + prev = node; + node = node.next; + } + + // Add it. + prev.next = newNode; + }, + + /** + * Remove a given listener from this event target. + * @param {string} eventName The event name to remove. + * @param {Function} listener The listener to remove. + * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. + * @returns {void} + */ + removeEventListener(eventName, listener, options) { + if (listener == null) { + return + } + + const listeners = getListeners(this); + const capture = isObject(options) + ? Boolean(options.capture) + : Boolean(options); + const listenerType = capture ? 
CAPTURE : BUBBLE; + + let prev = null; + let node = listeners.get(eventName); + while (node != null) { + if ( + node.listener === listener && + node.listenerType === listenerType + ) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + return + } + + prev = node; + node = node.next; + } + }, + + /** + * Dispatch a given event. + * @param {Event|{type:string}} event The event to dispatch. + * @returns {boolean} `false` if canceled. + */ + dispatchEvent(event) { + if (event == null || typeof event.type !== "string") { + throw new TypeError('"event.type" should be a string.') + } + + // If listeners aren't registered, terminate. + const listeners = getListeners(this); + const eventName = event.type; + let node = listeners.get(eventName); + if (node == null) { + return true + } + + // Since we cannot rewrite several properties, so wrap object. + const wrappedEvent = wrapEvent(this, event); + + // This doesn't process capturing phase and bubbling phase. + // This isn't participating in a tree. + let prev = null; + while (node != null) { + // Remove this listener if it's once + if (node.once) { + if (prev !== null) { + prev.next = node.next; + } else if (node.next !== null) { + listeners.set(eventName, node.next); + } else { + listeners.delete(eventName); + } + } else { + prev = node; + } + + // Call this listener + setPassiveListener( + wrappedEvent, + node.passive ? node.listener : null + ); + if (typeof node.listener === "function") { + try { + node.listener.call(this, wrappedEvent); + } catch (err) { + if ( + typeof console !== "undefined" && + typeof console.error === "function" + ) { + console.error(err); + } + } + } else if ( + node.listenerType !== ATTRIBUTE && + typeof node.listener.handleEvent === "function" + ) { + node.listener.handleEvent(wrappedEvent); + } + + // Break if `event.stopImmediatePropagation` was called. + if (isStopped(wrappedEvent)) { + break + } + + node = node.next; + } + setPassiveListener(wrappedEvent, null); + setEventPhase(wrappedEvent, 0); + setCurrentTarget(wrappedEvent, null); + + return !wrappedEvent.defaultPrevented + }, +}; + +// `constructor` is not enumerable. +Object.defineProperty(EventTarget.prototype, "constructor", { + value: EventTarget, + configurable: true, + writable: true, +}); + +// Ensure `eventTarget instanceof window.EventTarget` is `true`. 
+if ( + typeof window !== "undefined" && + typeof window.EventTarget !== "undefined" +) { + Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); +} + +export default EventTarget; +export { defineEventAttribute, EventTarget }; +//# sourceMappingURL=event-target-shim.mjs.map diff --git a/node_modules/event-target-shim/dist/event-target-shim.mjs.map b/node_modules/event-target-shim/dist/event-target-shim.mjs.map new file mode 100644 index 0000000000..57b3e8f7c0 --- /dev/null +++ b/node_modules/event-target-shim/dist/event-target-shim.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"event-target-shim.mjs","sources":["../src/event.mjs","../src/event-target.mjs"],"sourcesContent":["/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * 
@type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":[],"mappings":";;;;;AAAA;;;;;;;;;;;;;;;;;;;AAmBA,MAAM,WAAW,GAAG,IAAI,OAAO,GAAE;;;;;;;AAOjC,MAAM,QAAQ,GAAG,IAAI,OAAO,GAAE;;;;;;;;AAQ9B,SAAS,EAAE,CAAC,KAAK,EAAE;IACf,MAAM,IAAI,GAAG,WAAW,CAAC,GAAG,CAAC,KAAK,EAAC;IACnC,OAAO,CAAC,MAAM;QACV,IAAI,IAAI,IAAI;QACZ,6CAA6C;QAC7C,KAAK;MACR;IACD,OAAO,IAAI;CACd;;;;;;AAMD,SAAS,aAAa,CAAC,IAAI,EAAE;IACzB,IAAI,IAAI,CAAC,eAAe,IAAI,IAAI,EAAE;QAC9B;YACI,OAAO,OAAO,KAAK,WAAW;YAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;UACrC;YACE,OAAO,CAAC,KAAK;gBACT,oEAAoE;gBACpE,IAAI,CAAC,eAAe;cACvB;SACJ;QACD,MAAM;KACT;IACD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,UAAU,EAAE;QACxB,MAAM;KACT;;IAED,IAAI,CAAC,QAAQ,GAAG,KAAI;IACpB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,cAAc,KAAK,UAAU,EAAE;QACjD,IAAI,CAAC,KAAK,CAAC,cAAc,GAAE;KAC9B;CACJ;;;;;;;;;;;;AAYD,SAAS,KAAK,CAAC,WAAW,EAAE,KAAK,EAAE;IAC/B,WAAW,CAAC,GAAG,CAAC,IAAI,EAAE;QAClB,WAAW;QACX,KAAK;QACL,UAAU,EAAE,CAAC;QACb,aAAa,EAAE,WAAW;QAC1B,QAAQ,EAAE,KAAK;QACf,OAAO,EAAE,KAAK;QACd,gBAAgB,EAAE,KAAK;QACvB,eAAe,EAAE,IAAI;QACrB,SAAS,EAAE,KAAK,CAAC,SAAS,IAAI,IAAI,CAAC,GAAG,EAAE;KAC3C,EAAC;;;IAGF,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,WAAW,EAAE,EAAE,KAAK,EAAE,KAAK,EAAE,UAAU,EAAE,IAAI,EAAE,EAAC;;;IAG5E,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE;YAChB,MAAM,CAAC,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,wBAAwB,CAAC,GAAG,CAAC,EAAC;SAClE;KACJ;CACJ;;;AAGD,KAAK,CAAC,SAAS,GAAG;;;;;IAKd,IAAI,IAAI,GAAG;QACP,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI;KAC7B;;;;;;IAMD,IAAI,MAAM,GAAG;QACT,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;IAMD,IAAI,aAAa,GAAG;QAChB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,aAAa;KAChC;;;;;IAKD,YAAY,GAAG;QACX,MAAM,aAAa,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,cAAa;QAC5C,IAAI,aAAa,IAAI,IAAI,EAAE;YACvB,OAAO,EAAE;SACZ;QACD,OAAO,CAAC,aAAa,CAAC;KACzB;;;;;;IAMD,IAAI,IAAI,GAAG;QACP,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,eAAe,GAAG;QAClB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,cAAc,GAAG;QACjB,OAAO,CAAC;KACX;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,UAAU;KAC7B;;;;;;IAMD,eAAe,GAAG;QACd,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,eAAe,KAAK,UAAU,EAAE;YAClD,IAAI,CAAC,KAAK,CAAC,eAAe,GAAE;SAC/B;KACJ;;;;;;IAMD,wBAAwB,GAAG;QACvB,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAA
I,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,CAAC,gBAAgB,GAAG,KAAI;QAC5B,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,wBAAwB,KAAK,UAAU,EAAE;YAC3D,IAAI,CAAC,KAAK,CAAC,wBAAwB,GAAE;SACxC;KACJ;;;;;;IAMD,IAAI,OAAO,GAAG;QACV,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC;KACzC;;;;;;IAMD,IAAI,UAAU,GAAG;QACb,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC;KAC5C;;;;;;IAMD,cAAc,GAAG;QACb,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;KAC1B;;;;;;IAMD,IAAI,gBAAgB,GAAG;QACnB,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC3B;;;;;;IAMD,IAAI,QAAQ,GAAG;QACX,OAAO,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC;KAC1C;;;;;;IAMD,IAAI,SAAS,GAAG;QACZ,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,SAAS;KAC5B;;;;;;;IAOD,IAAI,UAAU,GAAG;QACb,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,WAAW;KAC9B;;;;;;;IAOD,IAAI,YAAY,GAAG;QACf,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO;KAC1B;IACD,IAAI,YAAY,CAAC,KAAK,EAAE;QACpB,IAAI,CAAC,KAAK,EAAE;YACR,MAAM;SACT;QACD,MAAM,IAAI,GAAG,EAAE,CAAC,IAAI,EAAC;;QAErB,IAAI,CAAC,OAAO,GAAG,KAAI;QACnB,IAAI,OAAO,IAAI,CAAC,KAAK,CAAC,YAAY,KAAK,SAAS,EAAE;YAC9C,IAAI,CAAC,KAAK,CAAC,YAAY,GAAG,KAAI;SACjC;KACJ;;;;;;;IAOD,IAAI,WAAW,GAAG;QACd,OAAO,CAAC,EAAE,CAAC,IAAI,CAAC,CAAC,QAAQ;KAC5B;IACD,IAAI,WAAW,CAAC,KAAK,EAAE;QACnB,IAAI,CAAC,KAAK,EAAE;YACR,aAAa,CAAC,EAAE,CAAC,IAAI,CAAC,EAAC;SAC1B;KACJ;;;;;;;;;IASD,SAAS,GAAG;;KAEX;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,aAAa,EAAE;IAClD,KAAK,EAAE,KAAK;IACZ,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF,IAAI,OAAO,MAAM,KAAK,WAAW,IAAI,OAAO,MAAM,CAAC,KAAK,KAAK,WAAW,EAAE;IACtE,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,SAAS,EAAE,MAAM,CAAC,KAAK,CAAC,SAAS,EAAC;;;IAG9D,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,KAAK,CAAC,SAAS,EAAE,KAAK,EAAC;CAC9C;;;;;;;;AAQD,SAAS,wBAAwB,CAAC,GAAG,EAAE;IACnC,OAAO;QACH,GAAG,GAAG;YACF,OAAO,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC;SAC7B;QACD,GAAG,CAAC,KAAK,EAAE;YACP,EAAE,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,GAAG,MAAK;SAC9B;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,GAAG,EAAE;IAC/B,OAAO;QACH,KAAK,GAAG;YACJ,MAAM,KAAK,GAAG,EAAE,CAAC,IAAI,CAAC,CAAC,MAAK;YAC5B,OAAO,KAAK,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,KAAK,EAAE,SAAS,CAAC;SAC5C;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;;AASD,SAAS,aAAa,CAAC,SAAS,EAAE,KAAK,EAAE;IACrC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,KAAK,EAAC;IAC/B,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;QACnB,OAAO,SAAS;KACnB;;;IAGD,SAAS,WAAW,CAAC,WAAW,EAAE,KAAK,EAAE;QACrC,SAAS,CAAC,IAAI,CAAC,IAAI,EAAE,WAAW,EAAE,KAAK,EAAC;KAC3C;;IAED,WAAW,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,SAAS,CAAC,SAAS,EAAE;QACvD,WAAW,EAAE,EAAE,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,IAAI,EAAE,QAAQ,EAAE,IAAI,EAAE;KAC1E,EAAC;;;IAGF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QAClC,MAAM,GAAG,GAAG,IAAI,CAAC,CAAC,EAAC;QACnB,IAAI,EAAE,GAAG,IAAI,SAAS,CAAC,SAAS,CAAC,EAAE;YAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,wBAAwB,CAAC,KAAK,EAAE,GAAG,EAAC;YAC9D,MAAM,MAAM,GAAG,OAAO,UAAU,CAAC,KAAK,KAAK,WAAU;YACrD,MAAM,CAAC,cAAc;gBACjB,WAAW,CAAC,SAAS;gBACrB,GAAG;gBACH,MAAM;sBACA,oBAAoB,CAAC,GAAG,CAAC;sBACzB,wBAAwB,CAAC,GAAG,CAAC;cACtC;SACJ;KACJ;;IAED,OAAO,WAAW;CACrB;;;;;;;;AAQD,SAAS,UAAU,CAAC,KAAK,EAAE;IACvB,IAAI,KAAK,IAAI,IAAI,IAAI,KAAK,KAAK,MAAM,CAAC,SAAS,EAAE;QAC7C,OAAO,KAAK;KACf;;IAED,IAAI,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAC;IACjC,IAAI,OAAO,IAAI,IAAI,EAAE;QACjB,OAAO,GAAG,aAAa,CAAC,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,CAAC,EAAE,KAAK,EAAC;QACxE,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,OAAO,EAAC;KAC/B;IACD,OAAO,OAAO;CACjB;;;;;;;;;AASD,AAAO,SAAS,SAAS,CAAC,WAAW,EAAE,KAAK,EAAE;IAC1C,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,cAAc,CAAC,KAAK,CAAC,EAAC;IACxD,OAAO,IAAI,OAAO,CAAC,WAAW,EAAE,KAAK,CAAC;CACzC;;;;;;;;AAQD,AAAO,
SAAS,SAAS,CAAC,KAAK,EAAE;IAC7B,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,gBAAgB;CACpC;;;;;;;;;AASD,AAAO,SAAS,aAAa,CAAC,KAAK,EAAE,UAAU,EAAE;IAC7C,EAAE,CAAC,KAAK,CAAC,CAAC,UAAU,GAAG,WAAU;CACpC;;;;;;;;;AASD,AAAO,SAAS,gBAAgB,CAAC,KAAK,EAAE,aAAa,EAAE;IACnD,EAAE,CAAC,KAAK,CAAC,CAAC,aAAa,GAAG,cAAa;CAC1C;;;;;;;;;AASD,AAAO,SAAS,kBAAkB,CAAC,KAAK,EAAE,eAAe,EAAE;IACvD,EAAE,CAAC,KAAK,CAAC,CAAC,eAAe,GAAG,gBAAe;CAC9C;;ACtdD;;;;;;;;;;;;;;AAcA,MAAM,YAAY,GAAG,IAAI,OAAO,GAAE;;;AAGlC,MAAM,OAAO,GAAG,EAAC;AACjB,MAAM,MAAM,GAAG,EAAC;AAChB,MAAM,SAAS,GAAG,EAAC;;;;;;;AAOnB,SAAS,QAAQ,CAAC,CAAC,EAAE;IACjB,OAAO,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,KAAK,QAAQ;CAC7C;;;;;;;;AAQD,SAAS,YAAY,CAAC,WAAW,EAAE;IAC/B,MAAM,SAAS,GAAG,YAAY,CAAC,GAAG,CAAC,WAAW,EAAC;IAC/C,IAAI,SAAS,IAAI,IAAI,EAAE;QACnB,MAAM,IAAI,SAAS;YACf,kEAAkE;SACrE;KACJ;IACD,OAAO,SAAS;CACnB;;;;;;;;AAQD,SAAS,8BAA8B,CAAC,SAAS,EAAE;IAC/C,OAAO;QACH,GAAG,GAAG;YACF,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;YACpC,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;oBACjC,OAAO,IAAI,CAAC,QAAQ;iBACvB;gBACD,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;YACD,OAAO,IAAI;SACd;;QAED,GAAG,CAAC,QAAQ,EAAE;YACV,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;gBACvD,QAAQ,GAAG,KAAI;aAClB;YACD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;;;YAGpC,IAAI,IAAI,GAAG,KAAI;YACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;YACnC,OAAO,IAAI,IAAI,IAAI,EAAE;gBACjB,IAAI,IAAI,CAAC,YAAY,KAAK,SAAS,EAAE;;oBAEjC,IAAI,IAAI,KAAK,IAAI,EAAE;wBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;qBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;wBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;qBACtC,MAAM;wBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;qBAC9B;iBACJ,MAAM;oBACH,IAAI,GAAG,KAAI;iBACd;;gBAED,IAAI,GAAG,IAAI,CAAC,KAAI;aACnB;;;YAGD,IAAI,QAAQ,KAAK,IAAI,EAAE;gBACnB,MAAM,OAAO,GAAG;oBACZ,QAAQ;oBACR,YAAY,EAAE,SAAS;oBACvB,OAAO,EAAE,KAAK;oBACd,IAAI,EAAE,KAAK;oBACX,IAAI,EAAE,IAAI;kBACb;gBACD,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;iBACpC,MAAM;oBACH,IAAI,CAAC,IAAI,GAAG,QAAO;iBACtB;aACJ;SACJ;QACD,YAAY,EAAE,IAAI;QAClB,UAAU,EAAE,IAAI;KACnB;CACJ;;;;;;;;AAQD,SAAS,oBAAoB,CAAC,oBAAoB,EAAE,SAAS,EAAE;IAC3D,MAAM,CAAC,cAAc;QACjB,oBAAoB;QACpB,CAAC,EAAE,EAAE,SAAS,CAAC,CAAC;QAChB,8BAA8B,CAAC,SAAS,CAAC;MAC5C;CACJ;;;;;;;;AAQD,SAAS,uBAAuB,CAAC,UAAU,EAAE;;IAEzC,SAAS,iBAAiB,GAAG;QACzB,WAAW,CAAC,IAAI,CAAC,IAAI,EAAC;KACzB;;IAED,iBAAiB,CAAC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,WAAW,CAAC,SAAS,EAAE;QAC/D,WAAW,EAAE;YACT,KAAK,EAAE,iBAAiB;YACxB,YAAY,EAAE,IAAI;YAClB,QAAQ,EAAE,IAAI;SACjB;KACJ,EAAC;;IAEF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;QACxC,oBAAoB,CAAC,iBAAiB,CAAC,SAAS,EAAE,UAAU,CAAC,CAAC,CAAC,EAAC;KACnE;;IAED,OAAO,iBAAiB;CAC3B;;;;;;;;;;;;;;;AAeD,SAAS,WAAW,GAAG;;IAEnB,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7B,YAAY,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,GAAG,EAAE,EAAC;QACjC,MAAM;KACT;IACD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,IAAI,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;QACvD,OAAO,uBAAuB,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC;KAC/C;IACD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE;QACtB,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,SAAS,CAAC,MAAM,EAAC;QACzC,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,EAAE,CAAC,EAAE;YACvC,KAAK,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC,CAAC,EAAC;SAC1B;QACD,OAAO,uBAAuB,CAAC,KAAK,CAAC;KACxC;IACD,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC;;CAE3D;;;AAGD,WAAW,CAAC,SAAS,GAAG;;;;;;;;IAQpB,gBAAgB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC3C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;QACD,IAAI,OAAO,QAAQ,KAAK,UAAU,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE;YACvD,MAAM,IAAI,SAAS,CAAC,+CAA+C,CAAC;SACvE;;QAED,MAAM,SAAS,GAAG,YA
AY,CAAC,IAAI,EAAC;QACpC,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,EAAC;QACtC,MAAM,OAAO,GAAG,YAAY;cACtB,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;QAC/C,MAAM,OAAO,GAAG;YACZ,QAAQ;YACR,YAAY;YACZ,OAAO,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;YACjD,IAAI,EAAE,YAAY,IAAI,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC;YAC3C,IAAI,EAAE,IAAI;UACb;;;QAGD,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,KAAK,SAAS,EAAE;YACpB,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,OAAO,EAAC;YACjC,MAAM;SACT;;;QAGD,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;;gBAEE,MAAM;aACT;YACD,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;;;QAGD,IAAI,CAAC,IAAI,GAAG,QAAO;KACtB;;;;;;;;;IASD,mBAAmB,CAAC,SAAS,EAAE,QAAQ,EAAE,OAAO,EAAE;QAC9C,IAAI,QAAQ,IAAI,IAAI,EAAE;YAClB,MAAM;SACT;;QAED,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,OAAO,CAAC;cAC3B,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC;cACxB,OAAO,CAAC,OAAO,EAAC;QACtB,MAAM,YAAY,GAAG,OAAO,GAAG,OAAO,GAAG,OAAM;;QAE/C,IAAI,IAAI,GAAG,KAAI;QACf,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,OAAO,IAAI,IAAI,IAAI,EAAE;YACjB;gBACI,IAAI,CAAC,QAAQ,KAAK,QAAQ;gBAC1B,IAAI,CAAC,YAAY,KAAK,YAAY;cACpC;gBACE,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;gBACD,MAAM;aACT;;YAED,IAAI,GAAG,KAAI;YACX,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;KACJ;;;;;;;IAOD,aAAa,CAAC,KAAK,EAAE;QACjB,IAAI,KAAK,IAAI,IAAI,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACjD,MAAM,IAAI,SAAS,CAAC,kCAAkC,CAAC;SAC1D;;;QAGD,MAAM,SAAS,GAAG,YAAY,CAAC,IAAI,EAAC;QACpC,MAAM,SAAS,GAAG,KAAK,CAAC,KAAI;QAC5B,IAAI,IAAI,GAAG,SAAS,CAAC,GAAG,CAAC,SAAS,EAAC;QACnC,IAAI,IAAI,IAAI,IAAI,EAAE;YACd,OAAO,IAAI;SACd;;;QAGD,MAAM,YAAY,GAAG,SAAS,CAAC,IAAI,EAAE,KAAK,EAAC;;;;QAI3C,IAAI,IAAI,GAAG,KAAI;QACf,OAAO,IAAI,IAAI,IAAI,EAAE;;YAEjB,IAAI,IAAI,CAAC,IAAI,EAAE;gBACX,IAAI,IAAI,KAAK,IAAI,EAAE;oBACf,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,KAAI;iBACxB,MAAM,IAAI,IAAI,CAAC,IAAI,KAAK,IAAI,EAAE;oBAC3B,SAAS,CAAC,GAAG,CAAC,SAAS,EAAE,IAAI,CAAC,IAAI,EAAC;iBACtC,MAAM;oBACH,SAAS,CAAC,MAAM,CAAC,SAAS,EAAC;iBAC9B;aACJ,MAAM;gBACH,IAAI,GAAG,KAAI;aACd;;;YAGD,kBAAkB;gBACd,YAAY;gBACZ,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,QAAQ,GAAG,IAAI;cACtC;YACD,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACrC,IAAI;oBACA,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,YAAY,EAAC;iBACzC,CAAC,OAAO,GAAG,EAAE;oBACV;wBACI,OAAO,OAAO,KAAK,WAAW;wBAC9B,OAAO,OAAO,CAAC,KAAK,KAAK,UAAU;sBACrC;wBACE,OAAO,CAAC,KAAK,CAAC,GAAG,EAAC;qBACrB;iBACJ;aACJ,MAAM;gBACH,IAAI,CAAC,YAAY,KAAK,SAAS;gBAC/B,OAAO,IAAI,CAAC,QAAQ,CAAC,WAAW,KAAK,UAAU;cACjD;gBACE,IAAI,CAAC,QAAQ,CAAC,WAAW,CAAC,YAAY,EAAC;aAC1C;;;YAGD,IAAI,SAAS,CAAC,YAAY,CAAC,EAAE;gBACzB,KAAK;aACR;;YAED,IAAI,GAAG,IAAI,CAAC,KAAI;SACnB;QACD,kBAAkB,CAAC,YAAY,EAAE,IAAI,EAAC;QACtC,aAAa,CAAC,YAAY,EAAE,CAAC,EAAC;QAC9B,gBAAgB,CAAC,YAAY,EAAE,IAAI,EAAC;;QAEpC,OAAO,CAAC,YAAY,CAAC,gBAAgB;KACxC;EACJ;;;AAGD,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,aAAa,EAAE;IACxD,KAAK,EAAE,WAAW;IAClB,YAAY,EAAE,IAAI;IAClB,QAAQ,EAAE,IAAI;CACjB,EAAC;;;AAGF;IACI,OAAO,MAAM,KAAK,WAAW;IAC7B,OAAO,MAAM,CAAC,WAAW,KAAK,WAAW;EAC3C;IACE,MAAM,CAAC,cAAc,CAAC,WAAW,CAAC,SAAS,EAAE,MAAM,CAAC,WAAW,CAAC,SAAS,EAAC;CAC7E;;;;;"} \ No newline at end of file diff --git a/node_modules/event-target-shim/dist/event-target-shim.umd.js b/node_modules/event-target-shim/dist/event-target-shim.umd.js new file mode 100644 index 0000000000..e7cf5d4d58 --- /dev/null +++ 
b/node_modules/event-target-shim/dist/event-target-shim.umd.js @@ -0,0 +1,6 @@ +/** + * @author Toru Nagashima + * @copyright 2015 Toru Nagashima. All rights reserved. + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.EventTargetShim={}))})(this,function(a){"use strict";function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a){var b=u.get(a);return console.assert(null!=b,"'this' is expected an Event object, but got",a),b}function d(a){return null==a.passiveListener?void(!a.event.cancelable||(a.canceled=!0,"function"==typeof a.event.preventDefault&&a.event.preventDefault())):void("undefined"!=typeof console&&"function"==typeof console.error&&console.error("Unable to preventDefault inside passive event listener invocation.",a.passiveListener))}function e(a,b){u.set(this,{eventTarget:a,event:b,eventPhase:2,currentTarget:a,canceled:!1,stopped:!1,immediateStopped:!1,passiveListener:null,timeStamp:b.timeStamp||Date.now()}),Object.defineProperty(this,"isTrusted",{value:!1,enumerable:!0});for(var c,d=Object.keys(b),e=0;e}\n * @private\n */\nconst privateData = new WeakMap()\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap()\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event)\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n )\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n )\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault()\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n })\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true })\n\n // Define accessors\n const keys = Object.keys(event)\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key))\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n 
/**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation()\n }\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this)\n\n data.stopped = true\n data.immediateStopped = true\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation()\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this))\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this)\n\n data.stopped = true\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this))\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n})\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype)\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event)\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto)\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event)\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n })\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i]\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key)\n const isFunc = typeof descriptor.value === \"function\"\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n )\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto)\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto)\n wrappers.set(proto, wrapper)\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nexport function wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event))\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nexport function isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nexport function setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nexport function setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nexport function setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener\n}\n","import {\n isStopped,\n setCurrentTarget,\n setEventPhase,\n setPassiveListener,\n wrapEvent,\n} from \"./event.mjs\"\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap()\n\n// Listener types\nconst CAPTURE = 1\nconst BUBBLE = 2\nconst ATTRIBUTE = 3\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget)\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property 
descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this)\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this)\n\n // Traverse to the tail while removing old value.\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n node = node.next\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n }\n if (prev === null) {\n listeners.set(eventName, newNode)\n } else {\n prev.next = newNode\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n )\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this)\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n })\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i])\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map())\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length)\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i]\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param 
{boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this)\n const optionsIsObj = isObject(options)\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n }\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName)\n if (node === undefined) {\n listeners.set(eventName, newNode)\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node\n node = node.next\n }\n\n // Add it.\n prev.next = newNode\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this)\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options)\n const listenerType = capture ? CAPTURE : BUBBLE\n\n let prev = null\n let node = listeners.get(eventName)\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n return\n }\n\n prev = node\n node = node.next\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this)\n const eventName = event.type\n let node = listeners.get(eventName)\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event)\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next\n } else if (node.next !== null) {\n listeners.set(eventName, node.next)\n } else {\n listeners.delete(eventName)\n }\n } else {\n prev = node\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n )\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent)\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err)\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent)\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next\n }\n setPassiveListener(wrappedEvent, null)\n setEventPhase(wrappedEvent, 0)\n setCurrentTarget(wrappedEvent, null)\n\n return !wrappedEvent.defaultPrevented\n },\n}\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n})\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype)\n}\n\nexport { defineEventAttribute, EventTarget }\nexport default EventTarget\n"],"names":["pd","event","retv","privateData","get","console","assert","setCancelFlag","data","passiveListener","cancelable","canceled","preventDefault","error","Event","eventTarget","set","eventPhase","currentTarget","stopped","immediateStopped","timeStamp","Date","now","Object","defineProperty","value","enumerable","key","keys","i","length","defineRedirectDescriptor","configurable","defineCallDescriptor","apply","arguments","defineWrapper","BaseEvent","proto","CustomEvent","call","prototype","create","constructor","writable","descriptor","getOwnPropertyDescriptor","isFunc","getWrapper","wrapper","wrappers","getPrototypeOf","wrapEvent","Wrapper","isStopped","setEventPhase","setCurrentTarget","setPassiveListener","isObject","x","_typeof","getListeners","listeners","listenersMap","TypeError","defineEventAttributeDescriptor","eventName","node","listenerType","listener","next","prev","delete","newNode","passive","once","defineEventAttribute","eventTargetPrototype","defineCustomEventTarget","eventNames","CustomEventTarget","EventTarget","Map","Array","isArray","types","WeakMap","type","target","composedPath","NONE","CAPTURING_PHASE","AT_TARGET","BUBBLING_PHASE","stopPropagation","stopImmediatePropagation","bubbles","defaultPrevented","composed","srcElement","cancelBubble","returnValue","initEvent","window","setPrototypeOf","CAPTURE","BUBBLE","addEventListener","options","optionsIsObj","capture","removeEventListener","dispatchEvent","wrappedEvent","err","handleEvent"],"mappings":";;;;wbAkCA,QAASA,CAAAA,CAAT,CAAYC,CAAZ,CAAmB,IACTC,CAAAA,CAAI,CAAGC,CAAW,CAACC,GAAZD,CAAgBF,CAAhBE,QACbE,CAAAA,OAAO,CAACC,MAARD,CACY,IAARH,EAAAA,CADJG,CAEI,6CAFJA,CAGIJ,CAHJI,EAKOH,EAOX,QAASK,CAAAA,CAAT,CAAuBC,CAAvB,CAA6B,OACG,KAAxBA,EAAAA,CAAI,CAACC,eADgB,MAarB,CAACD,CAAI,CAACP,KAALO,CAAWE,UAbS,GAiBzBF,CAAI,CAACG,QAALH,GAjByB,CAkBgB,UAArC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWI,cAlBG,EAmBrBJ,CAAI,CAACP,KAALO,CAAWI,cAAXJ,EAnBqB,QAGE,WAAnB,QAAOH,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAJE,EAMjBR,OAAO,CAACQ,KAARR,CACI,oEADJA,CAEIG,CAAI,CAACC,eAFTJ,CANiB,EAiC7B,QAASS,CAAAA,CAAT,CAAeC,CAAf,CAA4Bd,CAA5B,CAAmC,CAC/BE,CAAW,CAACa,GAAZb,CAAgB,IAAhBA,CAAsB,CAClBY,WAAW,CAAXA,CADkB,CAElBd,KAAK,CAALA,CAFkB,CAGlBgB,UAAU,CAAE,CAHM,CAIlBC,aAAa,CAAEH,CAJG,CAKlBJ,QAAQ,GALU,CAMlBQ,OAAO,GANW,CAOlBC,gBAAgB,GAPE,CAQlBX,eAAe,CAAE,IARC,CASlBY,SAAS,CAAEpB,CAAK,CAACoB,S
AANpB,EAAmBqB,IAAI,CAACC,GAALD,EATZ,CAAtBnB,CAD+B,CAc/BqB,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4B,WAA5BA,CAAyC,CAAEE,KAAK,GAAP,CAAgBC,UAAU,GAA1B,CAAzCH,CAd+B,QAmBrBI,CAAAA,EAFJC,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYvB,CAAZuB,EACJM,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,EACzBF,EAAMC,CAAI,CAACC,CAAD,EACVF,CAAG,GAAI,OACTJ,MAAM,CAACC,cAAPD,CAAsB,IAAtBA,CAA4BI,CAA5BJ,CAAiCQ,CAAwB,CAACJ,CAAD,CAAzDJ,EAyOZ,QAASQ,CAAAA,CAAT,CAAkCJ,CAAlC,CAAuC,OAC5B,CACHxB,GADG,WACG,OACKJ,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,CAFR,CAAA,CAIHgB,GAJG,UAICU,EAAO,CACP1B,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAe4B,CAAf5B,EAAsB0B,CALvB,CAAA,CAOHO,YAAY,GAPT,CAQHN,UAAU,GARP,EAkBX,QAASO,CAAAA,CAAT,CAA8BN,CAA9B,CAAmC,OACxB,CACHF,KADG,WACK,IACEzB,CAAAA,CAAK,CAAGD,CAAE,CAAC,IAAD,CAAFA,CAASC,YAChBA,CAAAA,CAAK,CAAC2B,CAAD,CAAL3B,CAAWkC,KAAXlC,CAAiBA,CAAjBA,CAAwBmC,SAAxBnC,CAHR,CAAA,CAKHgC,YAAY,GALT,CAMHN,UAAU,GANP,EAiBX,QAASU,CAAAA,CAAT,CAAuBC,CAAvB,CAAkCC,CAAlC,CAAyC,SAO5BC,CAAAA,EAAYzB,EAAad,EAAO,CACrCqC,CAAS,CAACG,IAAVH,CAAe,IAAfA,CAAqBvB,CAArBuB,CAAkCrC,CAAlCqC,KAPET,CAAAA,CAAI,CAAGL,MAAM,CAACK,IAAPL,CAAYe,CAAZf,KACO,CAAhBK,GAAAA,CAAI,CAACE,aACEO,CAAAA,EAQXE,CAAW,CAACE,SAAZF,CAAwBhB,MAAM,CAACmB,MAAPnB,CAAcc,CAAS,CAACI,SAAxBlB,CAAmC,CACvDoB,WAAW,CAAE,CAAElB,KAAK,CAAEc,CAAT,CAAsBP,YAAY,GAAlC,CAA0CY,QAAQ,GAAlD,CAD0C,CAAnCrB,CAXa,KAgBhC,GACKI,CAAAA,CADL,CAAIE,CAAC,CAAG,EAAGA,CAAC,CAAGD,CAAI,CAACE,OAAQ,EAAED,KACzBF,EAAMC,CAAI,CAACC,CAAD,EACZ,EAAEF,CAAG,GAAIU,CAAAA,CAAS,CAACI,SAAnB,EAA+B,IACzBI,CAAAA,CAAU,CAAGtB,MAAM,CAACuB,wBAAPvB,CAAgCe,CAAhCf,CAAuCI,CAAvCJ,CADY,CAEzBwB,CAAM,CAA+B,UAA5B,QAAOF,CAAAA,CAAU,CAACpB,KAFF,CAG/BF,MAAM,CAACC,cAAPD,CACIgB,CAAW,CAACE,SADhBlB,CAEII,CAFJJ,CAGIwB,CAAM,CACAd,CAAoB,CAACN,CAAD,CADpB,CAEAI,CAAwB,CAACJ,CAAD,CALlCJ,QAUDgB,CAAAA,EASX,QAASS,CAAAA,CAAT,CAAoBV,CAApB,CAA2B,IACV,IAATA,EAAAA,CAAK,EAAYA,CAAK,GAAKf,MAAM,CAACkB,gBAC3B5B,CAAAA,KAGPoC,CAAAA,CAAO,CAAGC,CAAQ,CAAC/C,GAAT+C,CAAaZ,CAAbY,QACC,KAAXD,EAAAA,IACAA,CAAO,CAAGb,CAAa,CAACY,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBe,CAAtBf,CAAD,CAAX,CAA2Ce,CAA3C,EACvBY,CAAQ,CAACnC,GAATmC,CAAaZ,CAAbY,CAAoBD,CAApBC,GAEGD,EAUJ,QAASG,CAAAA,CAAT,CAAmBtC,CAAnB,CAAgCd,CAAhC,CAAuC,IACpCqD,CAAAA,CAAO,CAAGL,CAAU,CAACzB,MAAM,CAAC4B,cAAP5B,CAAsBvB,CAAtBuB,CAAD,QACnB,IAAI8B,CAAAA,CAAJ,CAAYvC,CAAZ,CAAyBd,CAAzB,EASJ,QAASsD,CAAAA,CAAT,CAAmBtD,CAAnB,CAA0B,OACtBD,CAAAA,CAAE,CAACC,CAAD,CAAFD,CAAUoB,iBAUd,QAASoC,CAAAA,CAAT,CAAuBvD,CAAvB,CAA8BgB,CAA9B,CAA0C,CAC7CjB,CAAE,CAACC,CAAD,CAAFD,CAAUiB,UAAVjB,CAAuBiB,EAUpB,QAASwC,CAAAA,CAAT,CAA0BxD,CAA1B,CAAiCiB,CAAjC,CAAgD,CACnDlB,CAAE,CAACC,CAAD,CAAFD,CAAUkB,aAAVlB,CAA0BkB,EAUvB,QAASwC,CAAAA,CAAT,CAA4BzD,CAA5B,CAAmCQ,CAAnC,CAAoD,CACvDT,CAAE,CAACC,CAAD,CAAFD,CAAUS,eAAVT,CAA4BS,EC3bhC,QAASkD,CAAAA,CAAT,CAAkBC,CAAlB,CAAqB,OACJ,KAANA,GAAAA,CAAC,EAA0B,QAAb,GAAAC,EAAOD,GAShC,QAASE,CAAAA,CAAT,CAAsB/C,CAAtB,CAAmC,IACzBgD,CAAAA,CAAS,CAAGC,CAAY,CAAC5D,GAAb4D,CAAiBjD,CAAjBiD,KACD,IAAbD,EAAAA,OACM,IAAIE,CAAAA,SAAJ,CACF,kEADE,QAIHF,CAAAA,EASX,QAASG,CAAAA,CAAT,CAAwCC,CAAxC,CAAmD,OACxC,CACH/D,GADG,WACG,QACI2D,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAD5B,CAEEM,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CAFT,CAGa,IAARK,EAAAA,CAHL,EAGmB,IACbA,IAAAA,CAAI,CAACC,mBACED,CAAAA,CAAI,CAACE,SAEhBF,CAAI,CAAGA,CAAI,CAACG,WAET,KAVR,CAAA,CAaHvD,GAbG,UAaCsD,EAAU,CACc,UAApB,QAAOA,CAAAA,CAAP,EAAmCX,CAAQ,CAACW,CAAD,CADrC,GAENA,CAAQ,CAAG,IAFL,SAIJP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CAJpB,CAONU,CAAI,CAAG,IAPD,CAQNJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARD,CASK,IAARK,EAAAA,CATG,EAUFA,IAAAA,CAAI,CAACC,YAVH,CAYW,IAATG,GAAAA,CAZF,CAcuB,IAAdJ,GAAAA,CAAI,CAACG,IAdd,CAiBER,CAAS
,CAACU,MAAVV,CAAiBI,CAAjBJ,CAjBF,CAeEA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,CAfF,CAaES,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,IAbnB,CAoBFC,CAAI,CAAGJ,CApBL,CAuBNA,CAAI,CAAGA,CAAI,CAACG,IAvBN,IA2BO,IAAbD,GAAAA,EAAmB,IACbI,CAAAA,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,EAFA,CAGZM,OAAO,GAHK,CAIZC,IAAI,GAJQ,CAKZL,IAAI,CAAE,IALM,EAOH,IAATC,GAAAA,CARe,CASfT,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,CATe,CAWfS,CAAI,CAACD,IAALC,CAAYE,EAnDrB,CAAA,CAuDHzC,YAAY,GAvDT,CAwDHN,UAAU,GAxDP,EAkEX,QAASkD,CAAAA,CAAT,CAA8BC,CAA9B,CAAoDX,CAApD,CAA+D,CAC3D3C,MAAM,CAACC,cAAPD,CACIsD,CADJtD,aAES2C,EAFT3C,CAGI0C,CAA8B,CAACC,CAAD,CAHlC3C,EAaJ,QAASuD,CAAAA,CAAT,CAAiCC,CAAjC,CAA6C,SAEhCC,CAAAA,GAAoB,CACzBC,CAAW,CAACzC,IAAZyC,CAAiB,IAAjBA,EAGJD,CAAiB,CAACvC,SAAlBuC,CAA8BzD,MAAM,CAACmB,MAAPnB,CAAc0D,CAAW,CAACxC,SAA1BlB,CAAqC,CAC/DoB,WAAW,CAAE,CACTlB,KAAK,CAAEuD,CADE,CAEThD,YAAY,GAFH,CAGTY,QAAQ,GAHC,CADkD,CAArCrB,CANW,KAcpC,GAAIM,CAAAA,CAAC,CAAG,EAAGA,CAAC,CAAGkD,CAAU,CAACjD,OAAQ,EAAED,EACrC+C,CAAoB,CAACI,CAAiB,CAACvC,SAAnB,CAA8BsC,CAAU,CAAClD,CAAD,CAAxC,CAApB+C,OAGGI,CAAAA,EAgBX,QAASC,CAAAA,CAAT,EAAuB,IAEf,eAAgBA,CAAAA,aAChBlB,CAAAA,CAAY,CAAChD,GAAbgD,CAAiB,IAAjBA,CAAuB,GAAImB,CAAAA,GAA3BnB,KAGqB,CAArB5B,GAAAA,SAAS,CAACL,MAAVK,EAA0BgD,KAAK,CAACC,OAAND,CAAchD,SAAS,CAAC,CAAD,CAAvBgD,QACnBL,CAAAA,CAAuB,CAAC3C,SAAS,CAAC,CAAD,CAAV,KAEX,CAAnBA,CAAAA,SAAS,CAACL,OAAY,QAChBuD,CAAAA,CAAK,CAAOF,KAAP,CAAahD,SAAS,CAACL,MAAvB,EACFD,CAAC,CAAG,EAAGA,CAAC,CAAGM,SAAS,CAACL,OAAQ,EAAED,EACpCwD,CAAK,CAACxD,CAAD,CAALwD,CAAWlD,SAAS,CAACN,CAAD,CAApBwD,OAEGP,CAAAA,CAAuB,CAACO,CAAD,OAE5B,IAAIrB,CAAAA,SAAJ,CAAc,mCAAd,KD5KJ9D,CAAAA,CAAW,CAAG,GAAIoF,CAAAA,QAOlBpC,CAAQ,CAAG,GAAIoC,CAAAA,QAkFrBzE,CAAK,CAAC4B,SAAN5B,CAAkB,IAKV0E,CAAAA,MAAO,OACAxF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAewF,IANZ,CAAA,IAaVC,CAAAA,QAAS,OACFzF,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WAdN,CAAA,IAqBVG,CAAAA,eAAgB,OACTlB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASkB,aAtBN,CAAA,CA4BdwE,YA5Bc,WA4BC,IACLxE,CAAAA,CAAa,CAAGlB,CAAE,CAAC,IAAD,CAAFA,CAASkB,cADpB,MAEU,KAAjBA,EAAAA,CAFO,CAGA,EAHA,CAKJ,CAACA,CAAD,CAjCG,CAAA,IAwCVyE,CAAAA,MAAO,OACA,EAzCG,CAAA,IAgDVC,CAAAA,iBAAkB,OACX,EAjDG,CAAA,IAwDVC,CAAAA,WAAY,OACL,EAzDG,CAAA,IAgEVC,CAAAA,gBAAiB,OACV,EAjEG,CAAA,IAwEV7E,CAAAA,YAAa,OACNjB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASiB,UAzEN,CAAA,CAgFd8E,eAhFc,WAgFI,IACRvF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHc,CAI4B,UAAtC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWuF,eAJR,EAKVvF,CAAI,CAACP,KAALO,CAAWuF,eAAXvF,EArFM,CAAA,CA6FdwF,wBA7Fc,WA6Fa,IACjBxF,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,GAHuB,CAIvBA,CAAI,CAACY,gBAALZ,GAJuB,CAK4B,UAA/C,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAWwF,wBALC,EAMnBxF,CAAI,CAACP,KAALO,CAAWwF,wBAAXxF,EAnGM,CAAA,IA2GVyF,CAAAA,SAAU,SACKjG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeiG,OA5GpB,CAAA,IAmHVvF,CAAAA,YAAa,SACEV,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAeU,UApHpB,CAAA,CA2HdE,cA3Hc,WA2HG,CACbL,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA5HH,CAAA,IAmIVkG,CAAAA,kBAAmB,OACZlG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASW,QApIN,CAAA,IA2IVwF,CAAAA,UAAW,SACInG,CAAE,CAAC,IAAD,CAAFA,CAASC,KAATD,CAAemG,QA5IpB,CAAA,IAmJV9E,CAAAA,WAAY,OACLrB,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASqB,SApJN,CAAA,IA4JV+E,CAAAA,YAAa,OACNpG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASe,WA7JN,CAAA,IAqKVsF,CAAAA,cAAe,OACRrG,CAAAA,CAAE,CAAC,IAAD,CAAFA,CAASmB,OAtKN,CAAA,IAwKVkF,CAAAA,aAAa3E,EAAO,IACfA,MAGClB,CAAAA,CAAI,CAAGR,CAAE,CAAC,IAAD,EAEfQ,CAAI,CAACW,OAALX,IACuC,SAAnC,QAAOA,CAAAA,CAAI,CAACP,KAALO,CAAW6F,eAClB7F,CAAI,CAACP,KAALO,CAAW6F,YAAX7F,KAhLM,CAAA,IAyLV8F,CAAAA,aAAc,OACP,CAACtG,CAAE,CAAC,IAAD,CAAFA,CAASW,QA1LP,CAAA,IA4LV2F,CAAAA,YAA
Y5E,EAAO,CACdA,CADc,EAEfnB,CAAa,CAACP,CAAE,CAAC,IAAD,CAAH,CA9LP,CAAA,CAyMduG,SAzMc,WAyMF,EAzME,EA+MlB/E,MAAM,CAACC,cAAPD,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuC,aAAvCA,CAAsD,CAClDE,KAAK,CAAEZ,CAD2C,CAElDmB,YAAY,GAFsC,CAGlDY,QAAQ,GAH0C,CAAtDrB,EAOsB,WAAlB,QAAOgF,CAAAA,MAAP,EAAyD,WAAxB,QAAOA,CAAAA,MAAM,CAAC1F,QAC/CU,MAAM,CAACiF,cAAPjF,CAAsBV,CAAK,CAAC4B,SAA5BlB,CAAuCgF,MAAM,CAAC1F,KAAP0F,CAAa9D,SAApDlB,EAGA2B,CAAQ,CAACnC,GAATmC,CAAaqD,MAAM,CAAC1F,KAAP0F,CAAa9D,SAA1BS,CAAqCrC,CAArCqC,MChTEa,CAAAA,CAAY,CAAG,GAAIuB,CAAAA,QAGnBmB,CAAO,CAAG,EACVC,CAAM,CAAG,KA0KfzB,CAAW,CAACxC,SAAZwC,CAAwB,CAQpB0B,gBARoB,UAQHzC,EAAWG,EAAUuC,EAAS,IAC3B,IAAZvC,EAAAA,MAGoB,UAApB,QAAOA,CAAAA,CAAP,EAAkC,CAACX,CAAQ,CAACW,CAAD,OACrC,IAAIL,CAAAA,SAAJ,CAAc,+CAAd,KAGJF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBgD,CAAY,CAAGnD,CAAQ,CAACkD,CAAD,EACvBE,CAAO,CAAGD,CAAY,GACdD,CAAO,CAACE,OADM,GAEdF,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EACnCjC,CAAO,CAAG,CACZJ,QAAQ,CAARA,CADY,CAEZD,YAAY,CAAZA,CAFY,CAGZM,OAAO,CAAEmC,CAAY,IAAYD,CAAO,CAAClC,OAH7B,CAIZC,IAAI,CAAEkC,CAAY,IAAYD,CAAO,CAACjC,IAJ1B,CAKZL,IAAI,CAAE,IALM,EASZH,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,KACPK,SAAAA,aACAL,CAAAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBW,CAAzBX,SAKAS,CAAAA,CAAI,CAAG,KACI,IAARJ,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,SAK1BG,CAAI,CAAGJ,CARU,CASjBA,CAAI,CAAGA,CAAI,CAACG,IAxC2B,CA4C3CC,CAAI,CAACD,IAALC,CAAYE,EApDI,CAAA,CA8DpBsC,mBA9DoB,UA8DA7C,EAAWG,EAAUuC,EAAS,IAC9B,IAAZvC,EAAAA,SAIEP,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,EACxBiD,CAAO,CAAGpD,CAAQ,CAACkD,CAAD,CAARlD,GACFkD,CAAO,CAACE,OADNpD,GAEFkD,EACRxC,CAAY,CAAG0C,CAAO,CAAGL,CAAH,CAAaC,EAErCnC,CAAI,CAAG,KACPJ,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,EACI,IAARK,EAAAA,GAAc,IAEbA,CAAI,CAACE,QAALF,GAAkBE,CAAlBF,EACAA,CAAI,CAACC,YAALD,GAAsBC,cAET,IAATG,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,MASzBC,CAAI,CAAGJ,CAfU,CAgBjBA,CAAI,CAAGA,CAAI,CAACG,KA3FA,CAAA,CAoGpB0C,aApGoB,UAoGNhH,EAAO,IACJ,IAATA,EAAAA,CAAK,EAAkC,QAAtB,QAAOA,CAAAA,CAAK,CAACuF,UACxB,IAAIvB,CAAAA,SAAJ,CAAc,oCAAd,EAFO,GAMXF,CAAAA,CAAS,CAAGD,CAAY,CAAC,IAAD,CANb,CAOXK,CAAS,CAAGlE,CAAK,CAACuF,IAPP,CAQbpB,CAAI,CAAGL,CAAS,CAAC3D,GAAV2D,CAAcI,CAAdJ,CARM,IASL,IAARK,EAAAA,WATa,OAcX8C,CAAAA,CAAY,CAAG7D,CAAS,CAAC,IAAD,CAAOpD,CAAP,CAdb,CAkBbuE,CAAI,CAAG,IAlBM,CAmBF,IAARJ,EAAAA,CAnBU,EAmBI,IAEbA,CAAI,CAACQ,KACQ,IAATJ,GAAAA,EAEqB,IAAdJ,GAAAA,CAAI,CAACG,KAGZR,CAAS,CAACU,MAAVV,CAAiBI,CAAjBJ,EAFAA,CAAS,CAAC/C,GAAV+C,CAAcI,CAAdJ,CAAyBK,CAAI,CAACG,IAA9BR,EAFAS,CAAI,CAACD,IAALC,CAAYJ,CAAI,CAACG,KAOrBC,CAAI,CAAGJ,EAIXV,CAAkB,CACdwD,CADc,CAEd9C,CAAI,CAACO,OAALP,CAAeA,CAAI,CAACE,QAApBF,CAA+B,IAFjB,EAIW,UAAzB,QAAOA,CAAAA,CAAI,CAACE,YACR,CACAF,CAAI,CAACE,QAALF,CAAc3B,IAAd2B,CAAmB,IAAnBA,CAAyB8C,CAAzB9C,CADJ,CAEE,MAAO+C,CAAP,CAAY,CAEa,WAAnB,QAAO9G,CAAAA,OAAP,EACyB,UAAzB,QAAOA,CAAAA,OAAO,CAACQ,KAHT,EAKNR,OAAO,CAACQ,KAARR,CAAc8G,CAAd9G,MAIR+D,CAAAA,CAAI,CAACC,YAALD,GA/TE,CA+TFA,EACqC,UAArC,QAAOA,CAAAA,CAAI,CAACE,QAALF,CAAcgD,aAErBhD,CAAI,CAACE,QAALF,CAAcgD,WAAdhD,CAA0B8C,CAA1B9C,KAIAb,CAAS,CAAC2D,CAAD,QAIb9C,CAAI,CAAGA,CAAI,CAACG,WAEhBb,CAAAA,CAAkB,CAACwD,CAAD,CAAe,IAAf,EAClB1D,CAAa,CAAC0D,CAAD,CAAe,CAAf,EACbzD,CAAgB,CAACyD,CAAD,CAAe,IAAf,EAET,CAACA,CAAY,CAAChB,iBAvKL,EA4KxB1E,MAAM,CAACC,cAAPD,CAAsB0D,CAAW,CAACxC,SAAlClB,CAA6C,aAA7CA,CAA4D,CACxDE,KAAK,CAAEwD,CADiD,CAExDjD,YAAY,GAF4C,CAGxDY,QAAQ,GAHgD,CAA5DrB,EAQsB,WAAlB,QAAOgF,CAAAA,MAAP,EAC8B,WAA9B,QAAOA,CAAAA,MAAM,CAACtB,aAEd1D,MAAM,CAACiF,cAAPjF,CAAsB0D,CAAW,CAACxC,S
AAlClB,CAA6CgF,MAAM,CAACtB,WAAPsB,CAAmB9D,SAAhElB"} \ No newline at end of file diff --git a/node_modules/event-target-shim/index.d.ts b/node_modules/event-target-shim/index.d.ts new file mode 100644 index 0000000000..a30309782f --- /dev/null +++ b/node_modules/event-target-shim/index.d.ts @@ -0,0 +1,399 @@ +export as namespace EventTargetShim + +/** + * `Event` interface. + * @see https://dom.spec.whatwg.org/#event + */ +export interface Event { + /** + * The type of this event. + */ + readonly type: string + + /** + * The target of this event. + */ + readonly target: EventTarget<{}, {}, "standard"> | null + + /** + * The current target of this event. + */ + readonly currentTarget: EventTarget<{}, {}, "standard"> | null + + /** + * The target of this event. + * @deprecated + */ + readonly srcElement: any | null + + /** + * The composed path of this event. + */ + composedPath(): EventTarget<{}, {}, "standard">[] + + /** + * Constant of NONE. + */ + readonly NONE: number + + /** + * Constant of CAPTURING_PHASE. + */ + readonly CAPTURING_PHASE: number + + /** + * Constant of BUBBLING_PHASE. + */ + readonly BUBBLING_PHASE: number + + /** + * Constant of AT_TARGET. + */ + readonly AT_TARGET: number + + /** + * Indicates which phase of the event flow is currently being evaluated. + */ + readonly eventPhase: number + + /** + * Stop event bubbling. + */ + stopPropagation(): void + + /** + * Stop event bubbling. + */ + stopImmediatePropagation(): void + + /** + * Initialize event. + * @deprecated + */ + initEvent(type: string, bubbles?: boolean, cancelable?: boolean): void + + /** + * The flag indicating bubbling. + */ + readonly bubbles: boolean + + /** + * Stop event bubbling. + * @deprecated + */ + cancelBubble: boolean + + /** + * Set or get cancellation flag. + * @deprecated + */ + returnValue: boolean + + /** + * The flag indicating whether the event can be canceled. + */ + readonly cancelable: boolean + + /** + * Cancel this event. + */ + preventDefault(): void + + /** + * The flag to indicating whether the event was canceled. + */ + readonly defaultPrevented: boolean + + /** + * The flag to indicating if event is composed. + */ + readonly composed: boolean + + /** + * Indicates whether the event was dispatched by the user agent. + */ + readonly isTrusted: boolean + + /** + * The unix time of this event. + */ + readonly timeStamp: number +} + +/** + * The constructor of `EventTarget` interface. + */ +export type EventTargetConstructor< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = { + prototype: EventTarget + new(): EventTarget +} + +/** + * `EventTarget` interface. + * @see https://dom.spec.whatwg.org/#interface-eventtarget + */ +export type EventTarget< + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" +> = EventTarget.EventAttributes & { + /** + * Add a given listener to this event target. + * @param eventName The event name to add. + * @param listener The listener to add. + * @param options The options for this listener. + */ + addEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.AddOptions + ): void + + /** + * Remove a given listener from this event target. + * @param eventName The event name to remove. + * @param listener The listener to remove. + * @param options The options for this listener. 
+ */ + removeEventListener>( + type: TEventType, + listener: + | EventTarget.Listener> + | null, + options?: boolean | EventTarget.RemoveOptions + ): void + + /** + * Dispatch a given event. + * @param event The event to dispatch. + * @returns `false` if canceled. + */ + dispatchEvent>( + event: EventTarget.EventData + ): boolean +} + +export const EventTarget: EventTargetConstructor & { + /** + * Create an `EventTarget` instance with detailed event definition. + * + * The detailed event definition requires to use `defineEventAttribute()` + * function later. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * const signal = new EventTarget<{ abort: Event }, { onabort: Event }>() + * defineEventAttribute(signal, "abort") + */ + new < + TEvents extends EventTarget.EventDefinition, + TEventAttributes extends EventTarget.EventDefinition, + TMode extends EventTarget.Mode = "loose" + >(): EventTarget + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > + + /** + * Define an `EventTarget` constructor with attribute events and detailed event definition. + * + * Unfortunately, the second type parameter `TEventAttributes` was needed + * because we cannot compute string literal types. + * + * @example + * class AbortSignal extends EventTarget<{ abort: Event }, { onabort: Event }>("abort") { + * abort(): void {} + * } + * + * @param events Optional event attributes (e.g. passing in `"click"` adds `onclick` to prototype). + */ + < + TEvents extends EventTarget.EventDefinition = {}, + TEventAttributes extends EventTarget.EventDefinition = {}, + TMode extends EventTarget.Mode = "loose" + >(event0: string, ...events: string[]): EventTargetConstructor< + TEvents, + TEventAttributes, + TMode + > +} + +export namespace EventTarget { + /** + * Options of `removeEventListener()` method. + */ + export interface RemoveOptions { + /** + * The flag to indicate that the listener is for the capturing phase. + */ + capture?: boolean + } + + /** + * Options of `addEventListener()` method. + */ + export interface AddOptions extends RemoveOptions { + /** + * The flag to indicate that the listener doesn't support + * `event.preventDefault()` operation. + */ + passive?: boolean + /** + * The flag to indicate that the listener will be removed on the first + * event. + */ + once?: boolean + } + + /** + * The type of regular listeners. + */ + export interface FunctionListener { + (event: TEvent): void + } + + /** + * The type of object listeners. + */ + export interface ObjectListener { + handleEvent(event: TEvent): void + } + + /** + * The type of listeners. + */ + export type Listener = + | FunctionListener + | ObjectListener + + /** + * Event definition. 
+ */ + export type EventDefinition = { + readonly [key: string]: Event + } + + /** + * Mapped type for event attributes. + */ + export type EventAttributes = { + [P in keyof TEventAttributes]: + | FunctionListener + | null + } + + /** + * The type of event data for `dispatchEvent()` method. + */ + export type EventData< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + TMode extends Mode + > = + TEventType extends keyof TEvents + ? ( + // Require properties which are not generated automatically. + & Pick< + TEvents[TEventType], + Exclude + > + // Properties which are generated automatically are optional. + & Partial> + ) + : ( + TMode extends "standard" + ? Event + : Event | NonStandardEvent + ) + + /** + * The string literal types of the properties which are generated + * automatically in `dispatchEvent()` method. + */ + export type OmittableEventKeys = Exclude + + /** + * The type of event data. + */ + export type NonStandardEvent = { + [key: string]: any + type: string + } + + /** + * The type of listeners. + */ + export type PickEvent< + TEvents extends EventDefinition, + TEventType extends keyof TEvents | string, + > = + TEventType extends keyof TEvents + ? TEvents[TEventType] + : Event + + /** + * Event type candidates. + */ + export type EventType< + TEvents extends EventDefinition, + TMode extends Mode + > = + TMode extends "strict" + ? keyof TEvents + : keyof TEvents | string + + /** + * - `"strict"` ..... Methods don't accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"loose"` ...... Methods accept unknown events. + * `dispatchEvent()` accepts partial objects. + * - `"standard"` ... Methods accept unknown events. + * `dispatchEvent()` doesn't accept partial objects. + */ + export type Mode = "strict" | "standard" | "loose" +} + +/** + * Specialized `type` property. + */ +export type Type = { type: T } + +/** + * Define an event attribute (e.g. `eventTarget.onclick`). + * @param prototype The event target prototype to define an event attribute. + * @param eventName The event name to define. 
+ */ +export function defineEventAttribute( + prototype: EventTarget, + eventName: string +): void + +export default EventTarget diff --git a/node_modules/event-target-shim/package.json b/node_modules/event-target-shim/package.json new file mode 100644 index 0000000000..40326f3d74 --- /dev/null +++ b/node_modules/event-target-shim/package.json @@ -0,0 +1,82 @@ +{ + "name": "event-target-shim", + "version": "5.0.1", + "description": "An implementation of WHATWG EventTarget interface.", + "main": "dist/event-target-shim", + "types": "index.d.ts", + "files": [ + "dist", + "index.d.ts" + ], + "engines": { + "node": ">=6" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "nyc report --reporter lcov && opener coverage/lcov-report/index.html", + "lint": "eslint src test scripts --ext .js,.mjs", + "build": "rollup -c scripts/rollup.config.js", + "pretest": "npm run lint", + "test": "run-s test:*", + "test:mocha": "nyc --require ./scripts/babel-register mocha test/*.mjs", + "test:karma": "karma start scripts/karma.conf.js --single-run", + "watch": "run-p watch:*", + "watch:mocha": "mocha test/*.mjs --require ./scripts/babel-register --watch --watch-extensions js,mjs --growl", + "watch:karma": "karma start scripts/karma.conf.js --watch", + "codecov": "codecov" + }, + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.2.3", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + "karma-firefox-launcher": "^1.0.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.1", + "rollup-plugin-babel": "^4.3.2", + "rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-json": "^3.1.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-watch": "^4.3.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "repository": { + "type": "git", + "url": "https://github.com/mysticatea/event-target-shim.git" + }, + "keywords": [ + "w3c", + "whatwg", + "eventtarget", + "event", + "events", + "shim" + ], + "author": "Toru Nagashima", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/event-target-shim/issues" + }, + "homepage": "https://github.com/mysticatea/event-target-shim" +} diff --git a/node_modules/events/.airtap.yml b/node_modules/events/.airtap.yml new file mode 100644 index 0000000000..c7a8a87d5e --- /dev/null +++ b/node_modules/events/.airtap.yml @@ -0,0 +1,15 @@ +sauce_connect: true +loopback: airtap.local +browsers: + - name: chrome + version: latest + - name: firefox + version: latest + - name: safari + version: 9..latest + - name: iphone + version: latest + - name: ie + version: 9..latest + - name: microsoftedge + version: 13..latest diff --git a/node_modules/events/.github/FUNDING.yml b/node_modules/events/.github/FUNDING.yml new file mode 100644 index 0000000000..8b8cb78ba9 --- /dev/null +++ b/node_modules/events/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms 
+ +github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/events +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/events/.travis.yml b/node_modules/events/.travis.yml new file mode 100644 index 0000000000..486dc3c4c1 --- /dev/null +++ b/node_modules/events/.travis.yml @@ -0,0 +1,18 @@ +dist: xenial +os: linux +language: node_js +node_js: + - 'stable' + - 'lts/*' + - '0.12' +script: + - npm test + - if [ "${TRAVIS_PULL_REQUEST}" = "false" ] && [ "${TRAVIS_NODE_VERSION}" = "stable" ]; then npm run test:browsers; fi +addons: + sauce_connect: true + hosts: + - airtap.local +env: + global: + - secure: XcBiD8yReflut9q7leKsigDZ0mI3qTKH+QrNVY8DaqlomJOZw8aOrVuX9Jz12l86ZJ41nbxmKnRNkFzcVr9mbP9YaeTb3DpeOBWmvaoSfud9Wnc16VfXtc1FCcwDhSVcSiM3UtnrmFU5cH+Dw1LPh5PbfylYOS/nJxUvG0FFLqI= + - secure: jNWtEbqhUdQ0xXDHvCYfUbKYeJCi6a7B4LsrcxYCyWWn4NIgncE5x2YbB+FSUUFVYfz0dsn5RKP1oHB99f0laUEo18HBNkrAS/rtyOdVzcpJjbQ6kgSILGjnJD/Ty1B57Rcz3iyev5Y7bLZ6Y1FbDnk/i9/l0faOGz8vTC3Vdkc= diff --git a/node_modules/events/History.md b/node_modules/events/History.md new file mode 100644 index 0000000000..f48bf210da --- /dev/null +++ b/node_modules/events/History.md @@ -0,0 +1,118 @@ +# 3.3.0 + + - Support EventTarget emitters in `events.once` from Node.js 12.11.0. + + Now you can use the `events.once` function with objects that implement the EventTarget interface. This interface is used widely in + the DOM and other web APIs. + + ```js + var events = require('events'); + var assert = require('assert'); + + async function connect() { + var ws = new WebSocket('wss://example.com'); + await events.once(ws, 'open'); + assert(ws.readyState === WebSocket.OPEN); + } + + async function onClick() { + await events.once(document.body, 'click'); + alert('you clicked the page!'); + } + ``` + +# 3.2.0 + + - Add `events.once` from Node.js 11.13.0. + + To use this function, Promises must be supported in the environment. Use a polyfill like `es6-promise` if you support older browsers. + +# 3.1.0 (2020-01-08) + +`events` now matches the Node.js 11.12.0 API. + + - pass through return value in wrapped `emitter.once()` listeners + + Now, this works: + ```js + emitter.once('myevent', function () { return 1; }); + var listener = emitter.rawListeners('myevent')[0] + assert(listener() === 1); + ``` + Previously, `listener()` would return undefined regardless of the implementation. + + Ported from https://github.com/nodejs/node/commit/acc506c2d2771dab8d7bba6d3452bc5180dff7cf + + - Reduce code duplication in listener type check ([#67](https://github.com/Gozala/events/pull/67) by [@friederbluemle](https://github.com/friederbluemle)). + - Improve `emitter.once()` performance in some engines + +# 3.0.0 (2018-05-25) + +**This version drops support for IE8.** `events` no longer includes polyfills +for ES5 features. If you need to support older environments, use an ES5 shim +like [es5-shim](https://npmjs.com/package/es5-shim). Both the shim and sham +versions of es5-shim are necessary. 
+ + - Update to events code from Node.js 10.x + - (semver major) Adds `off()` method + - Port more tests from Node.js + - Switch browser tests to airtap, making things more reliable + +# 2.1.0 (2018-05-25) + + - add Emitter#rawListeners from Node.js v9.4 + +# 2.0.0 (2018-02-02) + + - Update to events code from node.js 8.x + - Adds `prependListener()` and `prependOnceListener()` + - Adds `eventNames()` method + - (semver major) Unwrap `once()` listeners in `listeners()` + - copy tests from node.js + +Note that this version doubles the gzipped size, jumping from 1.1KB to 2.1KB, +due to new methods and runtime performance improvements. Be aware of that when +upgrading. + +# 1.1.1 (2016-06-22) + + - add more context to errors if they are not instanceof Error + +# 1.1.0 (2015-09-29) + + - add Emitter#listerCount (to match node v4 api) + +# 1.0.2 (2014-08-28) + + - remove un-reachable code + - update devDeps + +## 1.0.1 / 2014-05-11 + + - check for console.trace before using it + +## 1.0.0 / 2013-12-10 + + - Update to latest events code from node.js 0.10 + - copy tests from node.js + +## 0.4.0 / 2011-07-03 ## + + - Switching to graphquire@0.8.0 + +## 0.3.0 / 2011-07-03 ## + + - Switching to URL based module require. + +## 0.2.0 / 2011-06-10 ## + + - Simplified package structure. + - Graphquire for dependency management. + +## 0.1.1 / 2011-05-16 ## + + - Unhandled errors are logged via console.error + +## 0.1.0 / 2011-04-22 ## + + - Initial release diff --git a/node_modules/events/LICENSE b/node_modules/events/LICENSE new file mode 100644 index 0000000000..52ed3b0a63 --- /dev/null +++ b/node_modules/events/LICENSE @@ -0,0 +1,22 @@ +MIT + +Copyright Joyent, Inc. and other Node contributors. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to permit +persons to whom the Software is furnished to do so, subject to the +following conditions: + +The above copyright notice and this permission notice shall be included +in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/events/Readme.md b/node_modules/events/Readme.md new file mode 100644 index 0000000000..80849c0b2d --- /dev/null +++ b/node_modules/events/Readme.md @@ -0,0 +1,50 @@ +# events [![Build Status](https://travis-ci.org/Gozala/events.png?branch=master)](https://travis-ci.org/Gozala/events) + +> Node's event emitter for all engines. + +This implements the Node.js [`events`][node.js docs] module for environments that do not have it, like browsers. + +> `events` currently matches the **Node.js 11.13.0** API. + +Note that the `events` module uses ES5 features. If you need to support very old browsers like IE8, use a shim like [`es5-shim`](https://www.npmjs.com/package/es5-shim). You need both the shim and the sham versions of `es5-shim`. 
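
The changelog above notes that `events.once` (added in 3.2.0, with EventTarget support in 3.3.0) requires Promise support in the environment. As a minimal sketch of how that helper pairs with the `EventEmitter` exported here — the `'ready'` event name and the timer are purely illustrative, not part of the package's API surface:

```javascript
var EventEmitter = require('events');
var once = require('events').once;

var ee = new EventEmitter();

// `once` resolves with the array of arguments passed to `emit`,
// or rejects if the emitter fires 'error' first.
once(ee, 'ready').then(function (args) {
  console.log('ready received:', args[0]); // -> 42
});

setTimeout(function () {
  ee.emit('ready', 42);
}, 10);
```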
+ +This module is maintained, but only by very few people. If you'd like to help, let us know in the [Maintainer Needed](https://github.com/Gozala/events/issues/43) issue! + +## Install + +You usually do not have to install `events` yourself! If your code runs in Node.js, `events` is built in. If your code runs in the browser, bundlers like [browserify](https://github.com/browserify/browserify) or [webpack](https://github.com/webpack/webpack) also include the `events` module. + +But if none of those apply, with npm do: + +``` +npm install events +``` + +## Usage + +```javascript +var EventEmitter = require('events') + +var ee = new EventEmitter() +ee.on('message', function (text) { + console.log(text) +}) +ee.emit('message', 'hello world') +``` + +## API + +See the [Node.js EventEmitter docs][node.js docs]. `events` currently matches the Node.js 11.13.0 API. + +## Contributing + +PRs are very welcome! The main way to contribute to `events` is by porting features, bugfixes and tests from Node.js. Ideally, code contributions to this module are copy-pasted from Node.js and transpiled to ES5, rather than reimplemented from scratch. Matching the Node.js code as closely as possible makes maintenance simpler when new changes land in Node.js. +This module intends to provide exactly the same API as Node.js, so features that are not available in the core `events` module will not be accepted. Feature requests should instead be directed at [nodejs/node](https://github.com/nodejs/node) and will be added to this module once they are implemented in Node.js. + +If there is a difference in behaviour between Node.js's `events` module and this module, please open an issue! + +## License + +[MIT](./LICENSE) + +[node.js docs]: https://nodejs.org/dist/v11.13.0/docs/api/events.html diff --git a/node_modules/events/events.js b/node_modules/events/events.js new file mode 100644 index 0000000000..34b69a0b4a --- /dev/null +++ b/node_modules/events/events.js @@ -0,0 +1,497 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +var R = typeof Reflect === 'object' ? Reflect : null +var ReflectApply = R && typeof R.apply === 'function' + ? 
R.apply + : function ReflectApply(target, receiver, args) { + return Function.prototype.apply.call(target, receiver, args); + } + +var ReflectOwnKeys +if (R && typeof R.ownKeys === 'function') { + ReflectOwnKeys = R.ownKeys +} else if (Object.getOwnPropertySymbols) { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target) + .concat(Object.getOwnPropertySymbols(target)); + }; +} else { + ReflectOwnKeys = function ReflectOwnKeys(target) { + return Object.getOwnPropertyNames(target); + }; +} + +function ProcessEmitWarning(warning) { + if (console && console.warn) console.warn(warning); +} + +var NumberIsNaN = Number.isNaN || function NumberIsNaN(value) { + return value !== value; +} + +function EventEmitter() { + EventEmitter.init.call(this); +} +module.exports = EventEmitter; +module.exports.once = once; + +// Backwards-compat with node 0.10.x +EventEmitter.EventEmitter = EventEmitter; + +EventEmitter.prototype._events = undefined; +EventEmitter.prototype._eventsCount = 0; +EventEmitter.prototype._maxListeners = undefined; + +// By default EventEmitters will print a warning if more than 10 listeners are +// added to it. This is a useful default which helps finding memory leaks. +var defaultMaxListeners = 10; + +function checkListener(listener) { + if (typeof listener !== 'function') { + throw new TypeError('The "listener" argument must be of type Function. Received type ' + typeof listener); + } +} + +Object.defineProperty(EventEmitter, 'defaultMaxListeners', { + enumerable: true, + get: function() { + return defaultMaxListeners; + }, + set: function(arg) { + if (typeof arg !== 'number' || arg < 0 || NumberIsNaN(arg)) { + throw new RangeError('The value of "defaultMaxListeners" is out of range. It must be a non-negative number. Received ' + arg + '.'); + } + defaultMaxListeners = arg; + } +}); + +EventEmitter.init = function() { + + if (this._events === undefined || + this._events === Object.getPrototypeOf(this)._events) { + this._events = Object.create(null); + this._eventsCount = 0; + } + + this._maxListeners = this._maxListeners || undefined; +}; + +// Obviously not all Emitters should be limited to 10. This function allows +// that to be increased. Set to zero for unlimited. +EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) { + if (typeof n !== 'number' || n < 0 || NumberIsNaN(n)) { + throw new RangeError('The value of "n" is out of range. It must be a non-negative number. Received ' + n + '.'); + } + this._maxListeners = n; + return this; +}; + +function _getMaxListeners(that) { + if (that._maxListeners === undefined) + return EventEmitter.defaultMaxListeners; + return that._maxListeners; +} + +EventEmitter.prototype.getMaxListeners = function getMaxListeners() { + return _getMaxListeners(this); +}; + +EventEmitter.prototype.emit = function emit(type) { + var args = []; + for (var i = 1; i < arguments.length; i++) args.push(arguments[i]); + var doError = (type === 'error'); + + var events = this._events; + if (events !== undefined) + doError = (doError && events.error === undefined); + else if (!doError) + return false; + + // If there is no 'error' event listener then throw. + if (doError) { + var er; + if (args.length > 0) + er = args[0]; + if (er instanceof Error) { + // Note: The comments on the `throw` lines are intentional, they show + // up in Node's output if this results in an unhandled exception. 
+ throw er; // Unhandled 'error' event + } + // At least give some kind of context to the user + var err = new Error('Unhandled error.' + (er ? ' (' + er.message + ')' : '')); + err.context = er; + throw err; // Unhandled 'error' event + } + + var handler = events[type]; + + if (handler === undefined) + return false; + + if (typeof handler === 'function') { + ReflectApply(handler, this, args); + } else { + var len = handler.length; + var listeners = arrayClone(handler, len); + for (var i = 0; i < len; ++i) + ReflectApply(listeners[i], this, args); + } + + return true; +}; + +function _addListener(target, type, listener, prepend) { + var m; + var events; + var existing; + + checkListener(listener); + + events = target._events; + if (events === undefined) { + events = target._events = Object.create(null); + target._eventsCount = 0; + } else { + // To avoid recursion in the case that type === "newListener"! Before + // adding it to the listeners, first emit "newListener". + if (events.newListener !== undefined) { + target.emit('newListener', type, + listener.listener ? listener.listener : listener); + + // Re-assign `events` because a newListener handler could have caused the + // this._events to be assigned to a new object + events = target._events; + } + existing = events[type]; + } + + if (existing === undefined) { + // Optimize the case of one listener. Don't need the extra array object. + existing = events[type] = listener; + ++target._eventsCount; + } else { + if (typeof existing === 'function') { + // Adding the second element, need to change to array. + existing = events[type] = + prepend ? [listener, existing] : [existing, listener]; + // If we've already got an array, just append. + } else if (prepend) { + existing.unshift(listener); + } else { + existing.push(listener); + } + + // Check for listener leak + m = _getMaxListeners(target); + if (m > 0 && existing.length > m && !existing.warned) { + existing.warned = true; + // No error code for this since it is a Warning + // eslint-disable-next-line no-restricted-syntax + var w = new Error('Possible EventEmitter memory leak detected. ' + + existing.length + ' ' + String(type) + ' listeners ' + + 'added. 
Use emitter.setMaxListeners() to ' + + 'increase limit'); + w.name = 'MaxListenersExceededWarning'; + w.emitter = target; + w.type = type; + w.count = existing.length; + ProcessEmitWarning(w); + } + } + + return target; +} + +EventEmitter.prototype.addListener = function addListener(type, listener) { + return _addListener(this, type, listener, false); +}; + +EventEmitter.prototype.on = EventEmitter.prototype.addListener; + +EventEmitter.prototype.prependListener = + function prependListener(type, listener) { + return _addListener(this, type, listener, true); + }; + +function onceWrapper() { + if (!this.fired) { + this.target.removeListener(this.type, this.wrapFn); + this.fired = true; + if (arguments.length === 0) + return this.listener.call(this.target); + return this.listener.apply(this.target, arguments); + } +} + +function _onceWrap(target, type, listener) { + var state = { fired: false, wrapFn: undefined, target: target, type: type, listener: listener }; + var wrapped = onceWrapper.bind(state); + wrapped.listener = listener; + state.wrapFn = wrapped; + return wrapped; +} + +EventEmitter.prototype.once = function once(type, listener) { + checkListener(listener); + this.on(type, _onceWrap(this, type, listener)); + return this; +}; + +EventEmitter.prototype.prependOnceListener = + function prependOnceListener(type, listener) { + checkListener(listener); + this.prependListener(type, _onceWrap(this, type, listener)); + return this; + }; + +// Emits a 'removeListener' event if and only if the listener was removed. +EventEmitter.prototype.removeListener = + function removeListener(type, listener) { + var list, events, position, i, originalListener; + + checkListener(listener); + + events = this._events; + if (events === undefined) + return this; + + list = events[type]; + if (list === undefined) + return this; + + if (list === listener || list.listener === listener) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else { + delete events[type]; + if (events.removeListener) + this.emit('removeListener', type, list.listener || listener); + } + } else if (typeof list !== 'function') { + position = -1; + + for (i = list.length - 1; i >= 0; i--) { + if (list[i] === listener || list[i].listener === listener) { + originalListener = list[i].listener; + position = i; + break; + } + } + + if (position < 0) + return this; + + if (position === 0) + list.shift(); + else { + spliceOne(list, position); + } + + if (list.length === 1) + events[type] = list[0]; + + if (events.removeListener !== undefined) + this.emit('removeListener', type, originalListener || listener); + } + + return this; + }; + +EventEmitter.prototype.off = EventEmitter.prototype.removeListener; + +EventEmitter.prototype.removeAllListeners = + function removeAllListeners(type) { + var listeners, events, i; + + events = this._events; + if (events === undefined) + return this; + + // not listening for removeListener, no need to emit + if (events.removeListener === undefined) { + if (arguments.length === 0) { + this._events = Object.create(null); + this._eventsCount = 0; + } else if (events[type] !== undefined) { + if (--this._eventsCount === 0) + this._events = Object.create(null); + else + delete events[type]; + } + return this; + } + + // emit removeListener for all listeners on all events + if (arguments.length === 0) { + var keys = Object.keys(events); + var key; + for (i = 0; i < keys.length; ++i) { + key = keys[i]; + if (key === 'removeListener') continue; + this.removeAllListeners(key); + } + 
this.removeAllListeners('removeListener'); + this._events = Object.create(null); + this._eventsCount = 0; + return this; + } + + listeners = events[type]; + + if (typeof listeners === 'function') { + this.removeListener(type, listeners); + } else if (listeners !== undefined) { + // LIFO order + for (i = listeners.length - 1; i >= 0; i--) { + this.removeListener(type, listeners[i]); + } + } + + return this; + }; + +function _listeners(target, type, unwrap) { + var events = target._events; + + if (events === undefined) + return []; + + var evlistener = events[type]; + if (evlistener === undefined) + return []; + + if (typeof evlistener === 'function') + return unwrap ? [evlistener.listener || evlistener] : [evlistener]; + + return unwrap ? + unwrapListeners(evlistener) : arrayClone(evlistener, evlistener.length); +} + +EventEmitter.prototype.listeners = function listeners(type) { + return _listeners(this, type, true); +}; + +EventEmitter.prototype.rawListeners = function rawListeners(type) { + return _listeners(this, type, false); +}; + +EventEmitter.listenerCount = function(emitter, type) { + if (typeof emitter.listenerCount === 'function') { + return emitter.listenerCount(type); + } else { + return listenerCount.call(emitter, type); + } +}; + +EventEmitter.prototype.listenerCount = listenerCount; +function listenerCount(type) { + var events = this._events; + + if (events !== undefined) { + var evlistener = events[type]; + + if (typeof evlistener === 'function') { + return 1; + } else if (evlistener !== undefined) { + return evlistener.length; + } + } + + return 0; +} + +EventEmitter.prototype.eventNames = function eventNames() { + return this._eventsCount > 0 ? ReflectOwnKeys(this._events) : []; +}; + +function arrayClone(arr, n) { + var copy = new Array(n); + for (var i = 0; i < n; ++i) + copy[i] = arr[i]; + return copy; +} + +function spliceOne(list, index) { + for (; index + 1 < list.length; index++) + list[index] = list[index + 1]; + list.pop(); +} + +function unwrapListeners(arr) { + var ret = new Array(arr.length); + for (var i = 0; i < ret.length; ++i) { + ret[i] = arr[i].listener || arr[i]; + } + return ret; +} + +function once(emitter, name) { + return new Promise(function (resolve, reject) { + function errorListener(err) { + emitter.removeListener(name, resolver); + reject(err); + } + + function resolver() { + if (typeof emitter.removeListener === 'function') { + emitter.removeListener('error', errorListener); + } + resolve([].slice.call(arguments)); + }; + + eventTargetAgnosticAddListener(emitter, name, resolver, { once: true }); + if (name !== 'error') { + addErrorHandlerIfEventEmitter(emitter, errorListener, { once: true }); + } + }); +} + +function addErrorHandlerIfEventEmitter(emitter, handler, flags) { + if (typeof emitter.on === 'function') { + eventTargetAgnosticAddListener(emitter, 'error', handler, flags); + } +} + +function eventTargetAgnosticAddListener(emitter, name, listener, flags) { + if (typeof emitter.on === 'function') { + if (flags.once) { + emitter.once(name, listener); + } else { + emitter.on(name, listener); + } + } else if (typeof emitter.addEventListener === 'function') { + // EventTarget does not have `error` event semantics like Node + // EventEmitters, we do not listen for `error` events here. + emitter.addEventListener(name, function wrapListener(arg) { + // IE does not have builtin `{ once: true }` support so we + // have to do it manually. 
+ if (flags.once) { + emitter.removeEventListener(name, wrapListener); + } + listener(arg); + }); + } else { + throw new TypeError('The "emitter" argument must be of type EventEmitter. Received type ' + typeof emitter); + } +} diff --git a/node_modules/events/package.json b/node_modules/events/package.json new file mode 100644 index 0000000000..b9580d8814 --- /dev/null +++ b/node_modules/events/package.json @@ -0,0 +1,37 @@ +{ + "name": "events", + "version": "3.3.0", + "description": "Node's event emitter for all engines.", + "keywords": [ + "events", + "eventEmitter", + "eventDispatcher", + "listeners" + ], + "author": "Irakli Gozalishvili (http://jeditoolkit.com)", + "repository": { + "type": "git", + "url": "git://github.com/Gozala/events.git", + "web": "https://github.com/Gozala/events" + }, + "bugs": { + "url": "http://github.com/Gozala/events/issues/" + }, + "main": "./events.js", + "engines": { + "node": ">=0.8.x" + }, + "devDependencies": { + "airtap": "^1.0.0", + "functions-have-names": "^1.2.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "isarray": "^2.0.5", + "tape": "^5.0.0" + }, + "scripts": { + "test": "node tests/index.js", + "test:browsers": "airtap -- tests/index.js" + }, + "license": "MIT" +} diff --git a/node_modules/events/security.md b/node_modules/events/security.md new file mode 100644 index 0000000000..a14ace6a57 --- /dev/null +++ b/node_modules/events/security.md @@ -0,0 +1,10 @@ +# Security Policy + +## Supported Versions +Only the latest major version is supported at any given time. + +## Reporting a Vulnerability + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. diff --git a/node_modules/events/tests/add-listeners.js b/node_modules/events/tests/add-listeners.js new file mode 100644 index 0000000000..9b578272ba --- /dev/null +++ b/node_modules/events/tests/add-listeners.js @@ -0,0 +1,111 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var common = require('./common'); +var assert = require('assert'); +var EventEmitter = require('../'); + +{ + var ee = new EventEmitter(); + var events_new_listener_emitted = []; + var listeners_new_listener_emitted = []; + + // Sanity check + assert.strictEqual(ee.addListener, ee.on); + + ee.on('newListener', function(event, listener) { + // Don't track newListener listeners. 
+ if (event === 'newListener') + return; + + events_new_listener_emitted.push(event); + listeners_new_listener_emitted.push(listener); + }); + + var hello = common.mustCall(function(a, b) { + assert.strictEqual('a', a); + assert.strictEqual('b', b); + }); + + ee.once('newListener', function(name, listener) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(listener, hello); + + var listeners = this.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + }); + + ee.on('hello', hello); + ee.once('foo', assert.fail); + + assert.ok(Array.isArray(events_new_listener_emitted)); + assert.strictEqual(events_new_listener_emitted.length, 2); + assert.strictEqual(events_new_listener_emitted[0], 'hello'); + assert.strictEqual(events_new_listener_emitted[1], 'foo'); + + assert.ok(Array.isArray(listeners_new_listener_emitted)); + assert.strictEqual(listeners_new_listener_emitted.length, 2); + assert.strictEqual(listeners_new_listener_emitted[0], hello); + assert.strictEqual(listeners_new_listener_emitted[1], assert.fail); + + ee.emit('hello', 'a', 'b'); +} + +// just make sure that this doesn't throw: +{ + var f = new EventEmitter(); + + f.setMaxListeners(0); +} + +{ + var listen1 = function() {}; + var listen2 = function() {}; + var ee = new EventEmitter(); + + ee.once('newListener', function() { + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + ee.once('newListener', function() { + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + }); + ee.on('hello', listen2); + }); + ee.on('hello', listen1); + // The order of listeners on an event is not always the order in which the + // listeners were added. + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 2); + assert.strictEqual(listeners[0], listen2); + assert.strictEqual(listeners[1], listen1); +} + +// Verify that the listener must be a function +assert.throws(function() { + var ee = new EventEmitter(); + + ee.on('foo', null); +}, /^TypeError: The "listener" argument must be of type Function. Received type object$/); diff --git a/node_modules/events/tests/check-listener-leaks.js b/node_modules/events/tests/check-listener-leaks.js new file mode 100644 index 0000000000..7fce48f37b --- /dev/null +++ b/node_modules/events/tests/check-listener-leaks.js @@ -0,0 +1,101 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var common = require('./common'); +var assert = require('assert'); +var events = require('../'); + +// Redirect warning output to tape. +var consoleWarn = console.warn; +console.warn = common.test.comment; + +common.test.on('end', function () { + console.warn = consoleWarn; +}); + +// default +{ + var e = new events.EventEmitter(); + + for (var i = 0; i < 10; i++) { + e.on('default', common.mustNotCall()); + } + assert.ok(!e._events['default'].hasOwnProperty('warned')); + e.on('default', common.mustNotCall()); + assert.ok(e._events['default'].warned); + + // specific + e.setMaxListeners(5); + for (var i = 0; i < 5; i++) { + e.on('specific', common.mustNotCall()); + } + assert.ok(!e._events['specific'].hasOwnProperty('warned')); + e.on('specific', common.mustNotCall()); + assert.ok(e._events['specific'].warned); + + // only one + e.setMaxListeners(1); + e.on('only one', common.mustNotCall()); + assert.ok(!e._events['only one'].hasOwnProperty('warned')); + e.on('only one', common.mustNotCall()); + assert.ok(e._events['only one'].hasOwnProperty('warned')); + + // unlimited + e.setMaxListeners(0); + for (var i = 0; i < 1000; i++) { + e.on('unlimited', common.mustNotCall()); + } + assert.ok(!e._events['unlimited'].hasOwnProperty('warned')); +} + +// process-wide +{ + events.EventEmitter.defaultMaxListeners = 42; + var e = new events.EventEmitter(); + + for (var i = 0; i < 42; ++i) { + e.on('fortytwo', common.mustNotCall()); + } + assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); + e.on('fortytwo', common.mustNotCall()); + assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); + delete e._events['fortytwo'].warned; + + events.EventEmitter.defaultMaxListeners = 44; + e.on('fortytwo', common.mustNotCall()); + assert.ok(!e._events['fortytwo'].hasOwnProperty('warned')); + e.on('fortytwo', common.mustNotCall()); + assert.ok(e._events['fortytwo'].hasOwnProperty('warned')); +} + +// but _maxListeners still has precedence over defaultMaxListeners +{ + events.EventEmitter.defaultMaxListeners = 42; + var e = new events.EventEmitter(); + e.setMaxListeners(1); + e.on('uno', common.mustNotCall()); + assert.ok(!e._events['uno'].hasOwnProperty('warned')); + e.on('uno', common.mustNotCall()); + assert.ok(e._events['uno'].hasOwnProperty('warned')); + + // chainable + assert.strictEqual(e, e.setMaxListeners(1)); +} diff --git a/node_modules/events/tests/common.js b/node_modules/events/tests/common.js new file mode 100644 index 0000000000..49569b05f5 --- /dev/null +++ b/node_modules/events/tests/common.js @@ -0,0 +1,104 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var test = require('tape'); +var assert = require('assert'); + +var noop = function() {}; + +var mustCallChecks = []; + +function runCallChecks(exitCode) { + if (exitCode !== 0) return; + + var failed = filter(mustCallChecks, function(context) { + if ('minimum' in context) { + context.messageSegment = 'at least ' + context.minimum; + return context.actual < context.minimum; + } else { + context.messageSegment = 'exactly ' + context.exact; + return context.actual !== context.exact; + } + }); + + for (var i = 0; i < failed.length; i++) { + var context = failed[i]; + console.log('Mismatched %s function calls. Expected %s, actual %d.', + context.name, + context.messageSegment, + context.actual); + // IE8 has no .stack + if (context.stack) console.log(context.stack.split('\n').slice(2).join('\n')); + } + + assert.strictEqual(failed.length, 0); +} + +exports.mustCall = function(fn, exact) { + return _mustCallInner(fn, exact, 'exact'); +}; + +function _mustCallInner(fn, criteria, field) { + if (typeof criteria == 'undefined') criteria = 1; + + if (typeof fn === 'number') { + criteria = fn; + fn = noop; + } else if (fn === undefined) { + fn = noop; + } + + if (typeof criteria !== 'number') + throw new TypeError('Invalid ' + field + ' value: ' + criteria); + + var context = { + actual: 0, + stack: (new Error()).stack, + name: fn.name || '' + }; + + context[field] = criteria; + + // add the exit listener only once to avoid listener leak warnings + if (mustCallChecks.length === 0) test.onFinish(function() { runCallChecks(0); }); + + mustCallChecks.push(context); + + return function() { + context.actual++; + return fn.apply(this, arguments); + }; +} + +exports.mustNotCall = function(msg) { + return function mustNotCall() { + assert.fail(msg || 'function should not have been called'); + }; +}; + +function filter(arr, fn) { + if (arr.filter) return arr.filter(fn); + var filtered = []; + for (var i = 0; i < arr.length; i++) { + if (fn(arr[i], i, arr)) filtered.push(arr[i]); + } + return filtered +} diff --git a/node_modules/events/tests/errors.js b/node_modules/events/tests/errors.js new file mode 100644 index 0000000000..a23df437f0 --- /dev/null +++ b/node_modules/events/tests/errors.js @@ -0,0 +1,13 @@ +'use strict'; +var assert = require('assert'); +var EventEmitter = require('../'); + +var EE = new EventEmitter(); + +assert.throws(function () { + EE.emit('error', 'Accepts a string'); +}, 'Error: Unhandled error. (Accepts a string)'); + +assert.throws(function () { + EE.emit('error', { message: 'Error!' }); +}, 'Unhandled error. 
([object Object])'); diff --git a/node_modules/events/tests/events-list.js b/node_modules/events/tests/events-list.js new file mode 100644 index 0000000000..08aa62177e --- /dev/null +++ b/node_modules/events/tests/events-list.js @@ -0,0 +1,28 @@ +'use strict'; + +var EventEmitter = require('../'); +var assert = require('assert'); + +var EE = new EventEmitter(); +var m = function() {}; +EE.on('foo', function() {}); +assert.equal(1, EE.eventNames().length); +assert.equal('foo', EE.eventNames()[0]); +EE.on('bar', m); +assert.equal(2, EE.eventNames().length); +assert.equal('foo', EE.eventNames()[0]); +assert.equal('bar', EE.eventNames()[1]); +EE.removeListener('bar', m); +assert.equal(1, EE.eventNames().length); +assert.equal('foo', EE.eventNames()[0]); + +if (typeof Symbol !== 'undefined') { + var s = Symbol('s'); + EE.on(s, m); + assert.equal(2, EE.eventNames().length); + assert.equal('foo', EE.eventNames()[0]); + assert.equal(s, EE.eventNames()[1]); + EE.removeListener(s, m); + assert.equal(1, EE.eventNames().length); + assert.equal('foo', EE.eventNames()[0]); +} diff --git a/node_modules/events/tests/events-once.js b/node_modules/events/tests/events-once.js new file mode 100644 index 0000000000..dae864963d --- /dev/null +++ b/node_modules/events/tests/events-once.js @@ -0,0 +1,234 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../').EventEmitter; +var once = require('../').once; +var has = require('has'); +var assert = require('assert'); + +function Event(type) { + this.type = type; +} + +function EventTargetMock() { + this.events = {}; + + this.addEventListener = common.mustCall(this.addEventListener); + this.removeEventListener = common.mustCall(this.removeEventListener); +} + +EventTargetMock.prototype.addEventListener = function addEventListener(name, listener, options) { + if (!(name in this.events)) { + this.events[name] = { listeners: [], options: options || {} } + } + this.events[name].listeners.push(listener); +}; + +EventTargetMock.prototype.removeEventListener = function removeEventListener(name, callback) { + if (!(name in this.events)) { + return; + } + var event = this.events[name]; + var stack = event.listeners; + + for (var i = 0, l = stack.length; i < l; i++) { + if (stack[i] === callback) { + stack.splice(i, 1); + if (stack.length === 0) { + delete this.events[name]; + } + return; + } + } +}; + +EventTargetMock.prototype.dispatchEvent = function dispatchEvent(arg) { + if (!(arg.type in this.events)) { + return true; + } + + var event = this.events[arg.type]; + var stack = event.listeners.slice(); + + for (var i = 0, l = stack.length; i < l; i++) { + stack[i].call(null, arg); + if (event.options.once) { + this.removeEventListener(arg.type, stack[i]); + } + } + return !arg.defaultPrevented; +}; + +function onceAnEvent() { + var ee = new EventEmitter(); + + process.nextTick(function () { + ee.emit('myevent', 42); + }); + + return once(ee, 'myevent').then(function (args) { + var value = args[0] + assert.strictEqual(value, 42); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function onceAnEventWithTwoArgs() { + var ee = new EventEmitter(); + + process.nextTick(function () { + ee.emit('myevent', 42, 24); + }); + + return once(ee, 'myevent').then(function (value) { + assert.strictEqual(value.length, 2); + assert.strictEqual(value[0], 42); + assert.strictEqual(value[1], 24); + }); +} + +function catchesErrors() { + var ee = new EventEmitter(); + + var expected = new 
Error('kaboom'); + var err; + process.nextTick(function () { + ee.emit('error', expected); + }); + + return once(ee, 'myevent').then(function () { + throw new Error('should reject') + }, function (err) { + assert.strictEqual(err, expected); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function stopListeningAfterCatchingError() { + var ee = new EventEmitter(); + + var expected = new Error('kaboom'); + var err; + process.nextTick(function () { + ee.emit('error', expected); + ee.emit('myevent', 42, 24); + }); + + // process.on('multipleResolves', common.mustNotCall()); + + return once(ee, 'myevent').then(common.mustNotCall, function (err) { + // process.removeAllListeners('multipleResolves'); + assert.strictEqual(err, expected); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function onceError() { + var ee = new EventEmitter(); + + var expected = new Error('kaboom'); + process.nextTick(function () { + ee.emit('error', expected); + }); + + var promise = once(ee, 'error'); + assert.strictEqual(ee.listenerCount('error'), 1); + return promise.then(function (args) { + var err = args[0] + assert.strictEqual(err, expected); + assert.strictEqual(ee.listenerCount('error'), 0); + assert.strictEqual(ee.listenerCount('myevent'), 0); + }); +} + +function onceWithEventTarget() { + var et = new EventTargetMock(); + var event = new Event('myevent'); + process.nextTick(function () { + et.dispatchEvent(event); + }); + return once(et, 'myevent').then(function (args) { + var value = args[0]; + assert.strictEqual(value, event); + assert.strictEqual(has(et.events, 'myevent'), false); + }); +} + +function onceWithEventTargetError() { + var et = new EventTargetMock(); + var error = new Event('error'); + process.nextTick(function () { + et.dispatchEvent(error); + }); + return once(et, 'error').then(function (args) { + var err = args[0]; + assert.strictEqual(err, error); + assert.strictEqual(has(et.events, 'error'), false); + }); +} + +function prioritizesEventEmitter() { + var ee = new EventEmitter(); + ee.addEventListener = assert.fail; + ee.removeAllListeners = assert.fail; + process.nextTick(function () { + ee.emit('foo'); + }); + return once(ee, 'foo'); +} + +var allTests = [ + onceAnEvent(), + onceAnEventWithTwoArgs(), + catchesErrors(), + stopListeningAfterCatchingError(), + onceError(), + onceWithEventTarget(), + onceWithEventTargetError(), + prioritizesEventEmitter() +]; + +var hasBrowserEventTarget = false; +try { + hasBrowserEventTarget = typeof (new window.EventTarget().addEventListener) === 'function' && + new window.Event('xyz').type === 'xyz'; +} catch (err) {} + +if (hasBrowserEventTarget) { + var onceWithBrowserEventTarget = function onceWithBrowserEventTarget() { + var et = new window.EventTarget(); + var event = new window.Event('myevent'); + process.nextTick(function () { + et.dispatchEvent(event); + }); + return once(et, 'myevent').then(function (args) { + var value = args[0]; + assert.strictEqual(value, event); + assert.strictEqual(has(et.events, 'myevent'), false); + }); + } + + var onceWithBrowserEventTargetError = function onceWithBrowserEventTargetError() { + var et = new window.EventTarget(); + var error = new window.Event('error'); + process.nextTick(function () { + et.dispatchEvent(error); + }); + return once(et, 'error').then(function (args) { + var err = args[0]; + assert.strictEqual(err, error); + assert.strictEqual(has(et.events, 'error'), 
false); + }); + } + + common.test.comment('Testing with browser built-in EventTarget'); + allTests.push([ + onceWithBrowserEventTarget(), + onceWithBrowserEventTargetError() + ]); +} + +module.exports = Promise.all(allTests); diff --git a/node_modules/events/tests/index.js b/node_modules/events/tests/index.js new file mode 100644 index 0000000000..2d739e670c --- /dev/null +++ b/node_modules/events/tests/index.js @@ -0,0 +1,64 @@ +var test = require('tape'); +var functionsHaveNames = require('functions-have-names'); +var hasSymbols = require('has-symbols'); + +require('./legacy-compat'); +var common = require('./common'); + +// we do this to easily wrap each file in a mocha test +// and also have browserify be able to statically analyze this file +var orig_require = require; +var require = function(file) { + test(file, function(t) { + // Store the tape object so tests can access it. + t.on('end', function () { delete common.test; }); + common.test = t; + + try { + var exp = orig_require(file); + if (exp && exp.then) { + exp.then(function () { t.end(); }, t.fail); + return; + } + } catch (err) { + t.fail(err); + } + t.end(); + }); +}; + +require('./add-listeners.js'); +require('./check-listener-leaks.js'); +require('./errors.js'); +require('./events-list.js'); +if (typeof Promise === 'function') { + require('./events-once.js'); +} else { + // Promise support is not available. + test('./events-once.js', { skip: true }, function () {}); +} +require('./listener-count.js'); +require('./listeners-side-effects.js'); +require('./listeners.js'); +require('./max-listeners.js'); +if (functionsHaveNames()) { + require('./method-names.js'); +} else { + // Function.name is not supported in IE + test('./method-names.js', { skip: true }, function () {}); +} +require('./modify-in-emit.js'); +require('./num-args.js'); +require('./once.js'); +require('./prepend.js'); +require('./set-max-listeners-side-effects.js'); +require('./special-event-names.js'); +require('./subclass.js'); +if (hasSymbols()) { + require('./symbols.js'); +} else { + // Symbol is not available. + test('./symbols.js', { skip: true }, function () {}); +} +require('./remove-all-listeners.js'); +require('./remove-listeners.js'); diff --git a/node_modules/events/tests/legacy-compat.js b/node_modules/events/tests/legacy-compat.js new file mode 100644 index 0000000000..a402be6e2f --- /dev/null +++ b/node_modules/events/tests/legacy-compat.js @@ -0,0 +1,16 @@ +// sigh... life is hard +if (!global.console) { + console = {} +} + +var fns = ['log', 'error', 'trace']; +for (var i=0 ; ifoo should not be emitted'); +} + +e.once('foo', remove); +e.removeListener('foo', remove); +e.emit('foo'); + +e.once('e', common.mustCall(function() { + e.emit('e'); +})); + +e.once('e', common.mustCall()); + +e.emit('e'); + +// Verify that the listener must be a function +assert.throws(function() { + var ee = new EventEmitter(); + + ee.once('foo', null); +}, /^TypeError: The "listener" argument must be of type Function. Received type object$/); + +{ + // once() has different code paths based on the number of arguments being + // emitted. Verify that all of the cases are covered. 
+ var maxArgs = 4; + + for (var i = 0; i <= maxArgs; ++i) { + var ee = new EventEmitter(); + var args = ['foo']; + + for (var j = 0; j < i; ++j) + args.push(j); + + ee.once('foo', common.mustCall(function() { + var params = Array.prototype.slice.call(arguments); + var restArgs = args.slice(1); + assert.ok(Array.isArray(params)); + assert.strictEqual(params.length, restArgs.length); + for (var index = 0; index < params.length; index++) { + var param = params[index]; + assert.strictEqual(param, restArgs[index]); + } + })); + + EventEmitter.prototype.emit.apply(ee, args); + } +} diff --git a/node_modules/events/tests/prepend.js b/node_modules/events/tests/prepend.js new file mode 100644 index 0000000000..79afde0bf3 --- /dev/null +++ b/node_modules/events/tests/prepend.js @@ -0,0 +1,31 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../'); +var assert = require('assert'); + +var myEE = new EventEmitter(); +var m = 0; +// This one comes last. +myEE.on('foo', common.mustCall(function () { + assert.strictEqual(m, 2); +})); + +// This one comes second. +myEE.prependListener('foo', common.mustCall(function () { + assert.strictEqual(m++, 1); +})); + +// This one comes first. +myEE.prependOnceListener('foo', + common.mustCall(function () { + assert.strictEqual(m++, 0); + })); + +myEE.emit('foo'); + +// Verify that the listener must be a function +assert.throws(function () { + var ee = new EventEmitter(); + ee.prependOnceListener('foo', null); +}, 'TypeError: The "listener" argument must be of type Function. Received type object'); diff --git a/node_modules/events/tests/remove-all-listeners.js b/node_modules/events/tests/remove-all-listeners.js new file mode 100644 index 0000000000..622941cfa6 --- /dev/null +++ b/node_modules/events/tests/remove-all-listeners.js @@ -0,0 +1,133 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var common = require('./common'); +var assert = require('assert'); +var events = require('../'); +var test = require('tape'); + +function expect(expected) { + var actual = []; + test.onFinish(function() { + var sortedActual = actual.sort(); + var sortedExpected = expected.sort(); + assert.strictEqual(sortedActual.length, sortedExpected.length); + for (var index = 0; index < sortedActual.length; index++) { + var value = sortedActual[index]; + assert.strictEqual(value, sortedExpected[index]); + } + }); + function listener(name) { + actual.push(name); + } + return common.mustCall(listener, expected.length); +} + +{ + var ee = new events.EventEmitter(); + var noop = common.mustNotCall(); + ee.on('foo', noop); + ee.on('bar', noop); + ee.on('baz', noop); + ee.on('baz', noop); + var fooListeners = ee.listeners('foo'); + var barListeners = ee.listeners('bar'); + var bazListeners = ee.listeners('baz'); + ee.on('removeListener', expect(['bar', 'baz', 'baz'])); + ee.removeAllListeners('bar'); + ee.removeAllListeners('baz'); + + var listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], noop); + + listeners = ee.listeners('bar'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + listeners = ee.listeners('baz'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + // After calling removeAllListeners(), + // the old listeners array should stay unchanged. + assert.strictEqual(fooListeners.length, 1); + assert.strictEqual(fooListeners[0], noop); + assert.strictEqual(barListeners.length, 1); + assert.strictEqual(barListeners[0], noop); + assert.strictEqual(bazListeners.length, 2); + assert.strictEqual(bazListeners[0], noop); + assert.strictEqual(bazListeners[1], noop); + // After calling removeAllListeners(), + // new listeners arrays is different from the old. + assert.notStrictEqual(ee.listeners('bar'), barListeners); + assert.notStrictEqual(ee.listeners('baz'), bazListeners); +} + +{ + var ee = new events.EventEmitter(); + ee.on('foo', common.mustNotCall()); + ee.on('bar', common.mustNotCall()); + // Expect LIFO order + ee.on('removeListener', expect(['foo', 'bar', 'removeListener'])); + ee.on('removeListener', expect(['foo', 'bar'])); + ee.removeAllListeners(); + + var listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + listeners = ee.listeners('bar'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new events.EventEmitter(); + ee.on('removeListener', common.mustNotCall()); + // Check for regression where removeAllListeners() throws when + // there exists a 'removeListener' listener, but there exists + // no listeners for the provided event type. 
+ assert.doesNotThrow(function () { ee.removeAllListeners(ee, 'foo') }); +} + +{ + var ee = new events.EventEmitter(); + var expectLength = 2; + ee.on('removeListener', function() { + assert.strictEqual(expectLength--, this.listeners('baz').length); + }); + ee.on('baz', common.mustNotCall()); + ee.on('baz', common.mustNotCall()); + ee.on('baz', common.mustNotCall()); + assert.strictEqual(ee.listeners('baz').length, expectLength + 1); + ee.removeAllListeners('baz'); + assert.strictEqual(ee.listeners('baz').length, 0); +} + +{ + var ee = new events.EventEmitter(); + assert.strictEqual(ee, ee.removeAllListeners()); +} + +{ + var ee = new events.EventEmitter(); + ee._events = undefined; + assert.strictEqual(ee, ee.removeAllListeners()); +} diff --git a/node_modules/events/tests/remove-listeners.js b/node_modules/events/tests/remove-listeners.js new file mode 100644 index 0000000000..18e4d1651f --- /dev/null +++ b/node_modules/events/tests/remove-listeners.js @@ -0,0 +1,212 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var common = require('./common'); +var assert = require('assert'); +var EventEmitter = require('../'); + +var listener1 = function listener1() {}; +var listener2 = function listener2() {}; + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener1); + })); + ee.removeListener('hello', listener1); + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('removeListener', common.mustNotCall()); + ee.removeListener('hello', listener2); + + var listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener1); +} + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('hello', listener2); + + var listeners; + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener2); + })); + ee.removeListener('hello', listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener2); + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + })); + ee.removeListener('hello', listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new EventEmitter(); + + function remove1() { + assert.fail('remove1 should not have been called'); + } + + function remove2() { + assert.fail('remove2 should not have been called'); + } + + ee.on('removeListener', common.mustCall(function(name, cb) { + if (cb !== remove1) return; + this.removeListener('quux', remove2); + this.emit('quux'); + }, 2)); + ee.on('quux', remove1); + ee.on('quux', remove2); + ee.removeListener('quux', remove1); +} + +{ + var ee = new EventEmitter(); + ee.on('hello', listener1); + ee.on('hello', listener2); + + var listeners; + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 1); + assert.strictEqual(listeners[0], listener2); + ee.once('removeListener', common.mustCall(function(name, cb) { + assert.strictEqual(name, 'hello'); + assert.strictEqual(cb, listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + })); + ee.removeListener('hello', listener2); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); + })); + ee.removeListener('hello', listener1); + listeners = ee.listeners('hello'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 0); +} + +{ + var ee = new EventEmitter(); + var listener3 = common.mustCall(function() { + ee.removeListener('hello', 
listener4); + }, 2); + var listener4 = common.mustCall(); + + ee.on('hello', listener3); + ee.on('hello', listener4); + + // listener4 will still be called although it is removed by listener 3. + ee.emit('hello'); + // This is so because the interal listener array at time of emit + // was [listener3,listener4] + + // Interal listener array [listener3] + ee.emit('hello'); +} + +{ + var ee = new EventEmitter(); + + ee.once('hello', listener1); + ee.on('removeListener', common.mustCall(function(eventName, listener) { + assert.strictEqual(eventName, 'hello'); + assert.strictEqual(listener, listener1); + })); + ee.emit('hello'); +} + +{ + var ee = new EventEmitter(); + + assert.strictEqual(ee, ee.removeListener('foo', function() {})); +} + +// Verify that the removed listener must be a function +assert.throws(function() { + var ee = new EventEmitter(); + + ee.removeListener('foo', null); +}, /^TypeError: The "listener" argument must be of type Function\. Received type object$/); + +{ + var ee = new EventEmitter(); + var listener = function() {}; + ee._events = undefined; + var e = ee.removeListener('foo', listener); + assert.strictEqual(e, ee); +} + +{ + var ee = new EventEmitter(); + + ee.on('foo', listener1); + ee.on('foo', listener2); + var listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 2); + assert.strictEqual(listeners[0], listener1); + assert.strictEqual(listeners[1], listener2); + + ee.removeListener('foo', listener1); + assert.strictEqual(ee._events.foo, listener2); + + ee.on('foo', listener1); + listeners = ee.listeners('foo'); + assert.ok(Array.isArray(listeners)); + assert.strictEqual(listeners.length, 2); + assert.strictEqual(listeners[0], listener2); + assert.strictEqual(listeners[1], listener1); + + ee.removeListener('foo', listener1); + assert.strictEqual(ee._events.foo, listener2); +} diff --git a/node_modules/events/tests/set-max-listeners-side-effects.js b/node_modules/events/tests/set-max-listeners-side-effects.js new file mode 100644 index 0000000000..13dbb671e9 --- /dev/null +++ b/node_modules/events/tests/set-max-listeners-side-effects.js @@ -0,0 +1,31 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +require('./common'); +var assert = require('assert'); +var events = require('../'); + +var e = new events.EventEmitter(); + +if (Object.create) assert.ok(!(e._events instanceof Object)); +assert.strictEqual(Object.keys(e._events).length, 0); +e.setMaxListeners(5); +assert.strictEqual(Object.keys(e._events).length, 0); diff --git a/node_modules/events/tests/special-event-names.js b/node_modules/events/tests/special-event-names.js new file mode 100644 index 0000000000..a2f0b744a7 --- /dev/null +++ b/node_modules/events/tests/special-event-names.js @@ -0,0 +1,45 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../'); +var assert = require('assert'); + +var ee = new EventEmitter(); +var handler = function() {}; + +assert.strictEqual(ee.eventNames().length, 0); + +assert.strictEqual(ee._events.hasOwnProperty, undefined); +assert.strictEqual(ee._events.toString, undefined); + +ee.on('__defineGetter__', handler); +ee.on('toString', handler); +ee.on('__proto__', handler); + +assert.strictEqual(ee.eventNames()[0], '__defineGetter__'); +assert.strictEqual(ee.eventNames()[1], 'toString'); + +assert.strictEqual(ee.listeners('__defineGetter__').length, 1); +assert.strictEqual(ee.listeners('__defineGetter__')[0], handler); +assert.strictEqual(ee.listeners('toString').length, 1); +assert.strictEqual(ee.listeners('toString')[0], handler); + +// Only run __proto__ tests if that property can actually be set +if ({ __proto__: 'ok' }.__proto__ === 'ok') { + assert.strictEqual(ee.eventNames().length, 3); + assert.strictEqual(ee.eventNames()[2], '__proto__'); + assert.strictEqual(ee.listeners('__proto__').length, 1); + assert.strictEqual(ee.listeners('__proto__')[0], handler); + + ee.on('__proto__', common.mustCall(function(val) { + assert.strictEqual(val, 1); + })); + ee.emit('__proto__', 1); + + process.on('__proto__', common.mustCall(function(val) { + assert.strictEqual(val, 1); + })); + process.emit('__proto__', 1); +} else { + console.log('# skipped __proto__') +} diff --git a/node_modules/events/tests/subclass.js b/node_modules/events/tests/subclass.js new file mode 100644 index 0000000000..bd033fff4d --- /dev/null +++ b/node_modules/events/tests/subclass.js @@ -0,0 +1,66 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var common = require('./common'); +var test = require('tape'); +var assert = require('assert'); +var EventEmitter = require('../').EventEmitter; +var util = require('util'); + +util.inherits(MyEE, EventEmitter); + +function MyEE(cb) { + this.once(1, cb); + this.emit(1); + this.removeAllListeners(); + EventEmitter.call(this); +} + +var myee = new MyEE(common.mustCall()); + + +util.inherits(ErrorEE, EventEmitter); +function ErrorEE() { + this.emit('error', new Error('blerg')); +} + +assert.throws(function() { + new ErrorEE(); +}, /blerg/); + +test.onFinish(function() { + assert.ok(!(myee._events instanceof Object)); + assert.strictEqual(Object.keys(myee._events).length, 0); +}); + + +function MyEE2() { + EventEmitter.call(this); +} + +MyEE2.prototype = new EventEmitter(); + +var ee1 = new MyEE2(); +var ee2 = new MyEE2(); + +ee1.on('x', function() {}); + +assert.strictEqual(ee2.listenerCount('x'), 0); diff --git a/node_modules/events/tests/symbols.js b/node_modules/events/tests/symbols.js new file mode 100644 index 0000000000..0721f0ec0b --- /dev/null +++ b/node_modules/events/tests/symbols.js @@ -0,0 +1,25 @@ +'use strict'; + +var common = require('./common'); +var EventEmitter = require('../'); +var assert = require('assert'); + +var ee = new EventEmitter(); +var foo = Symbol('foo'); +var listener = common.mustCall(); + +ee.on(foo, listener); +assert.strictEqual(ee.listeners(foo).length, 1); +assert.strictEqual(ee.listeners(foo)[0], listener); + +ee.emit(foo); + +ee.removeAllListeners(); +assert.strictEqual(ee.listeners(foo).length, 0); + +ee.on(foo, listener); +assert.strictEqual(ee.listeners(foo).length, 1); +assert.strictEqual(ee.listeners(foo)[0], listener); + +ee.removeListener(foo, listener); +assert.strictEqual(ee.listeners(foo).length, 0); diff --git a/node_modules/form-data/License b/node_modules/form-data/License new file mode 100644 index 0000000000..c7ff12a2f8 --- /dev/null +++ b/node_modules/form-data/License @@ -0,0 +1,19 @@ +Copyright (c) 2012 Felix Geisendörfer (felix@debuggable.com) and contributors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
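Editor's note (not part of the patch): the remove-listeners.js test above depends on emit() iterating over a snapshot of the listener array, so a listener removed during an emit still fires for that emit. A minimal sketch of that behaviour, assuming the vendored `events` package resolves as `events`:

```javascript
// Sketch only: illustrates the emit-time snapshot behaviour exercised by
// remove-listeners.js. emit() copies the listener array before calling it,
// so removing `second` inside `first` only affects later emits.
var EventEmitter = require('events');

var ee = new EventEmitter();

function second() {
  console.log('second'); // still runs for the first emit
}

function first() {
  console.log('first');
  ee.removeListener('ping', second);
}

ee.on('ping', first);
ee.on('ping', second);

ee.emit('ping'); // -> first, second
ee.emit('ping'); // -> first
```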
diff --git a/node_modules/form-data/README.md b/node_modules/form-data/README.md new file mode 100644 index 0000000000..f6b2ae9135 --- /dev/null +++ b/node_modules/form-data/README.md @@ -0,0 +1,350 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. + +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/v2.5.1.svg?label=linux:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/v2.5.1.svg?label=macos:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/v2.5.1.svg?label=windows:4.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/v2.5.1.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
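Editor's note (not part of the upstream README): since `submit()` returns the underlying `http.ClientRequest`, one way to use that return value is to attach your own error handling or a timeout to the upload. A minimal sketch, assuming an `http://example.org/` endpoint:

```javascript
var FormData = require('form-data');

var form = new FormData();
form.append('my_field', 'my value');

// submit() returns the http.ClientRequest it created.
var req = form.submit('http://example.org/', function(err, res) {
  if (err) return console.error('upload failed:', err);
  res.resume();
});

// The returned request can be manipulated directly, e.g. to abort a slow upload.
req.setTimeout(5000, function() {
  req.abort();
});
```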
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Array_ getHeaders( [**Array** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-) +- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary) +- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer) +- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync) +- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-) +- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength) +- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-) +- [_String_ toString()](https://github.com/form-data/form-data#string-tostring) + +#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] ) +Append data to the form. You can submit about any format (string, integer, boolean, buffer, etc.). However, Arrays are not supported and need to be turned into strings by the user. +```javascript +var form = new FormData(); +form.append( 'my_string', 'my value' ); +form.append( 'my_integer', 1 ); +form.append( 'my_boolean', true ); +form.append( 'my_buffer', new Buffer(10) ); +form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) ) +``` + +You may provide a string for options, or an object. +```javascript +// Set filename by providing a string for options +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' ); + +// provide an object. +form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} ); +``` + +#### _Array_ getHeaders( [**Array** _userHeaders_] ) +This method ads the correct `content-type` header to the provided array of `userHeaders`. + +#### _String_ getBoundary() +Return the boundary of the formData. A boundary consists of 26 `-` followed by 24 numbers +for example: +```javascript +--------------------------515890814546601021194782 +``` +_Note: The boundary must be unique and may not appear in the data._ + +#### _Buffer_ getBuffer() +Return the full formdata request package, as a Buffer. You can insert this Buffer in e.g. Axios to send multipart data. +```javascript +var form = new FormData(); +form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) ); +form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') ); + +axios.post( 'https://example.com/path/to/api', + form.getBuffer(), + form.getHeaders() + ) +``` +**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error. + +#### _Integer_ getLengthSync() +Same as `getLength` but synchronous. + +_Note: getLengthSync __doesn't__ calculate streams length._ + +#### _Integer_ getLength( **function** _callback_ ) +Returns the `Content-Length` async. The callback is used to handle errors and continue once the length has been calculated +```javascript +this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + ... +}.bind(this)); +``` + +#### _Boolean_ hasKnownLength() +Checks if the length of added values is known. + +#### _Request_ submit( _params_, **function** _callback_ ) +Submit the form to a web application. 
+```javascript +var form = new FormData(); +form.append( 'my_string', 'Hello World' ); + +form.submit( 'http://example.com/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +} ); +``` + +#### _String_ toString() +Returns the form data as a string. Don't use this if you are sending files or buffers, use `getBuffer()` instead. + +### Integration with other libraries + +#### Request + +Form submission using [request](https://github.com/request/request): + +```javascript +var formData = { + my_field: 'my_value', + my_file: fs.createReadStream(__dirname + '/unicycle.jpg'), +}; + +request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) { + if (err) { + return console.error('upload failed:', err); + } + console.log('Upload successful! Server responded with:', body); +}); +``` + +For more details see [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads). + +#### node-fetch + +You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch): + +```javascript +var form = new FormData(); + +form.append('a', 1); + +fetch('http://example.com', { method: 'POST', body: form }) + .then(function(res) { + return res.json(); + }).then(function(json) { + console.log(json); + }); +``` + +#### axios + +In Node.js you can post a file using [axios](https://github.com/axios/axios): +```javascript +const form = new FormData(); +const stream = fs.createReadStream(PATH_TO_FILE); + +form.append('image', stream); + +// In Node.js environment you need to set boundary in the header field 'Content-Type' by calling method `getHeaders` +const formHeaders = form.getHeaders(); + +axios.post('http://example.com', form, { + headers: { + ...formHeaders, + }, +}) +.then(response => response) +.catch(error => error) +``` + +## Notes + +- ```getLengthSync()``` method DOESN'T calculate length for streams, use ```knownLength``` options as workaround. +- Starting version `2.x` FormData has dropped support for `node@0.10.x`. + +## License + +Form-Data is released under the [MIT](License) license. diff --git a/node_modules/form-data/README.md.bak b/node_modules/form-data/README.md.bak new file mode 100644 index 0000000000..6077db9ad6 --- /dev/null +++ b/node_modules/form-data/README.md.bak @@ -0,0 +1,350 @@ +# Form-Data [![NPM Module](https://img.shields.io/npm/v/form-data.svg)](https://www.npmjs.com/package/form-data) [![Join the chat at https://gitter.im/form-data/form-data](http://form-data.github.io/images/gitterbadge.svg)](https://gitter.im/form-data/form-data) + +A library to create readable ```"multipart/form-data"``` streams. Can be used to submit forms and file uploads to other web applications. + +The API of this library is inspired by the [XMLHttpRequest-2 FormData Interface][xhr2-fd]. 
+ +[xhr2-fd]: http://dev.w3.org/2006/webapi/XMLHttpRequest-2/Overview.html#the-formdata-interface + +[![Linux Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=linux:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![MacOS Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=macos:4.x-12.x)](https://travis-ci.org/form-data/form-data) +[![Windows Build](https://img.shields.io/travis/form-data/form-data/master.svg?label=windows:4.x-12.x)](https://travis-ci.org/form-data/form-data) + +[![Coverage Status](https://img.shields.io/coveralls/form-data/form-data/master.svg?label=code+coverage)](https://coveralls.io/github/form-data/form-data?branch=master) +[![Dependency Status](https://img.shields.io/david/form-data/form-data.svg)](https://david-dm.org/form-data/form-data) + +## Install + +``` +npm install --save form-data +``` + +## Usage + +In this example we are constructing a form with 3 fields that contain a string, +a buffer and a file stream. + +``` javascript +var FormData = require('form-data'); +var fs = require('fs'); + +var form = new FormData(); +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_file', fs.createReadStream('/foo/bar.jpg')); +``` + +Also you can use http-response stream: + +``` javascript +var FormData = require('form-data'); +var http = require('http'); + +var form = new FormData(); + +http.request('http://nodejs.org/images/logo.png', function(response) { + form.append('my_field', 'my value'); + form.append('my_buffer', new Buffer(10)); + form.append('my_logo', response); +}); +``` + +Or @mikeal's [request](https://github.com/request/request) stream: + +``` javascript +var FormData = require('form-data'); +var request = require('request'); + +var form = new FormData(); + +form.append('my_field', 'my value'); +form.append('my_buffer', new Buffer(10)); +form.append('my_logo', request('http://nodejs.org/images/logo.png')); +``` + +In order to submit this form to a web application, call ```submit(url, [callback])``` method: + +``` javascript +form.submit('http://example.org/', function(err, res) { + // res – response object (http.IncomingMessage) // + res.resume(); +}); + +``` + +For more advanced request manipulations ```submit()``` method returns ```http.ClientRequest``` object, or you can choose from one of the alternative submission methods. 
+ +### Custom options + +You can provide custom options, such as `maxDataSize`: + +``` javascript +var FormData = require('form-data'); + +var form = new FormData({ maxDataSize: 20971520 }); +form.append('my_field', 'my value'); +form.append('my_buffer', /* something big */); +``` + +List of available options could be found in [combined-stream](https://github.com/felixge/node-combined-stream/blob/master/lib/combined_stream.js#L7-L15) + +### Alternative submission methods + +You can use node's http client interface: + +``` javascript +var http = require('http'); + +var request = http.request({ + method: 'post', + host: 'example.org', + path: '/upload', + headers: form.getHeaders() +}); + +form.pipe(request); + +request.on('response', function(res) { + console.log(res.statusCode); +}); +``` + +Or if you would prefer the `'Content-Length'` header to be set for you: + +``` javascript +form.submit('example.org/upload', function(err, res) { + console.log(res.statusCode); +}); +``` + +To use custom headers and pre-known length in parts: + +``` javascript +var CRLF = '\r\n'; +var form = new FormData(); + +var options = { + header: CRLF + '--' + form.getBoundary() + CRLF + 'X-Custom-Header: 123' + CRLF + CRLF, + knownLength: 1 +}; + +form.append('my_buffer', buffer, options); + +form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); +}); +``` + +Form-Data can recognize and fetch all the required information from common types of streams (```fs.readStream```, ```http.response``` and ```mikeal's request```), for some other types of streams you'd need to provide "file"-related information manually: + +``` javascript +someModule.stream(function(err, stdout, stderr) { + if (err) throw err; + + var form = new FormData(); + + form.append('file', stdout, { + filename: 'unicycle.jpg', // ... or: + filepath: 'photos/toys/unicycle.jpg', + contentType: 'image/jpeg', + knownLength: 19806 + }); + + form.submit('http://example.com/', function(err, res) { + if (err) throw err; + console.log('Done'); + }); +}); +``` + +The `filepath` property overrides `filename` and may contain a relative path. This is typically used when uploading [multiple files from a directory](https://wicg.github.io/entries-api/#dom-htmlinputelement-webkitdirectory). + +For edge cases, like POST request to URL with query string or to pass HTTP auth credentials, object can be passed to `form.submit()` as first parameter: + +``` javascript +form.submit({ + host: 'example.com', + path: '/probably.php?extra=params', + auth: 'username:password' +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +In case you need to also send custom HTTP headers with the POST request, you can use the `headers` key in first parameter of `form.submit()`: + +``` javascript +form.submit({ + host: 'example.com', + path: '/surelynot.php', + headers: {'x-test-header': 'test-header-value'} +}, function(err, res) { + console.log(res.statusCode); +}); +``` + +### Methods + +- [_Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )](https://github.com/form-data/form-data#void-append-string-field-mixed-value--mixed-options-). 
+- [_Array_ getHeaders( [**Array** _userHeaders_] )](https://github.com/form-data/form-data#array-getheaders-array-userheaders-)
+- [_String_ getBoundary()](https://github.com/form-data/form-data#string-getboundary)
+- [_Buffer_ getBuffer()](https://github.com/form-data/form-data#buffer-getbuffer)
+- [_Integer_ getLengthSync()](https://github.com/form-data/form-data#integer-getlengthsync)
+- [_Integer_ getLength( **function** _callback_ )](https://github.com/form-data/form-data#integer-getlength-function-callback-)
+- [_Boolean_ hasKnownLength()](https://github.com/form-data/form-data#boolean-hasknownlength)
+- [_Request_ submit( _params_, **function** _callback_ )](https://github.com/form-data/form-data#request-submit-params-function-callback-)
+- [_String_ toString()](https://github.com/form-data/form-data#string-tostring)
+
+#### _Void_ append( **String** _field_, **Mixed** _value_ [, **Mixed** _options_] )
+Append data to the form. You can submit just about any format (string, integer, boolean, buffer, etc.). However, arrays are not supported and need to be turned into strings by the user.
+```javascript
+var form = new FormData();
+form.append( 'my_string', 'my value' );
+form.append( 'my_integer', 1 );
+form.append( 'my_boolean', true );
+form.append( 'my_buffer', new Buffer(10) );
+form.append( 'my_array_as_json', JSON.stringify( ['bird','cute'] ) )
+```
+
+You may provide a string for options, or an object.
+```javascript
+// Set filename by providing a string for options
+form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), 'bar.jpg' );
+
+// provide an object.
+form.append( 'my_file', fs.createReadStream('/foo/bar.jpg'), {filename: 'bar.jpg', contentType: 'image/jpeg', knownLength: 19806} );
+```
+
+#### _Array_ getHeaders( [**Array** _userHeaders_] )
+This method adds the correct `content-type` header to the provided array of `userHeaders`.
+
+#### _String_ getBoundary()
+Returns the boundary of the formData. A boundary consists of 26 `-` followed by 24 numbers,
+for example:
+```javascript
+--------------------------515890814546601021194782
+```
+_Note: The boundary must be unique and may not appear in the data._
+
+#### _Buffer_ getBuffer()
+Returns the full form-data request package as a Buffer. You can pass this Buffer to e.g. Axios to send multipart data.
+```javascript
+var form = new FormData();
+form.append( 'my_buffer', Buffer.from([0x4a,0x42,0x20,0x52,0x6f,0x63,0x6b,0x73]) );
+form.append( 'my_file', fs.readFileSync('/foo/bar.jpg') );
+
+axios.post( 'https://example.com/path/to/api',
+            form.getBuffer(),
+            form.getHeaders()
+          )
+```
+**Note:** Because the output is of type Buffer, you can only append types that are accepted by Buffer: *string, Buffer, ArrayBuffer, Array, or Array-like Object*. A ReadStream for example will result in an error.
+
+#### _Integer_ getLengthSync()
+Same as `getLength` but synchronous.
+
+_Note: getLengthSync __doesn't__ calculate the length of streams._
+
+#### _Integer_ getLength( **function** _callback_ )
+Returns the `Content-Length` asynchronously. The callback is used to handle errors and to continue once the length has been calculated:
+```javascript
+this.getLength(function(err, length) {
+  if (err) {
+    this._error(err);
+    return;
+  }
+
+  // add content length
+  request.setHeader('Content-Length', length);
+
+  ...
+}.bind(this));
+```
+
+#### _Boolean_ hasKnownLength()
+Checks if the length of added values is known.
+
+#### _Request_ submit( _params_, **function** _callback_ )
+Submit the form to a web application.
+```javascript
+var form = new FormData();
+form.append( 'my_string', 'Hello World' );
+
+form.submit( 'http://example.com/', function(err, res) {
+  // res – response object (http.IncomingMessage) //
+  res.resume();
+} );
+```
+
+#### _String_ toString()
+Returns the form data as a string. Don't use this if you are sending files or buffers; use `getBuffer()` instead.
+
+### Integration with other libraries
+
+#### Request
+
+Form submission using [request](https://github.com/request/request):
+
+```javascript
+var formData = {
+  my_field: 'my_value',
+  my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),
+};
+
+request.post({url:'http://service.com/upload', formData: formData}, function(err, httpResponse, body) {
+  if (err) {
+    return console.error('upload failed:', err);
+  }
+  console.log('Upload successful! Server responded with:', body);
+});
+```
+
+For more details see the [request readme](https://github.com/request/request#multipartform-data-multipart-form-uploads).
+
+#### node-fetch
+
+You can also submit a form using [node-fetch](https://github.com/bitinn/node-fetch):
+
+```javascript
+var form = new FormData();
+
+form.append('a', 1);
+
+fetch('http://example.com', { method: 'POST', body: form })
+  .then(function(res) {
+    return res.json();
+  }).then(function(json) {
+    console.log(json);
+  });
+```
+
+#### axios
+
+In Node.js you can post a file using [axios](https://github.com/axios/axios):
+```javascript
+const form = new FormData();
+const stream = fs.createReadStream(PATH_TO_FILE);
+
+form.append('image', stream);
+
+// In a Node.js environment you need to set the boundary in the 'Content-Type' header field by calling the `getHeaders` method
+const formHeaders = form.getHeaders();
+
+axios.post('http://example.com', form, {
+  headers: {
+    ...formHeaders,
+  },
+})
+.then(response => response)
+.catch(error => error)
+```
+
+## Notes
+
+- The ```getLengthSync()``` method DOESN'T calculate the length of streams; use the ```knownLength``` option as a workaround (see the sketch at the end of this readme).
+- Starting with version `2.x`, FormData has dropped support for `node@0.10.x`.
+
+## License
+
+Form-Data is released under the [MIT](License) license.
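+
+As a minimal sketch of that ```knownLength``` workaround, reusing the placeholder path `/foo/bar.jpg` from the examples above, the stream's size can be measured up front with `fs.statSync` and passed along so that length-based helpers keep working:
+
+```javascript
+var FormData = require('form-data');
+var fs = require('fs');
+
+var filePath = '/foo/bar.jpg'; // placeholder path
+var fileSize = fs.statSync(filePath).size;
+
+var form = new FormData();
+
+// Passing knownLength means the stream does not need to be measured
+// asynchronously, so getLengthSync() and hasKnownLength() stay usable.
+form.append('my_file', fs.createReadStream(filePath), {
+  filename: 'bar.jpg',
+  knownLength: fileSize
+});
+
+console.log(form.getLengthSync()); // includes the stream's bytes
+```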
diff --git a/node_modules/form-data/index.d.ts b/node_modules/form-data/index.d.ts new file mode 100644 index 0000000000..255292d7e3 --- /dev/null +++ b/node_modules/form-data/index.d.ts @@ -0,0 +1,51 @@ +// Definitions by: Carlos Ballesteros Velasco +// Leon Yu +// BendingBender +// Maple Miao + +/// +import * as stream from 'stream'; +import * as http from 'http'; + +export = FormData; + +interface Options { + writable?: boolean; + readable?: boolean; + dataSize?: number; + maxDataSize?: number; + pauseStreams?: boolean; +} + +declare class FormData extends stream.Readable { + constructor(options?: Options); + append(key: string, value: any, options?: FormData.AppendOptions | string): void; + getHeaders(): FormData.Headers; + submit( + params: string | FormData.SubmitOptions, + callback?: (error: Error | null, response: http.IncomingMessage) => void + ): http.ClientRequest; + getBuffer(): Buffer; + getBoundary(): string; + getLength(callback: (err: Error | null, length: number) => void): void; + getLengthSync(): number; + hasKnownLength(): boolean; +} + +declare namespace FormData { + interface Headers { + [key: string]: any; + } + + interface AppendOptions { + header?: string | Headers; + knownLength?: number; + filename?: string; + filepath?: string; + contentType?: string; + } + + interface SubmitOptions extends http.RequestOptions { + protocol?: 'https:' | 'http:'; + } +} diff --git a/node_modules/form-data/lib/browser.js b/node_modules/form-data/lib/browser.js new file mode 100644 index 0000000000..09e7c70e6e --- /dev/null +++ b/node_modules/form-data/lib/browser.js @@ -0,0 +1,2 @@ +/* eslint-env browser */ +module.exports = typeof self == 'object' ? self.FormData : window.FormData; diff --git a/node_modules/form-data/lib/form_data.js b/node_modules/form-data/lib/form_data.js new file mode 100644 index 0000000000..9c07e32c0f --- /dev/null +++ b/node_modules/form-data/lib/form_data.js @@ -0,0 +1,483 @@ +var CombinedStream = require('combined-stream'); +var util = require('util'); +var path = require('path'); +var http = require('http'); +var https = require('https'); +var parseUrl = require('url').parse; +var fs = require('fs'); +var mime = require('mime-types'); +var asynckit = require('asynckit'); +var populate = require('./populate.js'); + +// Public API +module.exports = FormData; + +// make it a Stream +util.inherits(FormData, CombinedStream); + +/** + * Create readable "multipart/form-data" streams. + * Can be used to submit forms + * and file uploads to other web applications. 
+ * + * @constructor + * @param {Object} options - Properties to be added/overriden for FormData and CombinedStream + */ +function FormData(options) { + if (!(this instanceof FormData)) { + return new FormData(); + } + + this._overheadLength = 0; + this._valueLength = 0; + this._valuesToMeasure = []; + + CombinedStream.call(this); + + options = options || {}; + for (var option in options) { + this[option] = options[option]; + } +} + +FormData.LINE_BREAK = '\r\n'; +FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; + +FormData.prototype.append = function(field, value, options) { + + options = options || {}; + + // allow filename as single option + if (typeof options == 'string') { + options = {filename: options}; + } + + var append = CombinedStream.prototype.append.bind(this); + + // all that streamy business can't handle numbers + if (typeof value == 'number') { + value = '' + value; + } + + // https://github.com/felixge/node-form-data/issues/38 + if (util.isArray(value)) { + // Please convert your array into string + // the way web server expects it + this._error(new Error('Arrays are not supported.')); + return; + } + + var header = this._multiPartHeader(field, value, options); + var footer = this._multiPartFooter(); + + append(header); + append(value); + append(footer); + + // pass along options.knownLength + this._trackLength(header, value, options); +}; + +FormData.prototype._trackLength = function(header, value, options) { + var valueLength = 0; + + // used w/ getLengthSync(), when length is known. + // e.g. for streaming directly from a remote server, + // w/ a known file a size, and not wanting to wait for + // incoming file to finish to get its size. + if (options.knownLength != null) { + valueLength += +options.knownLength; + } else if (Buffer.isBuffer(value)) { + valueLength = value.length; + } else if (typeof value === 'string') { + valueLength = Buffer.byteLength(value); + } + + this._valueLength += valueLength; + + // @check why add CRLF? does this account for custom/multiple CRLFs? + this._overheadLength += + Buffer.byteLength(header) + + FormData.LINE_BREAK.length; + + // empty or either doesn't have path or not an http response + if (!value || ( !value.path && !(value.readable && value.hasOwnProperty('httpVersion')) )) { + return; + } + + // no need to bother with the length + if (!options.knownLength) { + this._valuesToMeasure.push(value); + } +}; + +FormData.prototype._lengthRetriever = function(value, callback) { + + if (value.hasOwnProperty('fd')) { + + // take read range into a account + // `end` = Infinity –> read file till the end + // + // TODO: Looks like there is bug in Node fs.createReadStream + // it doesn't respect `end` options without `start` options + // Fix it when node fixes it. + // https://github.com/joyent/node/issues/7819 + if (value.end != undefined && value.end != Infinity && value.start != undefined) { + + // when end specified + // no need to calculate range + // inclusive, starts with 0 + callback(null, value.end + 1 - (value.start ? value.start : 0)); + + // not that fast snoopy + } else { + // still need to fetch file size from fs + fs.stat(value.path, function(err, stat) { + + var fileSize; + + if (err) { + callback(err); + return; + } + + // update final size based on the range options + fileSize = stat.size - (value.start ? 
value.start : 0); + callback(null, fileSize); + }); + } + + // or http response + } else if (value.hasOwnProperty('httpVersion')) { + callback(null, +value.headers['content-length']); + + // or request stream http://github.com/mikeal/request + } else if (value.hasOwnProperty('httpModule')) { + // wait till response come back + value.on('response', function(response) { + value.pause(); + callback(null, +response.headers['content-length']); + }); + value.resume(); + + // something else + } else { + callback('Unknown stream'); + } +}; + +FormData.prototype._multiPartHeader = function(field, value, options) { + // custom header specified (as string)? + // it becomes responsible for boundary + // (e.g. to handle extra CRLFs on .NET servers) + if (typeof options.header == 'string') { + return options.header; + } + + var contentDisposition = this._getContentDisposition(value, options); + var contentType = this._getContentType(value, options); + + var contents = ''; + var headers = { + // add custom disposition as third element or keep it two elements if not + 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), + // if no content type. allow it to be empty array + 'Content-Type': [].concat(contentType || []) + }; + + // allow custom headers. + if (typeof options.header == 'object') { + populate(headers, options.header); + } + + var header; + for (var prop in headers) { + if (!headers.hasOwnProperty(prop)) continue; + header = headers[prop]; + + // skip nullish headers. + if (header == null) { + continue; + } + + // convert all headers to arrays. + if (!Array.isArray(header)) { + header = [header]; + } + + // add non-empty headers. + if (header.length) { + contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; + } + } + + return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; +}; + +FormData.prototype._getContentDisposition = function(value, options) { + + var filename + , contentDisposition + ; + + if (typeof options.filepath === 'string') { + // custom filepath for relative paths + filename = path.normalize(options.filepath).replace(/\\/g, '/'); + } else if (options.filename || value.name || value.path) { + // custom filename take precedence + // formidable and the browser add a name property + // fs- and request- streams have path property + filename = path.basename(options.filename || value.name || value.path); + } else if (value.readable && value.hasOwnProperty('httpVersion')) { + // or try http response + filename = path.basename(value.client._httpMessage.path || ''); + } + + if (filename) { + contentDisposition = 'filename="' + filename + '"'; + } + + return contentDisposition; +}; + +FormData.prototype._getContentType = function(value, options) { + + // use custom content-type above all + var contentType = options.contentType; + + // or try `name` from formidable, browser + if (!contentType && value.name) { + contentType = mime.lookup(value.name); + } + + // or try `path` from fs-, request- streams + if (!contentType && value.path) { + contentType = mime.lookup(value.path); + } + + // or if it's http-reponse + if (!contentType && value.readable && value.hasOwnProperty('httpVersion')) { + contentType = value.headers['content-type']; + } + + // or guess it from the filepath or filename + if (!contentType && (options.filepath || options.filename)) { + contentType = mime.lookup(options.filepath || options.filename); + } + + // fallback to the default content type if `value` is not simple value + if 
(!contentType && typeof value == 'object') { + contentType = FormData.DEFAULT_CONTENT_TYPE; + } + + return contentType; +}; + +FormData.prototype._multiPartFooter = function() { + return function(next) { + var footer = FormData.LINE_BREAK; + + var lastPart = (this._streams.length === 0); + if (lastPart) { + footer += this._lastBoundary(); + } + + next(footer); + }.bind(this); +}; + +FormData.prototype._lastBoundary = function() { + return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; +}; + +FormData.prototype.getHeaders = function(userHeaders) { + var header; + var formHeaders = { + 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() + }; + + for (header in userHeaders) { + if (userHeaders.hasOwnProperty(header)) { + formHeaders[header.toLowerCase()] = userHeaders[header]; + } + } + + return formHeaders; +}; + +FormData.prototype.getBoundary = function() { + if (!this._boundary) { + this._generateBoundary(); + } + + return this._boundary; +}; + +FormData.prototype.getBuffer = function() { + var dataBuffer = new Buffer.alloc( 0 ); + var boundary = this.getBoundary(); + + // Create the form content. Add Line breaks to the end of data. + for (var i = 0, len = this._streams.length; i < len; i++) { + if (typeof this._streams[i] !== 'function') { + + // Add content to the buffer. + if(Buffer.isBuffer(this._streams[i])) { + dataBuffer = Buffer.concat( [dataBuffer, this._streams[i]]); + }else { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(this._streams[i])]); + } + + // Add break after content. + if (typeof this._streams[i] !== 'string' || this._streams[i].substring( 2, boundary.length + 2 ) !== boundary) { + dataBuffer = Buffer.concat( [dataBuffer, Buffer.from(FormData.LINE_BREAK)] ); + } + } + } + + // Add the footer and return the Buffer object. + return Buffer.concat( [dataBuffer, Buffer.from(this._lastBoundary())] ); +}; + +FormData.prototype._generateBoundary = function() { + // This generates a 50 character boundary similar to those used by Firefox. + // They are optimized for boyer-moore parsing. + var boundary = '--------------------------'; + for (var i = 0; i < 24; i++) { + boundary += Math.floor(Math.random() * 10).toString(16); + } + + this._boundary = boundary; +}; + +// Note: getLengthSync DOESN'T calculate streams length +// As workaround one can calculate file size manually +// and add it as knownLength option +FormData.prototype.getLengthSync = function() { + var knownLength = this._overheadLength + this._valueLength; + + // Don't get confused, there are 3 "internal" streams for each keyval pair + // so it basically checks if there is any value added to the form + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + // https://github.com/form-data/form-data/issues/40 + if (!this.hasKnownLength()) { + // Some async length retrievers are present + // therefore synchronous length calculation is false. 
+ // Please use getLength(callback) to get proper length + this._error(new Error('Cannot calculate proper length in synchronous way.')); + } + + return knownLength; +}; + +// Public API to check if length of added values is known +// https://github.com/form-data/form-data/issues/196 +// https://github.com/form-data/form-data/issues/262 +FormData.prototype.hasKnownLength = function() { + var hasKnownLength = true; + + if (this._valuesToMeasure.length) { + hasKnownLength = false; + } + + return hasKnownLength; +}; + +FormData.prototype.getLength = function(cb) { + var knownLength = this._overheadLength + this._valueLength; + + if (this._streams.length) { + knownLength += this._lastBoundary().length; + } + + if (!this._valuesToMeasure.length) { + process.nextTick(cb.bind(this, null, knownLength)); + return; + } + + asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function(err, values) { + if (err) { + cb(err); + return; + } + + values.forEach(function(length) { + knownLength += length; + }); + + cb(null, knownLength); + }); +}; + +FormData.prototype.submit = function(params, cb) { + var request + , options + , defaults = {method: 'post'} + ; + + // parse provided url if it's string + // or treat it as options object + if (typeof params == 'string') { + + params = parseUrl(params); + options = populate({ + port: params.port, + path: params.pathname, + host: params.hostname, + protocol: params.protocol + }, defaults); + + // use custom params + } else { + + options = populate(params, defaults); + // if no port provided use default one + if (!options.port) { + options.port = options.protocol == 'https:' ? 443 : 80; + } + } + + // put that good code in getHeaders to some use + options.headers = this.getHeaders(params.headers); + + // https if specified, fallback to http in any other case + if (options.protocol == 'https:') { + request = https.request(options); + } else { + request = http.request(options); + } + + // get content length and fire away + this.getLength(function(err, length) { + if (err) { + this._error(err); + return; + } + + // add content length + request.setHeader('Content-Length', length); + + this.pipe(request); + if (cb) { + request.on('error', cb); + request.on('response', cb.bind(this, null)); + } + }.bind(this)); + + return request; +}; + +FormData.prototype._error = function(err) { + if (!this.error) { + this.error = err; + this.pause(); + this.emit('error', err); + } +}; + +FormData.prototype.toString = function () { + return '[object FormData]'; +}; diff --git a/node_modules/form-data/lib/populate.js b/node_modules/form-data/lib/populate.js new file mode 100644 index 0000000000..4d35738dd5 --- /dev/null +++ b/node_modules/form-data/lib/populate.js @@ -0,0 +1,10 @@ +// populates missing values +module.exports = function(dst, src) { + + Object.keys(src).forEach(function(prop) + { + dst[prop] = dst[prop] || src[prop]; + }); + + return dst; +}; diff --git a/node_modules/form-data/package.json b/node_modules/form-data/package.json new file mode 100644 index 0000000000..7423f200d6 --- /dev/null +++ b/node_modules/form-data/package.json @@ -0,0 +1,68 @@ +{ + "author": "Felix Geisendörfer (http://debuggable.com/)", + "name": "form-data", + "description": "A library to create readable \"multipart/form-data\" streams. 
Can be used to submit forms and file uploads to other web applications.", + "version": "2.5.1", + "repository": { + "type": "git", + "url": "git://github.com/form-data/form-data.git" + }, + "main": "./lib/form_data", + "browser": "./lib/browser", + "typings": "./index.d.ts", + "scripts": { + "pretest": "rimraf coverage test/tmp", + "test": "istanbul cover test/run.js", + "posttest": "istanbul report lcov text", + "lint": "eslint lib/*.js test/*.js test/integration/*.js", + "report": "istanbul report lcov text", + "ci-lint": "is-node-modern 8 && npm run lint || is-node-not-modern 8", + "ci-test": "npm run test && npm run browser && npm run report", + "predebug": "rimraf coverage test/tmp", + "debug": "verbose=1 ./test/run.js", + "browser": "browserify -t browserify-istanbul test/run-browser.js | obake --coverage", + "check": "istanbul check-coverage coverage/coverage*.json", + "files": "pkgfiles --sort=name", + "get-version": "node -e \"console.log(require('./package.json').version)\"", + "update-readme": "sed -i.bak 's/\\/master\\.svg/\\/v'$(npm --silent run get-version)'.svg/g' README.md", + "restore-readme": "mv README.md.bak README.md", + "prepublish": "in-publish && npm run update-readme || not-in-publish", + "postpublish": "npm run restore-readme" + }, + "pre-commit": [ + "lint", + "ci-test", + "check" + ], + "engines": { + "node": ">= 0.12" + }, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "devDependencies": { + "@types/node": "^12.0.10", + "browserify": "^13.1.1", + "browserify-istanbul": "^2.0.0", + "coveralls": "^3.0.4", + "cross-spawn": "^6.0.5", + "eslint": "^6.0.1", + "fake": "^0.2.2", + "far": "^0.0.7", + "formidable": "^1.0.17", + "in-publish": "^2.0.0", + "is-node-modern": "^1.0.0", + "istanbul": "^0.4.5", + "obake": "^0.1.2", + "phantomjs-prebuilt": "^2.1.13", + "pkgfiles": "^2.3.0", + "pre-commit": "^1.1.3", + "request": "^2.88.0", + "rimraf": "^2.7.1", + "tape": "^4.6.2", + "typescript": "^3.5.2" + }, + "license": "MIT" +} diff --git a/node_modules/ip-regex/index.js b/node_modules/ip-regex/index.js new file mode 100644 index 0000000000..973e5f41c2 --- /dev/null +++ b/node_modules/ip-regex/index.js @@ -0,0 +1,24 @@ +'use strict'; + +const v4 = '(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])(?:\\.(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9][0-9]|[0-9])){3}'; + +const v6seg = '[0-9a-fA-F]{1,4}'; +const v6 = ` +( +(?:${v6seg}:){7}(?:${v6seg}|:)| // 1:2:3:4:5:6:7:: 1:2:3:4:5:6:7:8 +(?:${v6seg}:){6}(?:${v4}|:${v6seg}|:)| // 1:2:3:4:5:6:: 1:2:3:4:5:6::8 1:2:3:4:5:6::8 1:2:3:4:5:6::1.2.3.4 +(?:${v6seg}:){5}(?::${v4}|(:${v6seg}){1,2}|:)| // 1:2:3:4:5:: 1:2:3:4:5::7:8 1:2:3:4:5::8 1:2:3:4:5::7:1.2.3.4 +(?:${v6seg}:){4}(?:(:${v6seg}){0,1}:${v4}|(:${v6seg}){1,3}|:)| // 1:2:3:4:: 1:2:3:4::6:7:8 1:2:3:4::8 1:2:3:4::6:7:1.2.3.4 +(?:${v6seg}:){3}(?:(:${v6seg}){0,2}:${v4}|(:${v6seg}){1,4}|:)| // 1:2:3:: 1:2:3::5:6:7:8 1:2:3::8 1:2:3::5:6:7:1.2.3.4 +(?:${v6seg}:){2}(?:(:${v6seg}){0,3}:${v4}|(:${v6seg}){1,5}|:)| // 1:2:: 1:2::4:5:6:7:8 1:2::8 1:2::4:5:6:7:1.2.3.4 +(?:${v6seg}:){1}(?:(:${v6seg}){0,4}:${v4}|(:${v6seg}){1,6}|:)| // 1:: 1::3:4:5:6:7:8 1::8 1::3:4:5:6:7:1.2.3.4 +(?::((?::${v6seg}){0,5}:${v4}|(?::${v6seg}){1,7}|:)) // ::2:3:4:5:6:7:8 ::2:3:4:5:6:7:8 ::8 ::1.2.3.4 +)(%[0-9a-zA-Z]{1,})? // %eth0 %1 +`.replace(/\s*\/\/.*$/gm, '').replace(/\n/g, '').trim(); + +const ip = module.exports = opts => opts && opts.exact ? 
+ new RegExp(`(?:^${v4}$)|(?:^${v6}$)`) : + new RegExp(`(?:${v4})|(?:${v6})`, 'g'); + +ip.v4 = opts => opts && opts.exact ? new RegExp(`^${v4}$`) : new RegExp(v4, 'g'); +ip.v6 = opts => opts && opts.exact ? new RegExp(`^${v6}$`) : new RegExp(v6, 'g'); diff --git a/node_modules/ip-regex/license b/node_modules/ip-regex/license new file mode 100644 index 0000000000..654d0bfe94 --- /dev/null +++ b/node_modules/ip-regex/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/ip-regex/package.json b/node_modules/ip-regex/package.json new file mode 100644 index 0000000000..aa51479286 --- /dev/null +++ b/node_modules/ip-regex/package.json @@ -0,0 +1,45 @@ +{ + "name": "ip-regex", + "version": "2.1.0", + "description": "Regular expression for matching IP addresses (IPv4 & IPv6)", + "license": "MIT", + "repository": "sindresorhus/ip-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=4" + }, + "scripts": { + "test": "xo && ava" + }, + "files": [ + "index.js" + ], + "keywords": [ + "ip", + "ipv6", + "ipv4", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "text", + "pattern", + "internet", + "protocol", + "address", + "validate" + ], + "devDependencies": { + "ava": "*", + "xo": "*" + }, + "xo": { + "esnext": true + } +} diff --git a/node_modules/ip-regex/readme.md b/node_modules/ip-regex/readme.md new file mode 100644 index 0000000000..66bc7f2731 --- /dev/null +++ b/node_modules/ip-regex/readme.md @@ -0,0 +1,63 @@ +# ip-regex [![Build Status](https://travis-ci.org/sindresorhus/ip-regex.svg?branch=master)](https://travis-ci.org/sindresorhus/ip-regex) + +> Regular expression for matching IP addresses + + +## Install + +``` +$ npm install --save ip-regex +``` + + +## Usage + +```js +const ipRegex = require('ip-regex'); + +// Contains an IP address? +ipRegex().test('unicorn 192.168.0.1'); +//=> true + +// Is an IP address? +ipRegex({exact: true}).test('unicorn 192.168.0.1'); +//=> false + +ipRegex.v6({exact: true}).test('1:2:3:4:5:6:7:8'); +//=> true + +'unicorn 192.168.0.1 cake 1:2:3:4:5:6:7:8 rainbow'.match(ipRegex()); +//=> ['192.168.0.1', '1:2:3:4:5:6:7:8'] +``` + + +## API + +### ipRegex([options]) + +Returns a regex for matching both IPv4 and IPv6. + +### ipRegex.v4([options]) + +Returns a regex for matching IPv4. + +### ipRegex.v6([options]) + +Returns a regex for matching IPv6. 
+ +#### options.exact + +Type: `boolean`
+Default: `false` *(Matches any IP address in a string)* + +Only match an exact string. Useful with `RegExp#test()` to check if a string is an IP address. + + +## Related + +- [is-ip](https://github.com/sindresorhus/is-ip) - Check if a string is an IP address + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/mime-db/HISTORY.md b/node_modules/mime-db/HISTORY.md new file mode 100644 index 0000000000..7436f64146 --- /dev/null +++ b/node_modules/mime-db/HISTORY.md @@ -0,0 +1,507 @@ +1.52.0 / 2022-02-21 +=================== + + * Add extensions from IANA for more `image/*` types + * Add extension `.asc` to `application/pgp-keys` + * Add extensions to various XML types + * Add new upstream MIME types + +1.51.0 / 2021-11-08 +=================== + + * Add new upstream MIME types + * Mark `image/vnd.microsoft.icon` as compressible + * Mark `image/vnd.ms-dds` as compressible + +1.50.0 / 2021-09-15 +=================== + + * Add deprecated iWorks mime types and extensions + * Add new upstream MIME types + +1.49.0 / 2021-07-26 +=================== + + * Add extension `.trig` to `application/trig` + * Add new upstream MIME types + +1.48.0 / 2021-05-30 +=================== + + * Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + * Add new upstream MIME types + * Mark `text/yaml` as compressible + +1.47.0 / 2021-04-01 +=================== + + * Add new upstream MIME types + * Remove ambigious extensions from IANA for `application/*+xml` types + * Update primary extension to `.es` for `application/ecmascript` + +1.46.0 / 2021-02-13 +=================== + + * Add extension `.amr` to `audio/amr` + * Add extension `.m4s` to `video/iso.segment` + * Add extension `.opus` to `audio/ogg` + * Add new upstream MIME types + +1.45.0 / 2020-09-22 +=================== + + * Add `application/ubjson` with extension `.ubj` + * Add `image/avif` with extension `.avif` + * Add `image/ktx2` with extension `.ktx2` + * Add extension `.dbf` to `application/vnd.dbf` + * Add extension `.rar` to `application/vnd.rar` + * Add extension `.td` to `application/urc-targetdesc+xml` + * Add new upstream MIME types + * Fix extension of `application/vnd.apple.keynote` to be `.key` + +1.44.0 / 2020-04-22 +=================== + + * Add charsets from IANA + * Add extension `.cjs` to `application/node` + * Add new upstream MIME types + +1.43.0 / 2020-01-05 +=================== + + * Add `application/x-keepass2` with extension `.kdbx` + * Add extension `.mxmf` to `audio/mobile-xmf` + * Add extensions from IANA for `application/*+xml` types + * Add new upstream MIME types + +1.42.0 / 2019-09-25 +=================== + + * Add `image/vnd.ms-dds` with extension `.dds` + * Add new upstream MIME types + * Remove compressible from `multipart/mixed` + +1.41.0 / 2019-08-30 +=================== + + * Add new upstream MIME types + * Add `application/toml` with extension `.toml` + * Mark `font/ttf` as compressible + +1.40.0 / 2019-04-20 +=================== + + * Add extensions from IANA for `model/*` types + * Add `text/mdx` with extension `.mdx` + +1.39.0 / 2019-04-04 +=================== + + * Add extensions `.siv` and `.sieve` to `application/sieve` + * Add new upstream MIME types + +1.38.0 / 2019-02-04 +=================== + + * Add extension `.nq` to `application/n-quads` + * Add extension `.nt` to `application/n-triples` + * Add new upstream MIME types + * Mark `text/less` as compressible + +1.37.0 / 2018-10-19 +=================== + + * Add extensions to HEIC image types + * Add new upstream 
MIME types + +1.36.0 / 2018-08-20 +=================== + + * Add Apple file extensions from IANA + * Add extensions from IANA for `image/*` types + * Add new upstream MIME types + +1.35.0 / 2018-07-15 +=================== + + * Add extension `.owl` to `application/rdf+xml` + * Add new upstream MIME types + - Removes extension `.woff` from `application/font-woff` + +1.34.0 / 2018-06-03 +=================== + + * Add extension `.csl` to `application/vnd.citationstyles.style+xml` + * Add extension `.es` to `application/ecmascript` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/turtle` + * Mark all XML-derived types as compressible + +1.33.0 / 2018-02-15 +=================== + + * Add extensions from IANA for `message/*` types + * Add new upstream MIME types + * Fix some incorrect OOXML types + * Remove `application/font-woff2` + +1.32.0 / 2017-11-29 +=================== + + * Add new upstream MIME types + * Update `text/hjson` to registered `application/hjson` + * Add `text/shex` with extension `.shex` + +1.31.0 / 2017-10-25 +=================== + + * Add `application/raml+yaml` with extension `.raml` + * Add `application/wasm` with extension `.wasm` + * Add new `font` type from IANA + * Add new upstream font extensions + * Add new upstream MIME types + * Add extensions for JPEG-2000 images + +1.30.0 / 2017-08-27 +=================== + + * Add `application/vnd.ms-outlook` + * Add `application/x-arj` + * Add extension `.mjs` to `application/javascript` + * Add glTF types and extensions + * Add new upstream MIME types + * Add `text/x-org` + * Add VirtualBox MIME types + * Fix `source` records for `video/*` types that are IANA + * Update `font/opentype` to registered `font/otf` + +1.29.0 / 2017-07-10 +=================== + + * Add `application/fido.trusted-apps+json` + * Add extension `.wadl` to `application/vnd.sun.wadl+xml` + * Add new upstream MIME types + * Add `UTF-8` as default charset for `text/css` + +1.28.0 / 2017-05-14 +=================== + + * Add new upstream MIME types + * Add extension `.gz` to `application/gzip` + * Update extensions `.md` and `.markdown` to be `text/markdown` + +1.27.0 / 2017-03-16 +=================== + + * Add new upstream MIME types + * Add `image/apng` with extension `.apng` + +1.26.0 / 2017-01-14 +=================== + + * Add new upstream MIME types + * Add extension `.geojson` to `application/geo+json` + +1.25.0 / 2016-11-11 +=================== + + * Add new upstream MIME types + +1.24.0 / 2016-09-18 +=================== + + * Add `audio/mp3` + * Add new upstream MIME types + +1.23.0 / 2016-05-01 +=================== + + * Add new upstream MIME types + * Add extension `.3gpp` to `audio/3gpp` + +1.22.0 / 2016-02-15 +=================== + + * Add `text/slim` + * Add extension `.rng` to `application/xml` + * Add new upstream MIME types + * Fix extension of `application/dash+xml` to be `.mpd` + * Update primary extension to `.m4a` for `audio/mp4` + +1.21.0 / 2016-01-06 +=================== + + * Add Google document types + * Add new upstream MIME types + +1.20.0 / 2015-11-10 +=================== + + * Add `text/x-suse-ymp` + * Add new upstream MIME types + +1.19.0 / 2015-09-17 +=================== + + * Add `application/vnd.apple.pkpass` + * Add new upstream MIME types + +1.18.0 / 2015-09-03 +=================== + + * Add new upstream MIME types + +1.17.0 / 2015-08-13 +=================== + + * Add `application/x-msdos-program` + * Add `audio/g711-0` + * Add `image/vnd.mozilla.apng` + * Add extension `.exe` to 
`application/x-msdos-program` + +1.16.0 / 2015-07-29 +=================== + + * Add `application/vnd.uri-map` + +1.15.0 / 2015-07-13 +=================== + + * Add `application/x-httpd-php` + +1.14.0 / 2015-06-25 +=================== + + * Add `application/scim+json` + * Add `application/vnd.3gpp.ussd+xml` + * Add `application/vnd.biopax.rdf+xml` + * Add `text/x-processing` + +1.13.0 / 2015-06-07 +=================== + + * Add nginx as a source + * Add `application/x-cocoa` + * Add `application/x-java-archive-diff` + * Add `application/x-makeself` + * Add `application/x-perl` + * Add `application/x-pilot` + * Add `application/x-redhat-package-manager` + * Add `application/x-sea` + * Add `audio/x-m4a` + * Add `audio/x-realaudio` + * Add `image/x-jng` + * Add `text/mathml` + +1.12.0 / 2015-06-05 +=================== + + * Add `application/bdoc` + * Add `application/vnd.hyperdrive+json` + * Add `application/x-bdoc` + * Add extension `.rtf` to `text/rtf` + +1.11.0 / 2015-05-31 +=================== + + * Add `audio/wav` + * Add `audio/wave` + * Add extension `.litcoffee` to `text/coffeescript` + * Add extension `.sfd-hdstx` to `application/vnd.hydrostatix.sof-data` + * Add extension `.n-gage` to `application/vnd.nokia.n-gage.symbian.install` + +1.10.0 / 2015-05-19 +=================== + + * Add `application/vnd.balsamiq.bmpr` + * Add `application/vnd.microsoft.portable-executable` + * Add `application/x-ns-proxy-autoconfig` + +1.9.1 / 2015-04-19 +================== + + * Remove `.json` extension from `application/manifest+json` + - This is causing bugs downstream + +1.9.0 / 2015-04-19 +================== + + * Add `application/manifest+json` + * Add `application/vnd.micro+json` + * Add `image/vnd.zbrush.pcx` + * Add `image/x-ms-bmp` + +1.8.0 / 2015-03-13 +================== + + * Add `application/vnd.citationstyles.style+xml` + * Add `application/vnd.fastcopy-disk-image` + * Add `application/vnd.gov.sk.xmldatacontainer+xml` + * Add extension `.jsonld` to `application/ld+json` + +1.7.0 / 2015-02-08 +================== + + * Add `application/vnd.gerber` + * Add `application/vnd.msa-disk-image` + +1.6.1 / 2015-02-05 +================== + + * Community extensions ownership transferred from `node-mime` + +1.6.0 / 2015-01-29 +================== + + * Add `application/jose` + * Add `application/jose+json` + * Add `application/json-seq` + * Add `application/jwk+json` + * Add `application/jwk-set+json` + * Add `application/jwt` + * Add `application/rdap+json` + * Add `application/vnd.gov.sk.e-form+xml` + * Add `application/vnd.ims.imsccv1p3` + +1.5.0 / 2014-12-30 +================== + + * Add `application/vnd.oracle.resource+json` + * Fix various invalid MIME type entries + - `application/mbox+xml` + - `application/oscp-response` + - `application/vwg-multiplexed` + - `audio/g721` + +1.4.0 / 2014-12-21 +================== + + * Add `application/vnd.ims.imsccv1p2` + * Fix various invalid MIME type entries + - `application/vnd-acucobol` + - `application/vnd-curl` + - `application/vnd-dart` + - `application/vnd-dxr` + - `application/vnd-fdf` + - `application/vnd-mif` + - `application/vnd-sema` + - `application/vnd-wap-wmlc` + - `application/vnd.adobe.flash-movie` + - `application/vnd.dece-zip` + - `application/vnd.dvb_service` + - `application/vnd.micrografx-igx` + - `application/vnd.sealed-doc` + - `application/vnd.sealed-eml` + - `application/vnd.sealed-mht` + - `application/vnd.sealed-ppt` + - `application/vnd.sealed-tiff` + - `application/vnd.sealed-xls` + - `application/vnd.sealedmedia.softseal-html` + 
- `application/vnd.sealedmedia.softseal-pdf` + - `application/vnd.wap-slc` + - `application/vnd.wap-wbxml` + - `audio/vnd.sealedmedia.softseal-mpeg` + - `image/vnd-djvu` + - `image/vnd-svf` + - `image/vnd-wap-wbmp` + - `image/vnd.sealed-png` + - `image/vnd.sealedmedia.softseal-gif` + - `image/vnd.sealedmedia.softseal-jpg` + - `model/vnd-dwf` + - `model/vnd.parasolid.transmit-binary` + - `model/vnd.parasolid.transmit-text` + - `text/vnd-a` + - `text/vnd-curl` + - `text/vnd.wap-wml` + * Remove example template MIME types + - `application/example` + - `audio/example` + - `image/example` + - `message/example` + - `model/example` + - `multipart/example` + - `text/example` + - `video/example` + +1.3.1 / 2014-12-16 +================== + + * Fix missing extensions + - `application/json5` + - `text/hjson` + +1.3.0 / 2014-12-07 +================== + + * Add `application/a2l` + * Add `application/aml` + * Add `application/atfx` + * Add `application/atxml` + * Add `application/cdfx+xml` + * Add `application/dii` + * Add `application/json5` + * Add `application/lxf` + * Add `application/mf4` + * Add `application/vnd.apache.thrift.compact` + * Add `application/vnd.apache.thrift.json` + * Add `application/vnd.coffeescript` + * Add `application/vnd.enphase.envoy` + * Add `application/vnd.ims.imsccv1p1` + * Add `text/csv-schema` + * Add `text/hjson` + * Add `text/markdown` + * Add `text/yaml` + +1.2.0 / 2014-11-09 +================== + + * Add `application/cea` + * Add `application/dit` + * Add `application/vnd.gov.sk.e-form+zip` + * Add `application/vnd.tmd.mediaflex.api+xml` + * Type `application/epub+zip` is now IANA-registered + +1.1.2 / 2014-10-23 +================== + + * Rebuild database for `application/x-www-form-urlencoded` change + +1.1.1 / 2014-10-20 +================== + + * Mark `application/x-www-form-urlencoded` as compressible. + +1.1.0 / 2014-09-28 +================== + + * Add `application/font-woff2` + +1.0.3 / 2014-09-25 +================== + + * Fix engine requirement in package + +1.0.2 / 2014-09-25 +================== + + * Add `application/coap-group+json` + * Add `application/dcd` + * Add `application/vnd.apache.thrift.binary` + * Add `image/vnd.tencent.tap` + * Mark all JSON-derived types as compressible + * Update `text/vtt` data + +1.0.1 / 2014-08-30 +================== + + * Fix extension ordering + +1.0.0 / 2014-08-30 +================== + + * Add `application/atf` + * Add `application/merge-patch+json` + * Add `multipart/x-mixed-replace` + * Add `source: 'apache'` metadata + * Add `source: 'iana'` metadata + * Remove badly-assumed charset data diff --git a/node_modules/mime-db/LICENSE b/node_modules/mime-db/LICENSE new file mode 100644 index 0000000000..0751cb10e9 --- /dev/null +++ b/node_modules/mime-db/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015-2022 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-db/README.md b/node_modules/mime-db/README.md new file mode 100644 index 0000000000..5a8fcfe4d0 --- /dev/null +++ b/node_modules/mime-db/README.md @@ -0,0 +1,100 @@ +# mime-db + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +This is a large database of mime types and information about them. +It consists of a single, public JSON file and does not include any logic, +allowing it to remain as un-opinionated as possible with an API. +It aggregates data from the following sources: + +- http://www.iana.org/assignments/media-types/media-types.xhtml +- http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types +- http://hg.nginx.org/nginx/raw-file/default/conf/mime.types + +## Installation + +```bash +npm install mime-db +``` + +### Database Download + +If you're crazy enough to use this in the browser, you can just grab the +JSON file using [jsDelivr](https://www.jsdelivr.com/). It is recommended to +replace `master` with [a release tag](https://github.com/jshttp/mime-db/tags) +as the JSON format may change in the future. + +``` +https://cdn.jsdelivr.net/gh/jshttp/mime-db@master/db.json +``` + +## Usage + +```js +var db = require('mime-db') + +// grab data on .js files +var data = db['application/javascript'] +``` + +## Data Structure + +The JSON file is a map lookup for lowercased mime types. +Each mime type has the following properties: + +- `.source` - where the mime type is defined. + If not set, it's probably a custom media type. + - `apache` - [Apache common media types](http://svn.apache.org/repos/asf/httpd/httpd/trunk/docs/conf/mime.types) + - `iana` - [IANA-defined media types](http://www.iana.org/assignments/media-types/media-types.xhtml) + - `nginx` - [nginx media types](http://hg.nginx.org/nginx/raw-file/default/conf/mime.types) +- `.extensions[]` - known extensions associated with this mime type. +- `.compressible` - whether a file of this type can be gzipped. +- `.charset` - the default charset associated with this type, if any. + +If unknown, every property could be `undefined`. + +## Contributing + +To edit the database, only make PRs against `src/custom-types.json` or +`src/custom-suffix.json`. + +The `src/custom-types.json` file is a JSON object with the MIME type as the +keys and the values being an object with the following keys: + +- `compressible` - leave out if you don't know, otherwise `true`/`false` to + indicate whether the data represented by the type is typically compressible. +- `extensions` - include an array of file extensions that are associated with + the type. +- `notes` - human-readable notes about the type, typically what the type is. +- `sources` - include an array of URLs of where the MIME type and the associated + extensions are sourced from. 
This needs to be a [primary source](https://en.wikipedia.org/wiki/Primary_source); + links to type aggregating sites and Wikipedia are _not acceptable_. + +To update the build, run `npm run build`. + +### Adding Custom Media Types + +The best way to get new media types included in this library is to register +them with the IANA. The community registration procedure is outlined in +[RFC 6838 section 5](http://tools.ietf.org/html/rfc6838#section-5). Types +registered with the IANA are automatically pulled into this library. + +If that is not possible / feasible, they can be added directly here as a +"custom" type. To do this, it is required to have a primary source that +definitively lists the media type. If an extension is going to be listed as +associateed with this media type, the source must definitively link the +media type and extension as well. + +[ci-image]: https://badgen.net/github/checks/jshttp/mime-db/master?label=ci +[ci-url]: https://github.com/jshttp/mime-db/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-db/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-db?branch=master +[node-image]: https://badgen.net/npm/node/mime-db +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-db +[npm-url]: https://npmjs.org/package/mime-db +[npm-version-image]: https://badgen.net/npm/v/mime-db diff --git a/node_modules/mime-db/db.json b/node_modules/mime-db/db.json new file mode 100644 index 0000000000..eb9c42c457 --- /dev/null +++ b/node_modules/mime-db/db.json @@ -0,0 +1,8519 @@ +{ + "application/1d-interleaved-parityfec": { + "source": "iana" + }, + "application/3gpdash-qoe-report+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/3gpp-ims+xml": { + "source": "iana", + "compressible": true + }, + "application/3gpphal+json": { + "source": "iana", + "compressible": true + }, + "application/3gpphalforms+json": { + "source": "iana", + "compressible": true + }, + "application/a2l": { + "source": "iana" + }, + "application/ace+cbor": { + "source": "iana" + }, + "application/activemessage": { + "source": "iana" + }, + "application/activity+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-costmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-directory+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcost+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointcostparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointprop+json": { + "source": "iana", + "compressible": true + }, + "application/alto-endpointpropparams+json": { + "source": "iana", + "compressible": true + }, + "application/alto-error+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmap+json": { + "source": "iana", + "compressible": true + }, + "application/alto-networkmapfilter+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamcontrol+json": { + "source": "iana", + "compressible": true + }, + "application/alto-updatestreamparams+json": { + "source": "iana", + "compressible": true + }, + "application/aml": { + "source": "iana" + }, + "application/andrew-inset": { + "source": "iana", + "extensions": ["ez"] + }, + "application/applefile": { + "source": "iana" + }, + 
"application/applixware": { + "source": "apache", + "extensions": ["aw"] + }, + "application/at+jwt": { + "source": "iana" + }, + "application/atf": { + "source": "iana" + }, + "application/atfx": { + "source": "iana" + }, + "application/atom+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atom"] + }, + "application/atomcat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomcat"] + }, + "application/atomdeleted+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomdeleted"] + }, + "application/atomicmail": { + "source": "iana" + }, + "application/atomsvc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["atomsvc"] + }, + "application/atsc-dwd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dwd"] + }, + "application/atsc-dynamic-event-message": { + "source": "iana" + }, + "application/atsc-held+xml": { + "source": "iana", + "compressible": true, + "extensions": ["held"] + }, + "application/atsc-rdt+json": { + "source": "iana", + "compressible": true + }, + "application/atsc-rsat+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsat"] + }, + "application/atxml": { + "source": "iana" + }, + "application/auth-policy+xml": { + "source": "iana", + "compressible": true + }, + "application/bacnet-xdd+zip": { + "source": "iana", + "compressible": false + }, + "application/batch-smtp": { + "source": "iana" + }, + "application/bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/beep+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/calendar+json": { + "source": "iana", + "compressible": true + }, + "application/calendar+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xcs"] + }, + "application/call-completion": { + "source": "iana" + }, + "application/cals-1840": { + "source": "iana" + }, + "application/captive+json": { + "source": "iana", + "compressible": true + }, + "application/cbor": { + "source": "iana" + }, + "application/cbor-seq": { + "source": "iana" + }, + "application/cccex": { + "source": "iana" + }, + "application/ccmp+xml": { + "source": "iana", + "compressible": true + }, + "application/ccxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ccxml"] + }, + "application/cdfx+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdfx"] + }, + "application/cdmi-capability": { + "source": "iana", + "extensions": ["cdmia"] + }, + "application/cdmi-container": { + "source": "iana", + "extensions": ["cdmic"] + }, + "application/cdmi-domain": { + "source": "iana", + "extensions": ["cdmid"] + }, + "application/cdmi-object": { + "source": "iana", + "extensions": ["cdmio"] + }, + "application/cdmi-queue": { + "source": "iana", + "extensions": ["cdmiq"] + }, + "application/cdni": { + "source": "iana" + }, + "application/cea": { + "source": "iana" + }, + "application/cea-2018+xml": { + "source": "iana", + "compressible": true + }, + "application/cellml+xml": { + "source": "iana", + "compressible": true + }, + "application/cfw": { + "source": "iana" + }, + "application/city+json": { + "source": "iana", + "compressible": true + }, + "application/clr": { + "source": "iana" + }, + "application/clue+xml": { + "source": "iana", + "compressible": true + }, + "application/clue_info+xml": { + "source": "iana", + "compressible": true + }, + "application/cms": { + "source": "iana" + }, + "application/cnrp+xml": { + "source": "iana", + "compressible": true + 
}, + "application/coap-group+json": { + "source": "iana", + "compressible": true + }, + "application/coap-payload": { + "source": "iana" + }, + "application/commonground": { + "source": "iana" + }, + "application/conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/cose": { + "source": "iana" + }, + "application/cose-key": { + "source": "iana" + }, + "application/cose-key-set": { + "source": "iana" + }, + "application/cpl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cpl"] + }, + "application/csrattrs": { + "source": "iana" + }, + "application/csta+xml": { + "source": "iana", + "compressible": true + }, + "application/cstadata+xml": { + "source": "iana", + "compressible": true + }, + "application/csvm+json": { + "source": "iana", + "compressible": true + }, + "application/cu-seeme": { + "source": "apache", + "extensions": ["cu"] + }, + "application/cwt": { + "source": "iana" + }, + "application/cybercash": { + "source": "iana" + }, + "application/dart": { + "compressible": true + }, + "application/dash+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpd"] + }, + "application/dash-patch+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpp"] + }, + "application/dashdelta": { + "source": "iana" + }, + "application/davmount+xml": { + "source": "iana", + "compressible": true, + "extensions": ["davmount"] + }, + "application/dca-rft": { + "source": "iana" + }, + "application/dcd": { + "source": "iana" + }, + "application/dec-dx": { + "source": "iana" + }, + "application/dialog-info+xml": { + "source": "iana", + "compressible": true + }, + "application/dicom": { + "source": "iana" + }, + "application/dicom+json": { + "source": "iana", + "compressible": true + }, + "application/dicom+xml": { + "source": "iana", + "compressible": true + }, + "application/dii": { + "source": "iana" + }, + "application/dit": { + "source": "iana" + }, + "application/dns": { + "source": "iana" + }, + "application/dns+json": { + "source": "iana", + "compressible": true + }, + "application/dns-message": { + "source": "iana" + }, + "application/docbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dbk"] + }, + "application/dots+cbor": { + "source": "iana" + }, + "application/dskpp+xml": { + "source": "iana", + "compressible": true + }, + "application/dssc+der": { + "source": "iana", + "extensions": ["dssc"] + }, + "application/dssc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdssc"] + }, + "application/dvcs": { + "source": "iana" + }, + "application/ecmascript": { + "source": "iana", + "compressible": true, + "extensions": ["es","ecma"] + }, + "application/edi-consent": { + "source": "iana" + }, + "application/edi-x12": { + "source": "iana", + "compressible": false + }, + "application/edifact": { + "source": "iana", + "compressible": false + }, + "application/efi": { + "source": "iana" + }, + "application/elm+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/elm+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.cap+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/emergencycalldata.comment+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.control+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.deviceinfo+xml": { + "source": "iana", + "compressible": true + }, + 
"application/emergencycalldata.ecall.msd": { + "source": "iana" + }, + "application/emergencycalldata.providerinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.serviceinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.subscriberinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/emergencycalldata.veds+xml": { + "source": "iana", + "compressible": true + }, + "application/emma+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emma"] + }, + "application/emotionml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["emotionml"] + }, + "application/encaprtp": { + "source": "iana" + }, + "application/epp+xml": { + "source": "iana", + "compressible": true + }, + "application/epub+zip": { + "source": "iana", + "compressible": false, + "extensions": ["epub"] + }, + "application/eshop": { + "source": "iana" + }, + "application/exi": { + "source": "iana", + "extensions": ["exi"] + }, + "application/expect-ct-report+json": { + "source": "iana", + "compressible": true + }, + "application/express": { + "source": "iana", + "extensions": ["exp"] + }, + "application/fastinfoset": { + "source": "iana" + }, + "application/fastsoap": { + "source": "iana" + }, + "application/fdt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fdt"] + }, + "application/fhir+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fhir+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/fido.trusted-apps+json": { + "compressible": true + }, + "application/fits": { + "source": "iana" + }, + "application/flexfec": { + "source": "iana" + }, + "application/font-sfnt": { + "source": "iana" + }, + "application/font-tdpfr": { + "source": "iana", + "extensions": ["pfr"] + }, + "application/font-woff": { + "source": "iana", + "compressible": false + }, + "application/framework-attributes+xml": { + "source": "iana", + "compressible": true + }, + "application/geo+json": { + "source": "iana", + "compressible": true, + "extensions": ["geojson"] + }, + "application/geo+json-seq": { + "source": "iana" + }, + "application/geopackage+sqlite3": { + "source": "iana" + }, + "application/geoxacml+xml": { + "source": "iana", + "compressible": true + }, + "application/gltf-buffer": { + "source": "iana" + }, + "application/gml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["gml"] + }, + "application/gpx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["gpx"] + }, + "application/gxf": { + "source": "apache", + "extensions": ["gxf"] + }, + "application/gzip": { + "source": "iana", + "compressible": false, + "extensions": ["gz"] + }, + "application/h224": { + "source": "iana" + }, + "application/held+xml": { + "source": "iana", + "compressible": true + }, + "application/hjson": { + "extensions": ["hjson"] + }, + "application/http": { + "source": "iana" + }, + "application/hyperstudio": { + "source": "iana", + "extensions": ["stk"] + }, + "application/ibe-key-request+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pkg-reply+xml": { + "source": "iana", + "compressible": true + }, + "application/ibe-pp-data": { + "source": "iana" + }, + "application/iges": { + "source": "iana" + }, + "application/im-iscomposing+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/index": { + "source": "iana" + }, + 
"application/index.cmd": { + "source": "iana" + }, + "application/index.obj": { + "source": "iana" + }, + "application/index.response": { + "source": "iana" + }, + "application/index.vnd": { + "source": "iana" + }, + "application/inkml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ink","inkml"] + }, + "application/iotp": { + "source": "iana" + }, + "application/ipfix": { + "source": "iana", + "extensions": ["ipfix"] + }, + "application/ipp": { + "source": "iana" + }, + "application/isup": { + "source": "iana" + }, + "application/its+xml": { + "source": "iana", + "compressible": true, + "extensions": ["its"] + }, + "application/java-archive": { + "source": "apache", + "compressible": false, + "extensions": ["jar","war","ear"] + }, + "application/java-serialized-object": { + "source": "apache", + "compressible": false, + "extensions": ["ser"] + }, + "application/java-vm": { + "source": "apache", + "compressible": false, + "extensions": ["class"] + }, + "application/javascript": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["js","mjs"] + }, + "application/jf2feed+json": { + "source": "iana", + "compressible": true + }, + "application/jose": { + "source": "iana" + }, + "application/jose+json": { + "source": "iana", + "compressible": true + }, + "application/jrd+json": { + "source": "iana", + "compressible": true + }, + "application/jscalendar+json": { + "source": "iana", + "compressible": true + }, + "application/json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["json","map"] + }, + "application/json-patch+json": { + "source": "iana", + "compressible": true + }, + "application/json-seq": { + "source": "iana" + }, + "application/json5": { + "extensions": ["json5"] + }, + "application/jsonml+json": { + "source": "apache", + "compressible": true, + "extensions": ["jsonml"] + }, + "application/jwk+json": { + "source": "iana", + "compressible": true + }, + "application/jwk-set+json": { + "source": "iana", + "compressible": true + }, + "application/jwt": { + "source": "iana" + }, + "application/kpml-request+xml": { + "source": "iana", + "compressible": true + }, + "application/kpml-response+xml": { + "source": "iana", + "compressible": true + }, + "application/ld+json": { + "source": "iana", + "compressible": true, + "extensions": ["jsonld"] + }, + "application/lgr+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lgr"] + }, + "application/link-format": { + "source": "iana" + }, + "application/load-control+xml": { + "source": "iana", + "compressible": true + }, + "application/lost+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lostxml"] + }, + "application/lostsync+xml": { + "source": "iana", + "compressible": true + }, + "application/lpf+zip": { + "source": "iana", + "compressible": false + }, + "application/lxf": { + "source": "iana" + }, + "application/mac-binhex40": { + "source": "iana", + "extensions": ["hqx"] + }, + "application/mac-compactpro": { + "source": "apache", + "extensions": ["cpt"] + }, + "application/macwriteii": { + "source": "iana" + }, + "application/mads+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mads"] + }, + "application/manifest+json": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["webmanifest"] + }, + "application/marc": { + "source": "iana", + "extensions": ["mrc"] + }, + "application/marcxml+xml": { + "source": "iana", + "compressible": true, + "extensions": 
["mrcx"] + }, + "application/mathematica": { + "source": "iana", + "extensions": ["ma","nb","mb"] + }, + "application/mathml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mathml"] + }, + "application/mathml-content+xml": { + "source": "iana", + "compressible": true + }, + "application/mathml-presentation+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-associated-procedure-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-deregister+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-envelope+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-msk-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-protection-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-reception-report+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-register-response+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-schedule+xml": { + "source": "iana", + "compressible": true + }, + "application/mbms-user-service-description+xml": { + "source": "iana", + "compressible": true + }, + "application/mbox": { + "source": "iana", + "extensions": ["mbox"] + }, + "application/media-policy-dataset+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpf"] + }, + "application/media_control+xml": { + "source": "iana", + "compressible": true + }, + "application/mediaservercontrol+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mscml"] + }, + "application/merge-patch+json": { + "source": "iana", + "compressible": true + }, + "application/metalink+xml": { + "source": "apache", + "compressible": true, + "extensions": ["metalink"] + }, + "application/metalink4+xml": { + "source": "iana", + "compressible": true, + "extensions": ["meta4"] + }, + "application/mets+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mets"] + }, + "application/mf4": { + "source": "iana" + }, + "application/mikey": { + "source": "iana" + }, + "application/mipc": { + "source": "iana" + }, + "application/missing-blocks+cbor-seq": { + "source": "iana" + }, + "application/mmt-aei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["maei"] + }, + "application/mmt-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musd"] + }, + "application/mods+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mods"] + }, + "application/moss-keys": { + "source": "iana" + }, + "application/moss-signature": { + "source": "iana" + }, + "application/mosskey-data": { + "source": "iana" + }, + "application/mosskey-request": { + "source": "iana" + }, + "application/mp21": { + "source": "iana", + "extensions": ["m21","mp21"] + }, + "application/mp4": { + "source": "iana", + "extensions": ["mp4s","m4p"] + }, + "application/mpeg4-generic": { + "source": "iana" + }, + "application/mpeg4-iod": { + "source": "iana" + }, + "application/mpeg4-iod-xmt": { + "source": "iana" + }, + "application/mrb-consumer+xml": { + "source": "iana", + "compressible": true + }, + "application/mrb-publish+xml": { + "source": "iana", + "compressible": true + }, + "application/msc-ivr+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + 
"application/msc-mixer+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/msword": { + "source": "iana", + "compressible": false, + "extensions": ["doc","dot"] + }, + "application/mud+json": { + "source": "iana", + "compressible": true + }, + "application/multipart-core": { + "source": "iana" + }, + "application/mxf": { + "source": "iana", + "extensions": ["mxf"] + }, + "application/n-quads": { + "source": "iana", + "extensions": ["nq"] + }, + "application/n-triples": { + "source": "iana", + "extensions": ["nt"] + }, + "application/nasdata": { + "source": "iana" + }, + "application/news-checkgroups": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-groupinfo": { + "source": "iana", + "charset": "US-ASCII" + }, + "application/news-transmission": { + "source": "iana" + }, + "application/nlsml+xml": { + "source": "iana", + "compressible": true + }, + "application/node": { + "source": "iana", + "extensions": ["cjs"] + }, + "application/nss": { + "source": "iana" + }, + "application/oauth-authz-req+jwt": { + "source": "iana" + }, + "application/oblivious-dns-message": { + "source": "iana" + }, + "application/ocsp-request": { + "source": "iana" + }, + "application/ocsp-response": { + "source": "iana" + }, + "application/octet-stream": { + "source": "iana", + "compressible": false, + "extensions": ["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"] + }, + "application/oda": { + "source": "iana", + "extensions": ["oda"] + }, + "application/odm+xml": { + "source": "iana", + "compressible": true + }, + "application/odx": { + "source": "iana" + }, + "application/oebps-package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["opf"] + }, + "application/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogx"] + }, + "application/omdoc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["omdoc"] + }, + "application/onenote": { + "source": "apache", + "extensions": ["onetoc","onetoc2","onetmp","onepkg"] + }, + "application/opc-nodeset+xml": { + "source": "iana", + "compressible": true + }, + "application/oscore": { + "source": "iana" + }, + "application/oxps": { + "source": "iana", + "extensions": ["oxps"] + }, + "application/p21": { + "source": "iana" + }, + "application/p21+zip": { + "source": "iana", + "compressible": false + }, + "application/p2p-overlay+xml": { + "source": "iana", + "compressible": true, + "extensions": ["relo"] + }, + "application/parityfec": { + "source": "iana" + }, + "application/passport": { + "source": "iana" + }, + "application/patch-ops-error+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xer"] + }, + "application/pdf": { + "source": "iana", + "compressible": false, + "extensions": ["pdf"] + }, + "application/pdx": { + "source": "iana" + }, + "application/pem-certificate-chain": { + "source": "iana" + }, + "application/pgp-encrypted": { + "source": "iana", + "compressible": false, + "extensions": ["pgp"] + }, + "application/pgp-keys": { + "source": "iana", + "extensions": ["asc"] + }, + "application/pgp-signature": { + "source": "iana", + "extensions": ["asc","sig"] + }, + "application/pics-rules": { + "source": "apache", + "extensions": ["prf"] + }, + "application/pidf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/pidf-diff+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true 
+ }, + "application/pkcs10": { + "source": "iana", + "extensions": ["p10"] + }, + "application/pkcs12": { + "source": "iana" + }, + "application/pkcs7-mime": { + "source": "iana", + "extensions": ["p7m","p7c"] + }, + "application/pkcs7-signature": { + "source": "iana", + "extensions": ["p7s"] + }, + "application/pkcs8": { + "source": "iana", + "extensions": ["p8"] + }, + "application/pkcs8-encrypted": { + "source": "iana" + }, + "application/pkix-attr-cert": { + "source": "iana", + "extensions": ["ac"] + }, + "application/pkix-cert": { + "source": "iana", + "extensions": ["cer"] + }, + "application/pkix-crl": { + "source": "iana", + "extensions": ["crl"] + }, + "application/pkix-pkipath": { + "source": "iana", + "extensions": ["pkipath"] + }, + "application/pkixcmp": { + "source": "iana", + "extensions": ["pki"] + }, + "application/pls+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pls"] + }, + "application/poc-settings+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/postscript": { + "source": "iana", + "compressible": true, + "extensions": ["ai","eps","ps"] + }, + "application/ppsp-tracker+json": { + "source": "iana", + "compressible": true + }, + "application/problem+json": { + "source": "iana", + "compressible": true + }, + "application/problem+xml": { + "source": "iana", + "compressible": true + }, + "application/provenance+xml": { + "source": "iana", + "compressible": true, + "extensions": ["provx"] + }, + "application/prs.alvestrand.titrax-sheet": { + "source": "iana" + }, + "application/prs.cww": { + "source": "iana", + "extensions": ["cww"] + }, + "application/prs.cyn": { + "source": "iana", + "charset": "7-BIT" + }, + "application/prs.hpub+zip": { + "source": "iana", + "compressible": false + }, + "application/prs.nprend": { + "source": "iana" + }, + "application/prs.plucker": { + "source": "iana" + }, + "application/prs.rdf-xml-crypt": { + "source": "iana" + }, + "application/prs.xsf+xml": { + "source": "iana", + "compressible": true + }, + "application/pskc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["pskcxml"] + }, + "application/pvd+json": { + "source": "iana", + "compressible": true + }, + "application/qsig": { + "source": "iana" + }, + "application/raml+yaml": { + "compressible": true, + "extensions": ["raml"] + }, + "application/raptorfec": { + "source": "iana" + }, + "application/rdap+json": { + "source": "iana", + "compressible": true + }, + "application/rdf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rdf","owl"] + }, + "application/reginfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rif"] + }, + "application/relax-ng-compact-syntax": { + "source": "iana", + "extensions": ["rnc"] + }, + "application/remote-printing": { + "source": "iana" + }, + "application/reputon+json": { + "source": "iana", + "compressible": true + }, + "application/resource-lists+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rl"] + }, + "application/resource-lists-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rld"] + }, + "application/rfc+xml": { + "source": "iana", + "compressible": true + }, + "application/riscos": { + "source": "iana" + }, + "application/rlmi+xml": { + "source": "iana", + "compressible": true + }, + "application/rls-services+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rs"] + }, + "application/route-apd+xml": { + "source": "iana", + "compressible": true, + 
"extensions": ["rapd"] + }, + "application/route-s-tsid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sls"] + }, + "application/route-usd+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rusd"] + }, + "application/rpki-ghostbusters": { + "source": "iana", + "extensions": ["gbr"] + }, + "application/rpki-manifest": { + "source": "iana", + "extensions": ["mft"] + }, + "application/rpki-publication": { + "source": "iana" + }, + "application/rpki-roa": { + "source": "iana", + "extensions": ["roa"] + }, + "application/rpki-updown": { + "source": "iana" + }, + "application/rsd+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rsd"] + }, + "application/rss+xml": { + "source": "apache", + "compressible": true, + "extensions": ["rss"] + }, + "application/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "application/rtploopback": { + "source": "iana" + }, + "application/rtx": { + "source": "iana" + }, + "application/samlassertion+xml": { + "source": "iana", + "compressible": true + }, + "application/samlmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/sarif+json": { + "source": "iana", + "compressible": true + }, + "application/sarif-external-properties+json": { + "source": "iana", + "compressible": true + }, + "application/sbe": { + "source": "iana" + }, + "application/sbml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sbml"] + }, + "application/scaip+xml": { + "source": "iana", + "compressible": true + }, + "application/scim+json": { + "source": "iana", + "compressible": true + }, + "application/scvp-cv-request": { + "source": "iana", + "extensions": ["scq"] + }, + "application/scvp-cv-response": { + "source": "iana", + "extensions": ["scs"] + }, + "application/scvp-vp-request": { + "source": "iana", + "extensions": ["spq"] + }, + "application/scvp-vp-response": { + "source": "iana", + "extensions": ["spp"] + }, + "application/sdp": { + "source": "iana", + "extensions": ["sdp"] + }, + "application/secevent+jwt": { + "source": "iana" + }, + "application/senml+cbor": { + "source": "iana" + }, + "application/senml+json": { + "source": "iana", + "compressible": true + }, + "application/senml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["senmlx"] + }, + "application/senml-etch+cbor": { + "source": "iana" + }, + "application/senml-etch+json": { + "source": "iana", + "compressible": true + }, + "application/senml-exi": { + "source": "iana" + }, + "application/sensml+cbor": { + "source": "iana" + }, + "application/sensml+json": { + "source": "iana", + "compressible": true + }, + "application/sensml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sensmlx"] + }, + "application/sensml-exi": { + "source": "iana" + }, + "application/sep+xml": { + "source": "iana", + "compressible": true + }, + "application/sep-exi": { + "source": "iana" + }, + "application/session-info": { + "source": "iana" + }, + "application/set-payment": { + "source": "iana" + }, + "application/set-payment-initiation": { + "source": "iana", + "extensions": ["setpay"] + }, + "application/set-registration": { + "source": "iana" + }, + "application/set-registration-initiation": { + "source": "iana", + "extensions": ["setreg"] + }, + "application/sgml": { + "source": "iana" + }, + "application/sgml-open-catalog": { + "source": "iana" + }, + "application/shf+xml": { + "source": "iana", + "compressible": true, + "extensions": ["shf"] + }, + 
"application/sieve": { + "source": "iana", + "extensions": ["siv","sieve"] + }, + "application/simple-filter+xml": { + "source": "iana", + "compressible": true + }, + "application/simple-message-summary": { + "source": "iana" + }, + "application/simplesymbolcontainer": { + "source": "iana" + }, + "application/sipc": { + "source": "iana" + }, + "application/slate": { + "source": "iana" + }, + "application/smil": { + "source": "iana" + }, + "application/smil+xml": { + "source": "iana", + "compressible": true, + "extensions": ["smi","smil"] + }, + "application/smpte336m": { + "source": "iana" + }, + "application/soap+fastinfoset": { + "source": "iana" + }, + "application/soap+xml": { + "source": "iana", + "compressible": true + }, + "application/sparql-query": { + "source": "iana", + "extensions": ["rq"] + }, + "application/sparql-results+xml": { + "source": "iana", + "compressible": true, + "extensions": ["srx"] + }, + "application/spdx+json": { + "source": "iana", + "compressible": true + }, + "application/spirits-event+xml": { + "source": "iana", + "compressible": true + }, + "application/sql": { + "source": "iana" + }, + "application/srgs": { + "source": "iana", + "extensions": ["gram"] + }, + "application/srgs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["grxml"] + }, + "application/sru+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sru"] + }, + "application/ssdl+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ssdl"] + }, + "application/ssml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ssml"] + }, + "application/stix+json": { + "source": "iana", + "compressible": true + }, + "application/swid+xml": { + "source": "iana", + "compressible": true, + "extensions": ["swidtag"] + }, + "application/tamp-apex-update": { + "source": "iana" + }, + "application/tamp-apex-update-confirm": { + "source": "iana" + }, + "application/tamp-community-update": { + "source": "iana" + }, + "application/tamp-community-update-confirm": { + "source": "iana" + }, + "application/tamp-error": { + "source": "iana" + }, + "application/tamp-sequence-adjust": { + "source": "iana" + }, + "application/tamp-sequence-adjust-confirm": { + "source": "iana" + }, + "application/tamp-status-query": { + "source": "iana" + }, + "application/tamp-status-response": { + "source": "iana" + }, + "application/tamp-update": { + "source": "iana" + }, + "application/tamp-update-confirm": { + "source": "iana" + }, + "application/tar": { + "compressible": true + }, + "application/taxii+json": { + "source": "iana", + "compressible": true + }, + "application/td+json": { + "source": "iana", + "compressible": true + }, + "application/tei+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tei","teicorpus"] + }, + "application/tetra_isi": { + "source": "iana" + }, + "application/thraud+xml": { + "source": "iana", + "compressible": true, + "extensions": ["tfi"] + }, + "application/timestamp-query": { + "source": "iana" + }, + "application/timestamp-reply": { + "source": "iana" + }, + "application/timestamped-data": { + "source": "iana", + "extensions": ["tsd"] + }, + "application/tlsrpt+gzip": { + "source": "iana" + }, + "application/tlsrpt+json": { + "source": "iana", + "compressible": true + }, + "application/tnauthlist": { + "source": "iana" + }, + "application/token-introspection+jwt": { + "source": "iana" + }, + "application/toml": { + "compressible": true, + "extensions": ["toml"] + }, + "application/trickle-ice-sdpfrag": { + 
"source": "iana" + }, + "application/trig": { + "source": "iana", + "extensions": ["trig"] + }, + "application/ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ttml"] + }, + "application/tve-trigger": { + "source": "iana" + }, + "application/tzif": { + "source": "iana" + }, + "application/tzif-leap": { + "source": "iana" + }, + "application/ubjson": { + "compressible": false, + "extensions": ["ubj"] + }, + "application/ulpfec": { + "source": "iana" + }, + "application/urc-grpsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/urc-ressheet+xml": { + "source": "iana", + "compressible": true, + "extensions": ["rsheet"] + }, + "application/urc-targetdesc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["td"] + }, + "application/urc-uisocketdesc+xml": { + "source": "iana", + "compressible": true + }, + "application/vcard+json": { + "source": "iana", + "compressible": true + }, + "application/vcard+xml": { + "source": "iana", + "compressible": true + }, + "application/vemmi": { + "source": "iana" + }, + "application/vividence.scriptfile": { + "source": "apache" + }, + "application/vnd.1000minds.decision-model+xml": { + "source": "iana", + "compressible": true, + "extensions": ["1km"] + }, + "application/vnd.3gpp-prose+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-prose-pc3ch+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp-v2x-local-service-information": { + "source": "iana" + }, + "application/vnd.3gpp.5gnas": { + "source": "iana" + }, + "application/vnd.3gpp.access-transfer-events+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.bsf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gmop+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.gtpc": { + "source": "iana" + }, + "application/vnd.3gpp.interworking-data": { + "source": "iana" + }, + "application/vnd.3gpp.lpp": { + "source": "iana" + }, + "application/vnd.3gpp.mc-signalling-ear": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-payload": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-signalling": { + "source": "iana" + }, + "application/vnd.3gpp.mcdata-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcdata-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-floor-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-signed+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-ue-config+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.3gpp.mcptt-ue-init-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcptt-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-command+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-affiliation-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-location-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-mbms-usage-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-service-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-transmission-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-ue-config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mcvideo-user-profile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.mid-call+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ngap": { + "source": "iana" + }, + "application/vnd.3gpp.pfcp": { + "source": "iana" + }, + "application/vnd.3gpp.pic-bw-large": { + "source": "iana", + "extensions": ["plb"] + }, + "application/vnd.3gpp.pic-bw-small": { + "source": "iana", + "extensions": ["psb"] + }, + "application/vnd.3gpp.pic-bw-var": { + "source": "iana", + "extensions": ["pvb"] + }, + "application/vnd.3gpp.s1ap": { + "source": "iana" + }, + "application/vnd.3gpp.sms": { + "source": "iana" + }, + "application/vnd.3gpp.sms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-ext+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.srvcc-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.state-and-event-info+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp.ussd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.bcmcsinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.3gpp2.sms": { + "source": "iana" + }, + "application/vnd.3gpp2.tcap": { + "source": "iana", + "extensions": ["tcap"] + }, + "application/vnd.3lightssoftware.imagescal": { + "source": "iana" + }, + "application/vnd.3m.post-it-notes": { + "source": "iana", + "extensions": ["pwn"] + }, + "application/vnd.accpac.simply.aso": { + "source": "iana", + "extensions": ["aso"] + }, + "application/vnd.accpac.simply.imp": { + "source": "iana", + "extensions": ["imp"] + }, + "application/vnd.acucobol": { + "source": "iana", + "extensions": ["acu"] + }, + "application/vnd.acucorp": { + "source": "iana", + "extensions": ["atc","acutc"] + }, + "application/vnd.adobe.air-application-installer-package+zip": { + "source": "apache", + "compressible": false, + "extensions": ["air"] + }, + "application/vnd.adobe.flash.movie": { + "source": "iana" + }, + "application/vnd.adobe.formscentral.fcdt": { + "source": "iana", + "extensions": ["fcdt"] + }, + "application/vnd.adobe.fxp": { + "source": "iana", + "extensions": ["fxp","fxpl"] + }, + "application/vnd.adobe.partial-upload": { + "source": "iana" + }, + "application/vnd.adobe.xdp+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdp"] + }, + "application/vnd.adobe.xfdf": { + "source": "iana", + "extensions": ["xfdf"] + }, 
+ "application/vnd.aether.imp": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata": { + "source": "iana" + }, + "application/vnd.afpc.afplinedata-pagedef": { + "source": "iana" + }, + "application/vnd.afpc.cmoca-cmresource": { + "source": "iana" + }, + "application/vnd.afpc.foca-charset": { + "source": "iana" + }, + "application/vnd.afpc.foca-codedfont": { + "source": "iana" + }, + "application/vnd.afpc.foca-codepage": { + "source": "iana" + }, + "application/vnd.afpc.modca": { + "source": "iana" + }, + "application/vnd.afpc.modca-cmtable": { + "source": "iana" + }, + "application/vnd.afpc.modca-formdef": { + "source": "iana" + }, + "application/vnd.afpc.modca-mediummap": { + "source": "iana" + }, + "application/vnd.afpc.modca-objectcontainer": { + "source": "iana" + }, + "application/vnd.afpc.modca-overlay": { + "source": "iana" + }, + "application/vnd.afpc.modca-pagesegment": { + "source": "iana" + }, + "application/vnd.age": { + "source": "iana", + "extensions": ["age"] + }, + "application/vnd.ah-barcode": { + "source": "iana" + }, + "application/vnd.ahead.space": { + "source": "iana", + "extensions": ["ahead"] + }, + "application/vnd.airzip.filesecure.azf": { + "source": "iana", + "extensions": ["azf"] + }, + "application/vnd.airzip.filesecure.azs": { + "source": "iana", + "extensions": ["azs"] + }, + "application/vnd.amadeus+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.amazon.ebook": { + "source": "apache", + "extensions": ["azw"] + }, + "application/vnd.amazon.mobi8-ebook": { + "source": "iana" + }, + "application/vnd.americandynamics.acc": { + "source": "iana", + "extensions": ["acc"] + }, + "application/vnd.amiga.ami": { + "source": "iana", + "extensions": ["ami"] + }, + "application/vnd.amundsen.maze+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.android.ota": { + "source": "iana" + }, + "application/vnd.android.package-archive": { + "source": "apache", + "compressible": false, + "extensions": ["apk"] + }, + "application/vnd.anki": { + "source": "iana" + }, + "application/vnd.anser-web-certificate-issue-initiation": { + "source": "iana", + "extensions": ["cii"] + }, + "application/vnd.anser-web-funds-transfer-initiation": { + "source": "apache", + "extensions": ["fti"] + }, + "application/vnd.antix.game-component": { + "source": "iana", + "extensions": ["atx"] + }, + "application/vnd.apache.arrow.file": { + "source": "iana" + }, + "application/vnd.apache.arrow.stream": { + "source": "iana" + }, + "application/vnd.apache.thrift.binary": { + "source": "iana" + }, + "application/vnd.apache.thrift.compact": { + "source": "iana" + }, + "application/vnd.apache.thrift.json": { + "source": "iana" + }, + "application/vnd.api+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.aplextor.warrp+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apothekende.reservation+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.apple.installer+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mpkg"] + }, + "application/vnd.apple.keynote": { + "source": "iana", + "extensions": ["key"] + }, + "application/vnd.apple.mpegurl": { + "source": "iana", + "extensions": ["m3u8"] + }, + "application/vnd.apple.numbers": { + "source": "iana", + "extensions": ["numbers"] + }, + "application/vnd.apple.pages": { + "source": "iana", + "extensions": ["pages"] + }, + "application/vnd.apple.pkpass": { + "compressible": false, + "extensions": ["pkpass"] + }, + 
"application/vnd.arastra.swi": { + "source": "iana" + }, + "application/vnd.aristanetworks.swi": { + "source": "iana", + "extensions": ["swi"] + }, + "application/vnd.artisan+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.artsquare": { + "source": "iana" + }, + "application/vnd.astraea-software.iota": { + "source": "iana", + "extensions": ["iota"] + }, + "application/vnd.audiograph": { + "source": "iana", + "extensions": ["aep"] + }, + "application/vnd.autopackage": { + "source": "iana" + }, + "application/vnd.avalon+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.avistar+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.balsamiq.bmml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["bmml"] + }, + "application/vnd.balsamiq.bmpr": { + "source": "iana" + }, + "application/vnd.banana-accounting": { + "source": "iana" + }, + "application/vnd.bbf.usp.error": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg": { + "source": "iana" + }, + "application/vnd.bbf.usp.msg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bekitzur-stech+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.bint.med-content": { + "source": "iana" + }, + "application/vnd.biopax.rdf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.blink-idb-value-wrapper": { + "source": "iana" + }, + "application/vnd.blueice.multipass": { + "source": "iana", + "extensions": ["mpm"] + }, + "application/vnd.bluetooth.ep.oob": { + "source": "iana" + }, + "application/vnd.bluetooth.le.oob": { + "source": "iana" + }, + "application/vnd.bmi": { + "source": "iana", + "extensions": ["bmi"] + }, + "application/vnd.bpf": { + "source": "iana" + }, + "application/vnd.bpf3": { + "source": "iana" + }, + "application/vnd.businessobjects": { + "source": "iana", + "extensions": ["rep"] + }, + "application/vnd.byu.uapi+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cab-jscript": { + "source": "iana" + }, + "application/vnd.canon-cpdl": { + "source": "iana" + }, + "application/vnd.canon-lips": { + "source": "iana" + }, + "application/vnd.capasystems-pg+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cendio.thinlinc.clientconf": { + "source": "iana" + }, + "application/vnd.century-systems.tcp_stream": { + "source": "iana" + }, + "application/vnd.chemdraw+xml": { + "source": "iana", + "compressible": true, + "extensions": ["cdxml"] + }, + "application/vnd.chess-pgn": { + "source": "iana" + }, + "application/vnd.chipnuts.karaoke-mmd": { + "source": "iana", + "extensions": ["mmd"] + }, + "application/vnd.ciedi": { + "source": "iana" + }, + "application/vnd.cinderella": { + "source": "iana", + "extensions": ["cdy"] + }, + "application/vnd.cirpack.isdn-ext": { + "source": "iana" + }, + "application/vnd.citationstyles.style+xml": { + "source": "iana", + "compressible": true, + "extensions": ["csl"] + }, + "application/vnd.claymore": { + "source": "iana", + "extensions": ["cla"] + }, + "application/vnd.cloanto.rp9": { + "source": "iana", + "extensions": ["rp9"] + }, + "application/vnd.clonk.c4group": { + "source": "iana", + "extensions": ["c4g","c4d","c4f","c4p","c4u"] + }, + "application/vnd.cluetrust.cartomobile-config": { + "source": "iana", + "extensions": ["c11amc"] + }, + "application/vnd.cluetrust.cartomobile-config-pkg": { + "source": "iana", + "extensions": ["c11amz"] + }, + "application/vnd.coffeescript": { + "source": 
"iana" + }, + "application/vnd.collabio.xodocuments.document": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.document-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.presentation-template": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet": { + "source": "iana" + }, + "application/vnd.collabio.xodocuments.spreadsheet-template": { + "source": "iana" + }, + "application/vnd.collection+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.doc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.collection.next+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.comicbook+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.comicbook-rar": { + "source": "iana" + }, + "application/vnd.commerce-battelle": { + "source": "iana" + }, + "application/vnd.commonspace": { + "source": "iana", + "extensions": ["csp"] + }, + "application/vnd.contact.cmsg": { + "source": "iana", + "extensions": ["cdbcmsg"] + }, + "application/vnd.coreos.ignition+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cosmocaller": { + "source": "iana", + "extensions": ["cmc"] + }, + "application/vnd.crick.clicker": { + "source": "iana", + "extensions": ["clkx"] + }, + "application/vnd.crick.clicker.keyboard": { + "source": "iana", + "extensions": ["clkk"] + }, + "application/vnd.crick.clicker.palette": { + "source": "iana", + "extensions": ["clkp"] + }, + "application/vnd.crick.clicker.template": { + "source": "iana", + "extensions": ["clkt"] + }, + "application/vnd.crick.clicker.wordbank": { + "source": "iana", + "extensions": ["clkw"] + }, + "application/vnd.criticaltools.wbs+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wbs"] + }, + "application/vnd.cryptii.pipe+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.crypto-shade-file": { + "source": "iana" + }, + "application/vnd.cryptomator.encrypted": { + "source": "iana" + }, + "application/vnd.cryptomator.vault": { + "source": "iana" + }, + "application/vnd.ctc-posml": { + "source": "iana", + "extensions": ["pml"] + }, + "application/vnd.ctct.ws+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cups-pdf": { + "source": "iana" + }, + "application/vnd.cups-postscript": { + "source": "iana" + }, + "application/vnd.cups-ppd": { + "source": "iana", + "extensions": ["ppd"] + }, + "application/vnd.cups-raster": { + "source": "iana" + }, + "application/vnd.cups-raw": { + "source": "iana" + }, + "application/vnd.curl": { + "source": "iana" + }, + "application/vnd.curl.car": { + "source": "apache", + "extensions": ["car"] + }, + "application/vnd.curl.pcurl": { + "source": "apache", + "extensions": ["pcurl"] + }, + "application/vnd.cyan.dean.root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.cybank": { + "source": "iana" + }, + "application/vnd.cyclonedx+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.cyclonedx+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.d2l.coursepackage1p0+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.d3m-dataset": { + "source": "iana" + }, + "application/vnd.d3m-problem": { + "source": "iana" + }, + "application/vnd.dart": { + "source": "iana", + "compressible": true, + "extensions": ["dart"] + 
}, + "application/vnd.data-vision.rdz": { + "source": "iana", + "extensions": ["rdz"] + }, + "application/vnd.datapackage+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dataresource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dbf": { + "source": "iana", + "extensions": ["dbf"] + }, + "application/vnd.debian.binary-package": { + "source": "iana" + }, + "application/vnd.dece.data": { + "source": "iana", + "extensions": ["uvf","uvvf","uvd","uvvd"] + }, + "application/vnd.dece.ttml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uvt","uvvt"] + }, + "application/vnd.dece.unspecified": { + "source": "iana", + "extensions": ["uvx","uvvx"] + }, + "application/vnd.dece.zip": { + "source": "iana", + "extensions": ["uvz","uvvz"] + }, + "application/vnd.denovo.fcselayout-link": { + "source": "iana", + "extensions": ["fe_launch"] + }, + "application/vnd.desmume.movie": { + "source": "iana" + }, + "application/vnd.dir-bi.plate-dl-nosuffix": { + "source": "iana" + }, + "application/vnd.dm.delegation+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dna": { + "source": "iana", + "extensions": ["dna"] + }, + "application/vnd.document+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.dolby.mlp": { + "source": "apache", + "extensions": ["mlp"] + }, + "application/vnd.dolby.mobile.1": { + "source": "iana" + }, + "application/vnd.dolby.mobile.2": { + "source": "iana" + }, + "application/vnd.doremir.scorecloud-binary-document": { + "source": "iana" + }, + "application/vnd.dpgraph": { + "source": "iana", + "extensions": ["dpg"] + }, + "application/vnd.dreamfactory": { + "source": "iana", + "extensions": ["dfac"] + }, + "application/vnd.drive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ds-keypoint": { + "source": "apache", + "extensions": ["kpxx"] + }, + "application/vnd.dtg.local": { + "source": "iana" + }, + "application/vnd.dtg.local.flash": { + "source": "iana" + }, + "application/vnd.dtg.local.html": { + "source": "iana" + }, + "application/vnd.dvb.ait": { + "source": "iana", + "extensions": ["ait"] + }, + "application/vnd.dvb.dvbisl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.dvbj": { + "source": "iana" + }, + "application/vnd.dvb.esgcontainer": { + "source": "iana" + }, + "application/vnd.dvb.ipdcdftnotifaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgaccess2": { + "source": "iana" + }, + "application/vnd.dvb.ipdcesgpdd": { + "source": "iana" + }, + "application/vnd.dvb.ipdcroaming": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-base": { + "source": "iana" + }, + "application/vnd.dvb.iptv.alfec-enhancement": { + "source": "iana" + }, + "application/vnd.dvb.notif-aggregate-root+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-container+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-generic+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-msglist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-ia-registration-response+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.dvb.notif-init+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.dvb.pfr": { + "source": "iana" + }, + "application/vnd.dvb.service": { + "source": "iana", + "extensions": ["svc"] + }, + "application/vnd.dxr": { + "source": "iana" + }, + "application/vnd.dynageo": { + "source": "iana", + "extensions": ["geo"] + }, + "application/vnd.dzr": { + "source": "iana" + }, + "application/vnd.easykaraoke.cdgdownload": { + "source": "iana" + }, + "application/vnd.ecdis-update": { + "source": "iana" + }, + "application/vnd.ecip.rlp": { + "source": "iana" + }, + "application/vnd.eclipse.ditto+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ecowin.chart": { + "source": "iana", + "extensions": ["mag"] + }, + "application/vnd.ecowin.filerequest": { + "source": "iana" + }, + "application/vnd.ecowin.fileupdate": { + "source": "iana" + }, + "application/vnd.ecowin.series": { + "source": "iana" + }, + "application/vnd.ecowin.seriesrequest": { + "source": "iana" + }, + "application/vnd.ecowin.seriesupdate": { + "source": "iana" + }, + "application/vnd.efi.img": { + "source": "iana" + }, + "application/vnd.efi.iso": { + "source": "iana" + }, + "application/vnd.emclient.accessrequest+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.enliven": { + "source": "iana", + "extensions": ["nml"] + }, + "application/vnd.enphase.envoy": { + "source": "iana" + }, + "application/vnd.eprints.data+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.epson.esf": { + "source": "iana", + "extensions": ["esf"] + }, + "application/vnd.epson.msf": { + "source": "iana", + "extensions": ["msf"] + }, + "application/vnd.epson.quickanime": { + "source": "iana", + "extensions": ["qam"] + }, + "application/vnd.epson.salt": { + "source": "iana", + "extensions": ["slt"] + }, + "application/vnd.epson.ssf": { + "source": "iana", + "extensions": ["ssf"] + }, + "application/vnd.ericsson.quickcall": { + "source": "iana" + }, + "application/vnd.espass-espass+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.eszigno3+xml": { + "source": "iana", + "compressible": true, + "extensions": ["es3","et3"] + }, + "application/vnd.etsi.aoc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.asic-e+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.asic-s+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.etsi.cug+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvcommand+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-bc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-cod+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsad-npvr+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvservice+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvsync+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.iptvueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mcid+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.mheg5": { + "source": "iana" + }, + "application/vnd.etsi.overload-control-policy-dataset+xml": { + "source": "iana", + "compressible": 
true + }, + "application/vnd.etsi.pstn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.sci+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.simservs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.timestamp-token": { + "source": "iana" + }, + "application/vnd.etsi.tsl+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.etsi.tsl.der": { + "source": "iana" + }, + "application/vnd.eu.kasparian.car+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.eudora.data": { + "source": "iana" + }, + "application/vnd.evolv.ecig.profile": { + "source": "iana" + }, + "application/vnd.evolv.ecig.settings": { + "source": "iana" + }, + "application/vnd.evolv.ecig.theme": { + "source": "iana" + }, + "application/vnd.exstream-empower+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.exstream-package": { + "source": "iana" + }, + "application/vnd.ezpix-album": { + "source": "iana", + "extensions": ["ez2"] + }, + "application/vnd.ezpix-package": { + "source": "iana", + "extensions": ["ez3"] + }, + "application/vnd.f-secure.mobile": { + "source": "iana" + }, + "application/vnd.familysearch.gedcom+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.fastcopy-disk-image": { + "source": "iana" + }, + "application/vnd.fdf": { + "source": "iana", + "extensions": ["fdf"] + }, + "application/vnd.fdsn.mseed": { + "source": "iana", + "extensions": ["mseed"] + }, + "application/vnd.fdsn.seed": { + "source": "iana", + "extensions": ["seed","dataless"] + }, + "application/vnd.ffsns": { + "source": "iana" + }, + "application/vnd.ficlab.flb+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.filmit.zfc": { + "source": "iana" + }, + "application/vnd.fints": { + "source": "iana" + }, + "application/vnd.firemonkeys.cloudcell": { + "source": "iana" + }, + "application/vnd.flographit": { + "source": "iana", + "extensions": ["gph"] + }, + "application/vnd.fluxtime.clip": { + "source": "iana", + "extensions": ["ftc"] + }, + "application/vnd.font-fontforge-sfd": { + "source": "iana" + }, + "application/vnd.framemaker": { + "source": "iana", + "extensions": ["fm","frame","maker","book"] + }, + "application/vnd.frogans.fnc": { + "source": "iana", + "extensions": ["fnc"] + }, + "application/vnd.frogans.ltf": { + "source": "iana", + "extensions": ["ltf"] + }, + "application/vnd.fsc.weblaunch": { + "source": "iana", + "extensions": ["fsc"] + }, + "application/vnd.fujifilm.fb.docuworks": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.binder": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujifilm.fb.jfi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.fujitsu.oasys": { + "source": "iana", + "extensions": ["oas"] + }, + "application/vnd.fujitsu.oasys2": { + "source": "iana", + "extensions": ["oa2"] + }, + "application/vnd.fujitsu.oasys3": { + "source": "iana", + "extensions": ["oa3"] + }, + "application/vnd.fujitsu.oasysgp": { + "source": "iana", + "extensions": ["fg5"] + }, + "application/vnd.fujitsu.oasysprs": { + "source": "iana", + "extensions": ["bh2"] + }, + "application/vnd.fujixerox.art-ex": { + "source": "iana" + }, + "application/vnd.fujixerox.art4": { + "source": "iana" + }, + "application/vnd.fujixerox.ddd": { + "source": "iana", + "extensions": ["ddd"] + }, + 
"application/vnd.fujixerox.docuworks": { + "source": "iana", + "extensions": ["xdw"] + }, + "application/vnd.fujixerox.docuworks.binder": { + "source": "iana", + "extensions": ["xbd"] + }, + "application/vnd.fujixerox.docuworks.container": { + "source": "iana" + }, + "application/vnd.fujixerox.hbpl": { + "source": "iana" + }, + "application/vnd.fut-misnet": { + "source": "iana" + }, + "application/vnd.futoin+cbor": { + "source": "iana" + }, + "application/vnd.futoin+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.fuzzysheet": { + "source": "iana", + "extensions": ["fzs"] + }, + "application/vnd.genomatix.tuxedo": { + "source": "iana", + "extensions": ["txd"] + }, + "application/vnd.gentics.grd+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geo+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.geocube+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.geogebra.file": { + "source": "iana", + "extensions": ["ggb"] + }, + "application/vnd.geogebra.slides": { + "source": "iana" + }, + "application/vnd.geogebra.tool": { + "source": "iana", + "extensions": ["ggt"] + }, + "application/vnd.geometry-explorer": { + "source": "iana", + "extensions": ["gex","gre"] + }, + "application/vnd.geonext": { + "source": "iana", + "extensions": ["gxt"] + }, + "application/vnd.geoplan": { + "source": "iana", + "extensions": ["g2w"] + }, + "application/vnd.geospace": { + "source": "iana", + "extensions": ["g3w"] + }, + "application/vnd.gerber": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt": { + "source": "iana" + }, + "application/vnd.globalplatform.card-content-mgt-response": { + "source": "iana" + }, + "application/vnd.gmx": { + "source": "iana", + "extensions": ["gmx"] + }, + "application/vnd.google-apps.document": { + "compressible": false, + "extensions": ["gdoc"] + }, + "application/vnd.google-apps.presentation": { + "compressible": false, + "extensions": ["gslides"] + }, + "application/vnd.google-apps.spreadsheet": { + "compressible": false, + "extensions": ["gsheet"] + }, + "application/vnd.google-earth.kml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["kml"] + }, + "application/vnd.google-earth.kmz": { + "source": "iana", + "compressible": false, + "extensions": ["kmz"] + }, + "application/vnd.gov.sk.e-form+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.gov.sk.e-form+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.gov.sk.xmldatacontainer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.grafeq": { + "source": "iana", + "extensions": ["gqf","gqs"] + }, + "application/vnd.gridmp": { + "source": "iana" + }, + "application/vnd.groove-account": { + "source": "iana", + "extensions": ["gac"] + }, + "application/vnd.groove-help": { + "source": "iana", + "extensions": ["ghf"] + }, + "application/vnd.groove-identity-message": { + "source": "iana", + "extensions": ["gim"] + }, + "application/vnd.groove-injector": { + "source": "iana", + "extensions": ["grv"] + }, + "application/vnd.groove-tool-message": { + "source": "iana", + "extensions": ["gtm"] + }, + "application/vnd.groove-tool-template": { + "source": "iana", + "extensions": ["tpl"] + }, + "application/vnd.groove-vcard": { + "source": "iana", + "extensions": ["vcg"] + }, + "application/vnd.hal+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hal+xml": { + "source": "iana", + "compressible": 
true, + "extensions": ["hal"] + }, + "application/vnd.handheld-entertainment+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zmm"] + }, + "application/vnd.hbci": { + "source": "iana", + "extensions": ["hbci"] + }, + "application/vnd.hc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hcl-bireports": { + "source": "iana" + }, + "application/vnd.hdt": { + "source": "iana" + }, + "application/vnd.heroku+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hhe.lesson-player": { + "source": "iana", + "extensions": ["les"] + }, + "application/vnd.hl7cda+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hl7v2+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.hp-hpgl": { + "source": "iana", + "extensions": ["hpgl"] + }, + "application/vnd.hp-hpid": { + "source": "iana", + "extensions": ["hpid"] + }, + "application/vnd.hp-hps": { + "source": "iana", + "extensions": ["hps"] + }, + "application/vnd.hp-jlyt": { + "source": "iana", + "extensions": ["jlt"] + }, + "application/vnd.hp-pcl": { + "source": "iana", + "extensions": ["pcl"] + }, + "application/vnd.hp-pclxl": { + "source": "iana", + "extensions": ["pclxl"] + }, + "application/vnd.httphone": { + "source": "iana" + }, + "application/vnd.hydrostatix.sof-data": { + "source": "iana", + "extensions": ["sfd-hdstx"] + }, + "application/vnd.hyper+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyper-item+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hyperdrive+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.hzn-3d-crossword": { + "source": "iana" + }, + "application/vnd.ibm.afplinedata": { + "source": "iana" + }, + "application/vnd.ibm.electronic-media": { + "source": "iana" + }, + "application/vnd.ibm.minipay": { + "source": "iana", + "extensions": ["mpy"] + }, + "application/vnd.ibm.modcap": { + "source": "iana", + "extensions": ["afp","listafp","list3820"] + }, + "application/vnd.ibm.rights-management": { + "source": "iana", + "extensions": ["irm"] + }, + "application/vnd.ibm.secure-container": { + "source": "iana", + "extensions": ["sc"] + }, + "application/vnd.iccprofile": { + "source": "iana", + "extensions": ["icc","icm"] + }, + "application/vnd.ieee.1905": { + "source": "iana" + }, + "application/vnd.igloader": { + "source": "iana", + "extensions": ["igl"] + }, + "application/vnd.imagemeter.folder+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.imagemeter.image+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.immervision-ivp": { + "source": "iana", + "extensions": ["ivp"] + }, + "application/vnd.immervision-ivu": { + "source": "iana", + "extensions": ["ivu"] + }, + "application/vnd.ims.imsccv1p1": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p2": { + "source": "iana" + }, + "application/vnd.ims.imsccv1p3": { + "source": "iana" + }, + "application/vnd.ims.lis.v2.result+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolconsumerprofile+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolproxy.id+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ims.lti.v2.toolsettings+json": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.ims.lti.v2.toolsettings.simple+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.informedcontrol.rms+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.informix-visionary": { + "source": "iana" + }, + "application/vnd.infotech.project": { + "source": "iana" + }, + "application/vnd.infotech.project+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.innopath.wamp.notification": { + "source": "iana" + }, + "application/vnd.insors.igm": { + "source": "iana", + "extensions": ["igm"] + }, + "application/vnd.intercon.formnet": { + "source": "iana", + "extensions": ["xpw","xpx"] + }, + "application/vnd.intergeo": { + "source": "iana", + "extensions": ["i2g"] + }, + "application/vnd.intertrust.digibox": { + "source": "iana" + }, + "application/vnd.intertrust.nncp": { + "source": "iana" + }, + "application/vnd.intu.qbo": { + "source": "iana", + "extensions": ["qbo"] + }, + "application/vnd.intu.qfx": { + "source": "iana", + "extensions": ["qfx"] + }, + "application/vnd.iptc.g2.catalogitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.conceptitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.knowledgeitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.newsmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.packageitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.iptc.g2.planningitem+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ipunplugged.rcprofile": { + "source": "iana", + "extensions": ["rcprofile"] + }, + "application/vnd.irepository.package+xml": { + "source": "iana", + "compressible": true, + "extensions": ["irp"] + }, + "application/vnd.is-xpr": { + "source": "iana", + "extensions": ["xpr"] + }, + "application/vnd.isac.fcs": { + "source": "iana", + "extensions": ["fcs"] + }, + "application/vnd.iso11783-10+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.jam": { + "source": "iana", + "extensions": ["jam"] + }, + "application/vnd.japannet-directory-service": { + "source": "iana" + }, + "application/vnd.japannet-jpnstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-payment-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-registration": { + "source": "iana" + }, + "application/vnd.japannet-registration-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-setstore-wakeup": { + "source": "iana" + }, + "application/vnd.japannet-verification": { + "source": "iana" + }, + "application/vnd.japannet-verification-wakeup": { + "source": "iana" + }, + "application/vnd.jcp.javame.midlet-rms": { + "source": "iana", + "extensions": ["rms"] + }, + "application/vnd.jisp": { + "source": "iana", + "extensions": ["jisp"] + }, + "application/vnd.joost.joda-archive": { + "source": "iana", + "extensions": ["joda"] + }, + "application/vnd.jsk.isdn-ngn": { + "source": "iana" + }, + "application/vnd.kahootz": { + "source": "iana", + "extensions": ["ktz","ktr"] + }, + "application/vnd.kde.karbon": { + "source": "iana", + "extensions": ["karbon"] + }, + "application/vnd.kde.kchart": { + "source": "iana", + "extensions": ["chrt"] + }, + "application/vnd.kde.kformula": { + "source": "iana", + "extensions": ["kfo"] + }, + "application/vnd.kde.kivio": { + "source": "iana", + 
"extensions": ["flw"] + }, + "application/vnd.kde.kontour": { + "source": "iana", + "extensions": ["kon"] + }, + "application/vnd.kde.kpresenter": { + "source": "iana", + "extensions": ["kpr","kpt"] + }, + "application/vnd.kde.kspread": { + "source": "iana", + "extensions": ["ksp"] + }, + "application/vnd.kde.kword": { + "source": "iana", + "extensions": ["kwd","kwt"] + }, + "application/vnd.kenameaapp": { + "source": "iana", + "extensions": ["htke"] + }, + "application/vnd.kidspiration": { + "source": "iana", + "extensions": ["kia"] + }, + "application/vnd.kinar": { + "source": "iana", + "extensions": ["kne","knp"] + }, + "application/vnd.koan": { + "source": "iana", + "extensions": ["skp","skd","skt","skm"] + }, + "application/vnd.kodak-descriptor": { + "source": "iana", + "extensions": ["sse"] + }, + "application/vnd.las": { + "source": "iana" + }, + "application/vnd.las.las+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.las.las+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lasxml"] + }, + "application/vnd.laszip": { + "source": "iana" + }, + "application/vnd.leap+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.liberty-request+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.llamagraphics.life-balance.desktop": { + "source": "iana", + "extensions": ["lbd"] + }, + "application/vnd.llamagraphics.life-balance.exchange+xml": { + "source": "iana", + "compressible": true, + "extensions": ["lbe"] + }, + "application/vnd.logipipe.circuit+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.loom": { + "source": "iana" + }, + "application/vnd.lotus-1-2-3": { + "source": "iana", + "extensions": ["123"] + }, + "application/vnd.lotus-approach": { + "source": "iana", + "extensions": ["apr"] + }, + "application/vnd.lotus-freelance": { + "source": "iana", + "extensions": ["pre"] + }, + "application/vnd.lotus-notes": { + "source": "iana", + "extensions": ["nsf"] + }, + "application/vnd.lotus-organizer": { + "source": "iana", + "extensions": ["org"] + }, + "application/vnd.lotus-screencam": { + "source": "iana", + "extensions": ["scm"] + }, + "application/vnd.lotus-wordpro": { + "source": "iana", + "extensions": ["lwp"] + }, + "application/vnd.macports.portpkg": { + "source": "iana", + "extensions": ["portpkg"] + }, + "application/vnd.mapbox-vector-tile": { + "source": "iana", + "extensions": ["mvt"] + }, + "application/vnd.marlin.drm.actiontoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.conftoken+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.license+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.marlin.drm.mdcf": { + "source": "iana" + }, + "application/vnd.mason+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.maxar.archive.3tz+zip": { + "source": "iana", + "compressible": false + }, + "application/vnd.maxmind.maxmind-db": { + "source": "iana" + }, + "application/vnd.mcd": { + "source": "iana", + "extensions": ["mcd"] + }, + "application/vnd.medcalcdata": { + "source": "iana", + "extensions": ["mc1"] + }, + "application/vnd.mediastation.cdkey": { + "source": "iana", + "extensions": ["cdkey"] + }, + "application/vnd.meridian-slingshot": { + "source": "iana" + }, + "application/vnd.mfer": { + "source": "iana", + "extensions": ["mwf"] + }, + "application/vnd.mfmp": { + "source": "iana", + "extensions": ["mfm"] + }, + 
"application/vnd.micro+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.micrografx.flo": { + "source": "iana", + "extensions": ["flo"] + }, + "application/vnd.micrografx.igx": { + "source": "iana", + "extensions": ["igx"] + }, + "application/vnd.microsoft.portable-executable": { + "source": "iana" + }, + "application/vnd.microsoft.windows.thumbnail-cache": { + "source": "iana" + }, + "application/vnd.miele+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.mif": { + "source": "iana", + "extensions": ["mif"] + }, + "application/vnd.minisoft-hp3000-save": { + "source": "iana" + }, + "application/vnd.mitsubishi.misty-guard.trustweb": { + "source": "iana" + }, + "application/vnd.mobius.daf": { + "source": "iana", + "extensions": ["daf"] + }, + "application/vnd.mobius.dis": { + "source": "iana", + "extensions": ["dis"] + }, + "application/vnd.mobius.mbk": { + "source": "iana", + "extensions": ["mbk"] + }, + "application/vnd.mobius.mqy": { + "source": "iana", + "extensions": ["mqy"] + }, + "application/vnd.mobius.msl": { + "source": "iana", + "extensions": ["msl"] + }, + "application/vnd.mobius.plc": { + "source": "iana", + "extensions": ["plc"] + }, + "application/vnd.mobius.txf": { + "source": "iana", + "extensions": ["txf"] + }, + "application/vnd.mophun.application": { + "source": "iana", + "extensions": ["mpn"] + }, + "application/vnd.mophun.certificate": { + "source": "iana", + "extensions": ["mpc"] + }, + "application/vnd.motorola.flexsuite": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.adsi": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.fis": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.gotap": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.kmr": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.ttc": { + "source": "iana" + }, + "application/vnd.motorola.flexsuite.wem": { + "source": "iana" + }, + "application/vnd.motorola.iprm": { + "source": "iana" + }, + "application/vnd.mozilla.xul+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xul"] + }, + "application/vnd.ms-3mfdocument": { + "source": "iana" + }, + "application/vnd.ms-artgalry": { + "source": "iana", + "extensions": ["cil"] + }, + "application/vnd.ms-asf": { + "source": "iana" + }, + "application/vnd.ms-cab-compressed": { + "source": "iana", + "extensions": ["cab"] + }, + "application/vnd.ms-color.iccprofile": { + "source": "apache" + }, + "application/vnd.ms-excel": { + "source": "iana", + "compressible": false, + "extensions": ["xls","xlm","xla","xlc","xlt","xlw"] + }, + "application/vnd.ms-excel.addin.macroenabled.12": { + "source": "iana", + "extensions": ["xlam"] + }, + "application/vnd.ms-excel.sheet.binary.macroenabled.12": { + "source": "iana", + "extensions": ["xlsb"] + }, + "application/vnd.ms-excel.sheet.macroenabled.12": { + "source": "iana", + "extensions": ["xlsm"] + }, + "application/vnd.ms-excel.template.macroenabled.12": { + "source": "iana", + "extensions": ["xltm"] + }, + "application/vnd.ms-fontobject": { + "source": "iana", + "compressible": true, + "extensions": ["eot"] + }, + "application/vnd.ms-htmlhelp": { + "source": "iana", + "extensions": ["chm"] + }, + "application/vnd.ms-ims": { + "source": "iana", + "extensions": ["ims"] + }, + "application/vnd.ms-lrm": { + "source": "iana", + "extensions": ["lrm"] + }, + "application/vnd.ms-office.activex+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-officetheme": { + 
"source": "iana", + "extensions": ["thmx"] + }, + "application/vnd.ms-opentype": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-outlook": { + "compressible": false, + "extensions": ["msg"] + }, + "application/vnd.ms-package.obfuscated-opentype": { + "source": "apache" + }, + "application/vnd.ms-pki.seccat": { + "source": "apache", + "extensions": ["cat"] + }, + "application/vnd.ms-pki.stl": { + "source": "apache", + "extensions": ["stl"] + }, + "application/vnd.ms-playready.initiator+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-powerpoint": { + "source": "iana", + "compressible": false, + "extensions": ["ppt","pps","pot"] + }, + "application/vnd.ms-powerpoint.addin.macroenabled.12": { + "source": "iana", + "extensions": ["ppam"] + }, + "application/vnd.ms-powerpoint.presentation.macroenabled.12": { + "source": "iana", + "extensions": ["pptm"] + }, + "application/vnd.ms-powerpoint.slide.macroenabled.12": { + "source": "iana", + "extensions": ["sldm"] + }, + "application/vnd.ms-powerpoint.slideshow.macroenabled.12": { + "source": "iana", + "extensions": ["ppsm"] + }, + "application/vnd.ms-powerpoint.template.macroenabled.12": { + "source": "iana", + "extensions": ["potm"] + }, + "application/vnd.ms-printdevicecapabilities+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-printing.printticket+xml": { + "source": "apache", + "compressible": true + }, + "application/vnd.ms-printschematicket+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.ms-project": { + "source": "iana", + "extensions": ["mpp","mpt"] + }, + "application/vnd.ms-tnef": { + "source": "iana" + }, + "application/vnd.ms-windows.devicepairing": { + "source": "iana" + }, + "application/vnd.ms-windows.nwprinting.oob": { + "source": "iana" + }, + "application/vnd.ms-windows.printerpairing": { + "source": "iana" + }, + "application/vnd.ms-windows.wsd.oob": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.lic-resp": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-chlg-req": { + "source": "iana" + }, + "application/vnd.ms-wmdrm.meter-resp": { + "source": "iana" + }, + "application/vnd.ms-word.document.macroenabled.12": { + "source": "iana", + "extensions": ["docm"] + }, + "application/vnd.ms-word.template.macroenabled.12": { + "source": "iana", + "extensions": ["dotm"] + }, + "application/vnd.ms-works": { + "source": "iana", + "extensions": ["wps","wks","wcm","wdb"] + }, + "application/vnd.ms-wpl": { + "source": "iana", + "extensions": ["wpl"] + }, + "application/vnd.ms-xpsdocument": { + "source": "iana", + "compressible": false, + "extensions": ["xps"] + }, + "application/vnd.msa-disk-image": { + "source": "iana" + }, + "application/vnd.mseq": { + "source": "iana", + "extensions": ["mseq"] + }, + "application/vnd.msign": { + "source": "iana" + }, + "application/vnd.multiad.creator": { + "source": "iana" + }, + "application/vnd.multiad.creator.cif": { + "source": "iana" + }, + "application/vnd.music-niff": { + "source": "iana" + }, + "application/vnd.musician": { + "source": "iana", + "extensions": ["mus"] + }, + "application/vnd.muvee.style": { + "source": "iana", + "extensions": ["msty"] + }, + "application/vnd.mynfc": { + "source": "iana", + "extensions": ["taglet"] + }, + "application/vnd.nacamar.ybrid+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.ncd.control": { + "source": "iana" + }, + 
"application/vnd.ncd.reference": { + "source": "iana" + }, + "application/vnd.nearst.inv+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.nebumind.line": { + "source": "iana" + }, + "application/vnd.nervana": { + "source": "iana" + }, + "application/vnd.netfpx": { + "source": "iana" + }, + "application/vnd.neurolanguage.nlu": { + "source": "iana", + "extensions": ["nlu"] + }, + "application/vnd.nimn": { + "source": "iana" + }, + "application/vnd.nintendo.nitro.rom": { + "source": "iana" + }, + "application/vnd.nintendo.snes.rom": { + "source": "iana" + }, + "application/vnd.nitf": { + "source": "iana", + "extensions": ["ntf","nitf"] + }, + "application/vnd.noblenet-directory": { + "source": "iana", + "extensions": ["nnd"] + }, + "application/vnd.noblenet-sealer": { + "source": "iana", + "extensions": ["nns"] + }, + "application/vnd.noblenet-web": { + "source": "iana", + "extensions": ["nnw"] + }, + "application/vnd.nokia.catalogs": { + "source": "iana" + }, + "application/vnd.nokia.conml+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.conml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.iptv.config+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.isds-radio-presets": { + "source": "iana" + }, + "application/vnd.nokia.landmark+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.landmark+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.landmarkcollection+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.n-gage.ac+xml": { + "source": "iana", + "compressible": true, + "extensions": ["ac"] + }, + "application/vnd.nokia.n-gage.data": { + "source": "iana", + "extensions": ["ngdat"] + }, + "application/vnd.nokia.n-gage.symbian.install": { + "source": "iana", + "extensions": ["n-gage"] + }, + "application/vnd.nokia.ncd": { + "source": "iana" + }, + "application/vnd.nokia.pcd+wbxml": { + "source": "iana" + }, + "application/vnd.nokia.pcd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.nokia.radio-preset": { + "source": "iana", + "extensions": ["rpst"] + }, + "application/vnd.nokia.radio-presets": { + "source": "iana", + "extensions": ["rpss"] + }, + "application/vnd.novadigm.edm": { + "source": "iana", + "extensions": ["edm"] + }, + "application/vnd.novadigm.edx": { + "source": "iana", + "extensions": ["edx"] + }, + "application/vnd.novadigm.ext": { + "source": "iana", + "extensions": ["ext"] + }, + "application/vnd.ntt-local.content-share": { + "source": "iana" + }, + "application/vnd.ntt-local.file-transfer": { + "source": "iana" + }, + "application/vnd.ntt-local.ogw_remote-access": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_remote": { + "source": "iana" + }, + "application/vnd.ntt-local.sip-ta_tcp_stream": { + "source": "iana" + }, + "application/vnd.oasis.opendocument.chart": { + "source": "iana", + "extensions": ["odc"] + }, + "application/vnd.oasis.opendocument.chart-template": { + "source": "iana", + "extensions": ["otc"] + }, + "application/vnd.oasis.opendocument.database": { + "source": "iana", + "extensions": ["odb"] + }, + "application/vnd.oasis.opendocument.formula": { + "source": "iana", + "extensions": ["odf"] + }, + "application/vnd.oasis.opendocument.formula-template": { + "source": "iana", + "extensions": ["odft"] + }, + "application/vnd.oasis.opendocument.graphics": { + "source": "iana", + "compressible": false, + "extensions": ["odg"] + }, + 
"application/vnd.oasis.opendocument.graphics-template": { + "source": "iana", + "extensions": ["otg"] + }, + "application/vnd.oasis.opendocument.image": { + "source": "iana", + "extensions": ["odi"] + }, + "application/vnd.oasis.opendocument.image-template": { + "source": "iana", + "extensions": ["oti"] + }, + "application/vnd.oasis.opendocument.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["odp"] + }, + "application/vnd.oasis.opendocument.presentation-template": { + "source": "iana", + "extensions": ["otp"] + }, + "application/vnd.oasis.opendocument.spreadsheet": { + "source": "iana", + "compressible": false, + "extensions": ["ods"] + }, + "application/vnd.oasis.opendocument.spreadsheet-template": { + "source": "iana", + "extensions": ["ots"] + }, + "application/vnd.oasis.opendocument.text": { + "source": "iana", + "compressible": false, + "extensions": ["odt"] + }, + "application/vnd.oasis.opendocument.text-master": { + "source": "iana", + "extensions": ["odm"] + }, + "application/vnd.oasis.opendocument.text-template": { + "source": "iana", + "extensions": ["ott"] + }, + "application/vnd.oasis.opendocument.text-web": { + "source": "iana", + "extensions": ["oth"] + }, + "application/vnd.obn": { + "source": "iana" + }, + "application/vnd.ocf+cbor": { + "source": "iana" + }, + "application/vnd.oci.image.manifest.v1+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oftn.l10n+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessdownload+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.contentaccessstreaming+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.cspg-hexbinary": { + "source": "iana" + }, + "application/vnd.oipf.dae.svg+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.dae.xhtml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.mippvcontrolmessage+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.pae.gem": { + "source": "iana" + }, + "application/vnd.oipf.spdiscovery+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.spdlist+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.ueprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oipf.userprofile+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.olpc-sugar": { + "source": "iana", + "extensions": ["xo"] + }, + "application/vnd.oma-scws-config": { + "source": "iana" + }, + "application/vnd.oma-scws-http-request": { + "source": "iana" + }, + "application/vnd.oma-scws-http-response": { + "source": "iana" + }, + "application/vnd.oma.bcast.associated-procedure-parameter+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.drm-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.imd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.ltkm": { + "source": "iana" + }, + "application/vnd.oma.bcast.notification+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.provisioningtrigger": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgboot": { + "source": "iana" + }, + "application/vnd.oma.bcast.sgdd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sgdu": { + "source": "iana" + }, + 
"application/vnd.oma.bcast.simple-symbol-container": { + "source": "iana" + }, + "application/vnd.oma.bcast.smartcard-trigger+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.sprov+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.bcast.stkm": { + "source": "iana" + }, + "application/vnd.oma.cab-address-book+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-feature-handler+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-pcc+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-subs-invite+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.cab-user-prefs+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.dcd": { + "source": "iana" + }, + "application/vnd.oma.dcdc": { + "source": "iana" + }, + "application/vnd.oma.dd2+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dd2"] + }, + "application/vnd.oma.drm.risd+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.group-usage-list+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+cbor": { + "source": "iana" + }, + "application/vnd.oma.lwm2m+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.lwm2m+tlv": { + "source": "iana" + }, + "application/vnd.oma.pal+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.detailed-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.final-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.groups+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.invocation-descriptor+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.poc.optimized-progress-report+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.push": { + "source": "iana" + }, + "application/vnd.oma.scidm.messages+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oma.xcap-directory+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.omads-email+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-file+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omads-folder+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.omaloc-supl-init": { + "source": "iana" + }, + "application/vnd.onepager": { + "source": "iana" + }, + "application/vnd.onepagertamp": { + "source": "iana" + }, + "application/vnd.onepagertamx": { + "source": "iana" + }, + "application/vnd.onepagertat": { + "source": "iana" + }, + "application/vnd.onepagertatp": { + "source": "iana" + }, + "application/vnd.onepagertatx": { + "source": "iana" + }, + "application/vnd.openblox.game+xml": { + "source": "iana", + "compressible": true, + "extensions": ["obgx"] + }, + "application/vnd.openblox.game-binary": { + "source": "iana" + }, + "application/vnd.openeye.oeb": { + "source": "iana" + }, + "application/vnd.openofficeorg.extension": { + "source": "apache", + "extensions": ["oxt"] + }, + "application/vnd.openstreetmap.data+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osm"] + }, + "application/vnd.opentimestamps.ots": { + "source": "iana" + }, + 
"application/vnd.openxmlformats-officedocument.custom-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.customxmlproperties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawing+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chart+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.extended-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation": { + "source": "iana", + "compressible": false, + "extensions": ["pptx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.presprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide": { + "source": "iana", + "extensions": ["sldx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slide+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow": { + "source": "iana", + "extensions": ["ppsx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.tags+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.openxmlformats-officedocument.presentationml.template": { + "source": "iana", + "extensions": ["potx"] + }, + "application/vnd.openxmlformats-officedocument.presentationml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.dialogsheet+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": { + "source": "iana", + "compressible": false, + "extensions": ["xlsx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template": { + "source": "iana", + "extensions": ["xltx"] + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml": { + "source": "iana", + "compressible": 
true + }, + "application/vnd.openxmlformats-officedocument.theme+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.themeoverride+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.vmldrawing": { + "source": "iana" + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document": { + "source": "iana", + "compressible": false, + "extensions": ["docx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.numbering+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template": { + "source": "iana", + "extensions": ["dotx"] + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.core-properties+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.openxmlformats-package.relationships+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oracle.resource+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.orange.indata": { + "source": "iana" + }, + "application/vnd.osa.netdeploy": { + "source": "iana" + }, + "application/vnd.osgeo.mapguide.package": { + "source": "iana", + "extensions": ["mgp"] + }, + "application/vnd.osgi.bundle": { + "source": "iana" + }, + "application/vnd.osgi.dp": { + "source": "iana", + "extensions": ["dp"] + }, + "application/vnd.osgi.subsystem": { + "source": "iana", + "extensions": ["esa"] + }, + "application/vnd.otps.ct-kip+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.oxli.countgraph": { + "source": "iana" + }, + "application/vnd.pagerduty+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.palm": { + "source": "iana", + "extensions": ["pdb","pqa","oprc"] + }, + "application/vnd.panoply": { + "source": "iana" + }, + "application/vnd.paos.xml": { + "source": "iana" + }, + 
"application/vnd.patentdive": { + "source": "iana" + }, + "application/vnd.patientecommsdoc": { + "source": "iana" + }, + "application/vnd.pawaafile": { + "source": "iana", + "extensions": ["paw"] + }, + "application/vnd.pcos": { + "source": "iana" + }, + "application/vnd.pg.format": { + "source": "iana", + "extensions": ["str"] + }, + "application/vnd.pg.osasli": { + "source": "iana", + "extensions": ["ei6"] + }, + "application/vnd.piaccess.application-licence": { + "source": "iana" + }, + "application/vnd.picsel": { + "source": "iana", + "extensions": ["efif"] + }, + "application/vnd.pmi.widget": { + "source": "iana", + "extensions": ["wg"] + }, + "application/vnd.poc.group-advertisement+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.pocketlearn": { + "source": "iana", + "extensions": ["plf"] + }, + "application/vnd.powerbuilder6": { + "source": "iana", + "extensions": ["pbd"] + }, + "application/vnd.powerbuilder6-s": { + "source": "iana" + }, + "application/vnd.powerbuilder7": { + "source": "iana" + }, + "application/vnd.powerbuilder7-s": { + "source": "iana" + }, + "application/vnd.powerbuilder75": { + "source": "iana" + }, + "application/vnd.powerbuilder75-s": { + "source": "iana" + }, + "application/vnd.preminet": { + "source": "iana" + }, + "application/vnd.previewsystems.box": { + "source": "iana", + "extensions": ["box"] + }, + "application/vnd.proteus.magazine": { + "source": "iana", + "extensions": ["mgz"] + }, + "application/vnd.psfs": { + "source": "iana" + }, + "application/vnd.publishare-delta-tree": { + "source": "iana", + "extensions": ["qps"] + }, + "application/vnd.pvi.ptid1": { + "source": "iana", + "extensions": ["ptid"] + }, + "application/vnd.pwg-multiplexed": { + "source": "iana" + }, + "application/vnd.pwg-xhtml-print+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.qualcomm.brew-app-res": { + "source": "iana" + }, + "application/vnd.quarantainenet": { + "source": "iana" + }, + "application/vnd.quark.quarkxpress": { + "source": "iana", + "extensions": ["qxd","qxt","qwd","qwt","qxl","qxb"] + }, + "application/vnd.quobject-quoxdocument": { + "source": "iana" + }, + "application/vnd.radisys.moml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-conn+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-audit-stream+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-conf+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-base+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-detect+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-fax-sendrecv+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-group+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.radisys.msml-dialog-speech+xml": { + "source": "iana", + "compressible": true + }, + 
"application/vnd.radisys.msml-dialog-transform+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.rainstor.data": { + "source": "iana" + }, + "application/vnd.rapid": { + "source": "iana" + }, + "application/vnd.rar": { + "source": "iana", + "extensions": ["rar"] + }, + "application/vnd.realvnc.bed": { + "source": "iana", + "extensions": ["bed"] + }, + "application/vnd.recordare.musicxml": { + "source": "iana", + "extensions": ["mxl"] + }, + "application/vnd.recordare.musicxml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["musicxml"] + }, + "application/vnd.renlearn.rlprint": { + "source": "iana" + }, + "application/vnd.resilient.logic": { + "source": "iana" + }, + "application/vnd.restful+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.rig.cryptonote": { + "source": "iana", + "extensions": ["cryptonote"] + }, + "application/vnd.rim.cod": { + "source": "apache", + "extensions": ["cod"] + }, + "application/vnd.rn-realmedia": { + "source": "apache", + "extensions": ["rm"] + }, + "application/vnd.rn-realmedia-vbr": { + "source": "apache", + "extensions": ["rmvb"] + }, + "application/vnd.route66.link66+xml": { + "source": "iana", + "compressible": true, + "extensions": ["link66"] + }, + "application/vnd.rs-274x": { + "source": "iana" + }, + "application/vnd.ruckus.download": { + "source": "iana" + }, + "application/vnd.s3sms": { + "source": "iana" + }, + "application/vnd.sailingtracker.track": { + "source": "iana", + "extensions": ["st"] + }, + "application/vnd.sar": { + "source": "iana" + }, + "application/vnd.sbm.cid": { + "source": "iana" + }, + "application/vnd.sbm.mid2": { + "source": "iana" + }, + "application/vnd.scribus": { + "source": "iana" + }, + "application/vnd.sealed.3df": { + "source": "iana" + }, + "application/vnd.sealed.csf": { + "source": "iana" + }, + "application/vnd.sealed.doc": { + "source": "iana" + }, + "application/vnd.sealed.eml": { + "source": "iana" + }, + "application/vnd.sealed.mht": { + "source": "iana" + }, + "application/vnd.sealed.net": { + "source": "iana" + }, + "application/vnd.sealed.ppt": { + "source": "iana" + }, + "application/vnd.sealed.tiff": { + "source": "iana" + }, + "application/vnd.sealed.xls": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.html": { + "source": "iana" + }, + "application/vnd.sealedmedia.softseal.pdf": { + "source": "iana" + }, + "application/vnd.seemail": { + "source": "iana", + "extensions": ["see"] + }, + "application/vnd.seis+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.sema": { + "source": "iana", + "extensions": ["sema"] + }, + "application/vnd.semd": { + "source": "iana", + "extensions": ["semd"] + }, + "application/vnd.semf": { + "source": "iana", + "extensions": ["semf"] + }, + "application/vnd.shade-save-file": { + "source": "iana" + }, + "application/vnd.shana.informed.formdata": { + "source": "iana", + "extensions": ["ifm"] + }, + "application/vnd.shana.informed.formtemplate": { + "source": "iana", + "extensions": ["itp"] + }, + "application/vnd.shana.informed.interchange": { + "source": "iana", + "extensions": ["iif"] + }, + "application/vnd.shana.informed.package": { + "source": "iana", + "extensions": ["ipk"] + }, + "application/vnd.shootproof+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shopkick+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.shp": { + "source": "iana" + }, + "application/vnd.shx": { + "source": "iana" + }, + 
"application/vnd.sigrok.session": { + "source": "iana" + }, + "application/vnd.simtech-mindmapper": { + "source": "iana", + "extensions": ["twd","twds"] + }, + "application/vnd.siren+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.smaf": { + "source": "iana", + "extensions": ["mmf"] + }, + "application/vnd.smart.notebook": { + "source": "iana" + }, + "application/vnd.smart.teacher": { + "source": "iana", + "extensions": ["teacher"] + }, + "application/vnd.snesdev-page-table": { + "source": "iana" + }, + "application/vnd.software602.filler.form+xml": { + "source": "iana", + "compressible": true, + "extensions": ["fo"] + }, + "application/vnd.software602.filler.form-xml-zip": { + "source": "iana" + }, + "application/vnd.solent.sdkm+xml": { + "source": "iana", + "compressible": true, + "extensions": ["sdkm","sdkd"] + }, + "application/vnd.spotfire.dxp": { + "source": "iana", + "extensions": ["dxp"] + }, + "application/vnd.spotfire.sfs": { + "source": "iana", + "extensions": ["sfs"] + }, + "application/vnd.sqlite3": { + "source": "iana" + }, + "application/vnd.sss-cod": { + "source": "iana" + }, + "application/vnd.sss-dtf": { + "source": "iana" + }, + "application/vnd.sss-ntf": { + "source": "iana" + }, + "application/vnd.stardivision.calc": { + "source": "apache", + "extensions": ["sdc"] + }, + "application/vnd.stardivision.draw": { + "source": "apache", + "extensions": ["sda"] + }, + "application/vnd.stardivision.impress": { + "source": "apache", + "extensions": ["sdd"] + }, + "application/vnd.stardivision.math": { + "source": "apache", + "extensions": ["smf"] + }, + "application/vnd.stardivision.writer": { + "source": "apache", + "extensions": ["sdw","vor"] + }, + "application/vnd.stardivision.writer-global": { + "source": "apache", + "extensions": ["sgl"] + }, + "application/vnd.stepmania.package": { + "source": "iana", + "extensions": ["smzip"] + }, + "application/vnd.stepmania.stepchart": { + "source": "iana", + "extensions": ["sm"] + }, + "application/vnd.street-stream": { + "source": "iana" + }, + "application/vnd.sun.wadl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wadl"] + }, + "application/vnd.sun.xml.calc": { + "source": "apache", + "extensions": ["sxc"] + }, + "application/vnd.sun.xml.calc.template": { + "source": "apache", + "extensions": ["stc"] + }, + "application/vnd.sun.xml.draw": { + "source": "apache", + "extensions": ["sxd"] + }, + "application/vnd.sun.xml.draw.template": { + "source": "apache", + "extensions": ["std"] + }, + "application/vnd.sun.xml.impress": { + "source": "apache", + "extensions": ["sxi"] + }, + "application/vnd.sun.xml.impress.template": { + "source": "apache", + "extensions": ["sti"] + }, + "application/vnd.sun.xml.math": { + "source": "apache", + "extensions": ["sxm"] + }, + "application/vnd.sun.xml.writer": { + "source": "apache", + "extensions": ["sxw"] + }, + "application/vnd.sun.xml.writer.global": { + "source": "apache", + "extensions": ["sxg"] + }, + "application/vnd.sun.xml.writer.template": { + "source": "apache", + "extensions": ["stw"] + }, + "application/vnd.sus-calendar": { + "source": "iana", + "extensions": ["sus","susp"] + }, + "application/vnd.svd": { + "source": "iana", + "extensions": ["svd"] + }, + "application/vnd.swiftview-ics": { + "source": "iana" + }, + "application/vnd.sycle+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.syft+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.symbian.install": { + "source": "apache", + 
"extensions": ["sis","sisx"] + }, + "application/vnd.syncml+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xsm"] + }, + "application/vnd.syncml.dm+wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["bdm"] + }, + "application/vnd.syncml.dm+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["xdm"] + }, + "application/vnd.syncml.dm.notification": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmddf+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["ddf"] + }, + "application/vnd.syncml.dmtnds+wbxml": { + "source": "iana" + }, + "application/vnd.syncml.dmtnds+xml": { + "source": "iana", + "charset": "UTF-8", + "compressible": true + }, + "application/vnd.syncml.ds.notification": { + "source": "iana" + }, + "application/vnd.tableschema+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tao.intent-module-archive": { + "source": "iana", + "extensions": ["tao"] + }, + "application/vnd.tcpdump.pcap": { + "source": "iana", + "extensions": ["pcap","cap","dmp"] + }, + "application/vnd.think-cell.ppttc+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.tmd.mediaflex.api+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.tml": { + "source": "iana" + }, + "application/vnd.tmobile-livetv": { + "source": "iana", + "extensions": ["tmo"] + }, + "application/vnd.tri.onesource": { + "source": "iana" + }, + "application/vnd.trid.tpt": { + "source": "iana", + "extensions": ["tpt"] + }, + "application/vnd.triscape.mxs": { + "source": "iana", + "extensions": ["mxs"] + }, + "application/vnd.trueapp": { + "source": "iana", + "extensions": ["tra"] + }, + "application/vnd.truedoc": { + "source": "iana" + }, + "application/vnd.ubisoft.webplayer": { + "source": "iana" + }, + "application/vnd.ufdl": { + "source": "iana", + "extensions": ["ufd","ufdl"] + }, + "application/vnd.uiq.theme": { + "source": "iana", + "extensions": ["utz"] + }, + "application/vnd.umajin": { + "source": "iana", + "extensions": ["umj"] + }, + "application/vnd.unity": { + "source": "iana", + "extensions": ["unityweb"] + }, + "application/vnd.uoml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["uoml"] + }, + "application/vnd.uplanet.alert": { + "source": "iana" + }, + "application/vnd.uplanet.alert-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice": { + "source": "iana" + }, + "application/vnd.uplanet.bearer-choice-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop": { + "source": "iana" + }, + "application/vnd.uplanet.cacheop-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.channel": { + "source": "iana" + }, + "application/vnd.uplanet.channel-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.list": { + "source": "iana" + }, + "application/vnd.uplanet.list-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd": { + "source": "iana" + }, + "application/vnd.uplanet.listcmd-wbxml": { + "source": "iana" + }, + "application/vnd.uplanet.signal": { + "source": "iana" + }, + "application/vnd.uri-map": { + "source": "iana" + }, + "application/vnd.valve.source.material": { + "source": "iana" + }, + "application/vnd.vcx": { + "source": "iana", + "extensions": ["vcx"] + }, + "application/vnd.vd-study": { + "source": "iana" + }, + "application/vnd.vectorworks": { + "source": 
"iana" + }, + "application/vnd.vel+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.verimatrix.vcas": { + "source": "iana" + }, + "application/vnd.veritone.aion+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.veryant.thin": { + "source": "iana" + }, + "application/vnd.ves.encrypted": { + "source": "iana" + }, + "application/vnd.vidsoft.vidconference": { + "source": "iana" + }, + "application/vnd.visio": { + "source": "iana", + "extensions": ["vsd","vst","vss","vsw"] + }, + "application/vnd.visionary": { + "source": "iana", + "extensions": ["vis"] + }, + "application/vnd.vividence.scriptfile": { + "source": "iana" + }, + "application/vnd.vsf": { + "source": "iana", + "extensions": ["vsf"] + }, + "application/vnd.wap.sic": { + "source": "iana" + }, + "application/vnd.wap.slc": { + "source": "iana" + }, + "application/vnd.wap.wbxml": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["wbxml"] + }, + "application/vnd.wap.wmlc": { + "source": "iana", + "extensions": ["wmlc"] + }, + "application/vnd.wap.wmlscriptc": { + "source": "iana", + "extensions": ["wmlsc"] + }, + "application/vnd.webturbo": { + "source": "iana", + "extensions": ["wtb"] + }, + "application/vnd.wfa.dpp": { + "source": "iana" + }, + "application/vnd.wfa.p2p": { + "source": "iana" + }, + "application/vnd.wfa.wsc": { + "source": "iana" + }, + "application/vnd.windows.devicepairing": { + "source": "iana" + }, + "application/vnd.wmc": { + "source": "iana" + }, + "application/vnd.wmf.bootstrap": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica": { + "source": "iana" + }, + "application/vnd.wolfram.mathematica.package": { + "source": "iana" + }, + "application/vnd.wolfram.player": { + "source": "iana", + "extensions": ["nbp"] + }, + "application/vnd.wordperfect": { + "source": "iana", + "extensions": ["wpd"] + }, + "application/vnd.wqd": { + "source": "iana", + "extensions": ["wqd"] + }, + "application/vnd.wrq-hp3000-labelled": { + "source": "iana" + }, + "application/vnd.wt.stf": { + "source": "iana", + "extensions": ["stf"] + }, + "application/vnd.wv.csp+wbxml": { + "source": "iana" + }, + "application/vnd.wv.csp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.wv.ssp+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xacml+json": { + "source": "iana", + "compressible": true + }, + "application/vnd.xara": { + "source": "iana", + "extensions": ["xar"] + }, + "application/vnd.xfdl": { + "source": "iana", + "extensions": ["xfdl"] + }, + "application/vnd.xfdl.webform": { + "source": "iana" + }, + "application/vnd.xmi+xml": { + "source": "iana", + "compressible": true + }, + "application/vnd.xmpie.cpkg": { + "source": "iana" + }, + "application/vnd.xmpie.dpkg": { + "source": "iana" + }, + "application/vnd.xmpie.plan": { + "source": "iana" + }, + "application/vnd.xmpie.ppkg": { + "source": "iana" + }, + "application/vnd.xmpie.xlim": { + "source": "iana" + }, + "application/vnd.yamaha.hv-dic": { + "source": "iana", + "extensions": ["hvd"] + }, + "application/vnd.yamaha.hv-script": { + "source": "iana", + "extensions": ["hvs"] + }, + "application/vnd.yamaha.hv-voice": { + "source": "iana", + "extensions": ["hvp"] + }, + "application/vnd.yamaha.openscoreformat": { + "source": "iana", + "extensions": ["osf"] + }, + "application/vnd.yamaha.openscoreformat.osfpvg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["osfpvg"] + }, + "application/vnd.yamaha.remote-setup": { + "source": "iana" + }, 
+ "application/vnd.yamaha.smaf-audio": { + "source": "iana", + "extensions": ["saf"] + }, + "application/vnd.yamaha.smaf-phrase": { + "source": "iana", + "extensions": ["spf"] + }, + "application/vnd.yamaha.through-ngn": { + "source": "iana" + }, + "application/vnd.yamaha.tunnel-udpencap": { + "source": "iana" + }, + "application/vnd.yaoweme": { + "source": "iana" + }, + "application/vnd.yellowriver-custom-menu": { + "source": "iana", + "extensions": ["cmp"] + }, + "application/vnd.youtube.yt": { + "source": "iana" + }, + "application/vnd.zul": { + "source": "iana", + "extensions": ["zir","zirz"] + }, + "application/vnd.zzazz.deck+xml": { + "source": "iana", + "compressible": true, + "extensions": ["zaz"] + }, + "application/voicexml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["vxml"] + }, + "application/voucher-cms+json": { + "source": "iana", + "compressible": true + }, + "application/vq-rtcpxr": { + "source": "iana" + }, + "application/wasm": { + "source": "iana", + "compressible": true, + "extensions": ["wasm"] + }, + "application/watcherinfo+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wif"] + }, + "application/webpush-options+json": { + "source": "iana", + "compressible": true + }, + "application/whoispp-query": { + "source": "iana" + }, + "application/whoispp-response": { + "source": "iana" + }, + "application/widget": { + "source": "iana", + "extensions": ["wgt"] + }, + "application/winhlp": { + "source": "apache", + "extensions": ["hlp"] + }, + "application/wita": { + "source": "iana" + }, + "application/wordperfect5.1": { + "source": "iana" + }, + "application/wsdl+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wsdl"] + }, + "application/wspolicy+xml": { + "source": "iana", + "compressible": true, + "extensions": ["wspolicy"] + }, + "application/x-7z-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["7z"] + }, + "application/x-abiword": { + "source": "apache", + "extensions": ["abw"] + }, + "application/x-ace-compressed": { + "source": "apache", + "extensions": ["ace"] + }, + "application/x-amf": { + "source": "apache" + }, + "application/x-apple-diskimage": { + "source": "apache", + "extensions": ["dmg"] + }, + "application/x-arj": { + "compressible": false, + "extensions": ["arj"] + }, + "application/x-authorware-bin": { + "source": "apache", + "extensions": ["aab","x32","u32","vox"] + }, + "application/x-authorware-map": { + "source": "apache", + "extensions": ["aam"] + }, + "application/x-authorware-seg": { + "source": "apache", + "extensions": ["aas"] + }, + "application/x-bcpio": { + "source": "apache", + "extensions": ["bcpio"] + }, + "application/x-bdoc": { + "compressible": false, + "extensions": ["bdoc"] + }, + "application/x-bittorrent": { + "source": "apache", + "extensions": ["torrent"] + }, + "application/x-blorb": { + "source": "apache", + "extensions": ["blb","blorb"] + }, + "application/x-bzip": { + "source": "apache", + "compressible": false, + "extensions": ["bz"] + }, + "application/x-bzip2": { + "source": "apache", + "compressible": false, + "extensions": ["bz2","boz"] + }, + "application/x-cbr": { + "source": "apache", + "extensions": ["cbr","cba","cbt","cbz","cb7"] + }, + "application/x-cdlink": { + "source": "apache", + "extensions": ["vcd"] + }, + "application/x-cfs-compressed": { + "source": "apache", + "extensions": ["cfs"] + }, + "application/x-chat": { + "source": "apache", + "extensions": ["chat"] + }, + "application/x-chess-pgn": { + "source": 
"apache", + "extensions": ["pgn"] + }, + "application/x-chrome-extension": { + "extensions": ["crx"] + }, + "application/x-cocoa": { + "source": "nginx", + "extensions": ["cco"] + }, + "application/x-compress": { + "source": "apache" + }, + "application/x-conference": { + "source": "apache", + "extensions": ["nsc"] + }, + "application/x-cpio": { + "source": "apache", + "extensions": ["cpio"] + }, + "application/x-csh": { + "source": "apache", + "extensions": ["csh"] + }, + "application/x-deb": { + "compressible": false + }, + "application/x-debian-package": { + "source": "apache", + "extensions": ["deb","udeb"] + }, + "application/x-dgc-compressed": { + "source": "apache", + "extensions": ["dgc"] + }, + "application/x-director": { + "source": "apache", + "extensions": ["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"] + }, + "application/x-doom": { + "source": "apache", + "extensions": ["wad"] + }, + "application/x-dtbncx+xml": { + "source": "apache", + "compressible": true, + "extensions": ["ncx"] + }, + "application/x-dtbook+xml": { + "source": "apache", + "compressible": true, + "extensions": ["dtb"] + }, + "application/x-dtbresource+xml": { + "source": "apache", + "compressible": true, + "extensions": ["res"] + }, + "application/x-dvi": { + "source": "apache", + "compressible": false, + "extensions": ["dvi"] + }, + "application/x-envoy": { + "source": "apache", + "extensions": ["evy"] + }, + "application/x-eva": { + "source": "apache", + "extensions": ["eva"] + }, + "application/x-font-bdf": { + "source": "apache", + "extensions": ["bdf"] + }, + "application/x-font-dos": { + "source": "apache" + }, + "application/x-font-framemaker": { + "source": "apache" + }, + "application/x-font-ghostscript": { + "source": "apache", + "extensions": ["gsf"] + }, + "application/x-font-libgrx": { + "source": "apache" + }, + "application/x-font-linux-psf": { + "source": "apache", + "extensions": ["psf"] + }, + "application/x-font-pcf": { + "source": "apache", + "extensions": ["pcf"] + }, + "application/x-font-snf": { + "source": "apache", + "extensions": ["snf"] + }, + "application/x-font-speedo": { + "source": "apache" + }, + "application/x-font-sunos-news": { + "source": "apache" + }, + "application/x-font-type1": { + "source": "apache", + "extensions": ["pfa","pfb","pfm","afm"] + }, + "application/x-font-vfont": { + "source": "apache" + }, + "application/x-freearc": { + "source": "apache", + "extensions": ["arc"] + }, + "application/x-futuresplash": { + "source": "apache", + "extensions": ["spl"] + }, + "application/x-gca-compressed": { + "source": "apache", + "extensions": ["gca"] + }, + "application/x-glulx": { + "source": "apache", + "extensions": ["ulx"] + }, + "application/x-gnumeric": { + "source": "apache", + "extensions": ["gnumeric"] + }, + "application/x-gramps-xml": { + "source": "apache", + "extensions": ["gramps"] + }, + "application/x-gtar": { + "source": "apache", + "extensions": ["gtar"] + }, + "application/x-gzip": { + "source": "apache" + }, + "application/x-hdf": { + "source": "apache", + "extensions": ["hdf"] + }, + "application/x-httpd-php": { + "compressible": true, + "extensions": ["php"] + }, + "application/x-install-instructions": { + "source": "apache", + "extensions": ["install"] + }, + "application/x-iso9660-image": { + "source": "apache", + "extensions": ["iso"] + }, + "application/x-iwork-keynote-sffkey": { + "extensions": ["key"] + }, + "application/x-iwork-numbers-sffnumbers": { + "extensions": ["numbers"] + }, + "application/x-iwork-pages-sffpages": { + 
"extensions": ["pages"] + }, + "application/x-java-archive-diff": { + "source": "nginx", + "extensions": ["jardiff"] + }, + "application/x-java-jnlp-file": { + "source": "apache", + "compressible": false, + "extensions": ["jnlp"] + }, + "application/x-javascript": { + "compressible": true + }, + "application/x-keepass2": { + "extensions": ["kdbx"] + }, + "application/x-latex": { + "source": "apache", + "compressible": false, + "extensions": ["latex"] + }, + "application/x-lua-bytecode": { + "extensions": ["luac"] + }, + "application/x-lzh-compressed": { + "source": "apache", + "extensions": ["lzh","lha"] + }, + "application/x-makeself": { + "source": "nginx", + "extensions": ["run"] + }, + "application/x-mie": { + "source": "apache", + "extensions": ["mie"] + }, + "application/x-mobipocket-ebook": { + "source": "apache", + "extensions": ["prc","mobi"] + }, + "application/x-mpegurl": { + "compressible": false + }, + "application/x-ms-application": { + "source": "apache", + "extensions": ["application"] + }, + "application/x-ms-shortcut": { + "source": "apache", + "extensions": ["lnk"] + }, + "application/x-ms-wmd": { + "source": "apache", + "extensions": ["wmd"] + }, + "application/x-ms-wmz": { + "source": "apache", + "extensions": ["wmz"] + }, + "application/x-ms-xbap": { + "source": "apache", + "extensions": ["xbap"] + }, + "application/x-msaccess": { + "source": "apache", + "extensions": ["mdb"] + }, + "application/x-msbinder": { + "source": "apache", + "extensions": ["obd"] + }, + "application/x-mscardfile": { + "source": "apache", + "extensions": ["crd"] + }, + "application/x-msclip": { + "source": "apache", + "extensions": ["clp"] + }, + "application/x-msdos-program": { + "extensions": ["exe"] + }, + "application/x-msdownload": { + "source": "apache", + "extensions": ["exe","dll","com","bat","msi"] + }, + "application/x-msmediaview": { + "source": "apache", + "extensions": ["mvb","m13","m14"] + }, + "application/x-msmetafile": { + "source": "apache", + "extensions": ["wmf","wmz","emf","emz"] + }, + "application/x-msmoney": { + "source": "apache", + "extensions": ["mny"] + }, + "application/x-mspublisher": { + "source": "apache", + "extensions": ["pub"] + }, + "application/x-msschedule": { + "source": "apache", + "extensions": ["scd"] + }, + "application/x-msterminal": { + "source": "apache", + "extensions": ["trm"] + }, + "application/x-mswrite": { + "source": "apache", + "extensions": ["wri"] + }, + "application/x-netcdf": { + "source": "apache", + "extensions": ["nc","cdf"] + }, + "application/x-ns-proxy-autoconfig": { + "compressible": true, + "extensions": ["pac"] + }, + "application/x-nzb": { + "source": "apache", + "extensions": ["nzb"] + }, + "application/x-perl": { + "source": "nginx", + "extensions": ["pl","pm"] + }, + "application/x-pilot": { + "source": "nginx", + "extensions": ["prc","pdb"] + }, + "application/x-pkcs12": { + "source": "apache", + "compressible": false, + "extensions": ["p12","pfx"] + }, + "application/x-pkcs7-certificates": { + "source": "apache", + "extensions": ["p7b","spc"] + }, + "application/x-pkcs7-certreqresp": { + "source": "apache", + "extensions": ["p7r"] + }, + "application/x-pki-message": { + "source": "iana" + }, + "application/x-rar-compressed": { + "source": "apache", + "compressible": false, + "extensions": ["rar"] + }, + "application/x-redhat-package-manager": { + "source": "nginx", + "extensions": ["rpm"] + }, + "application/x-research-info-systems": { + "source": "apache", + "extensions": ["ris"] + }, + "application/x-sea": { + "source": 
"nginx", + "extensions": ["sea"] + }, + "application/x-sh": { + "source": "apache", + "compressible": true, + "extensions": ["sh"] + }, + "application/x-shar": { + "source": "apache", + "extensions": ["shar"] + }, + "application/x-shockwave-flash": { + "source": "apache", + "compressible": false, + "extensions": ["swf"] + }, + "application/x-silverlight-app": { + "source": "apache", + "extensions": ["xap"] + }, + "application/x-sql": { + "source": "apache", + "extensions": ["sql"] + }, + "application/x-stuffit": { + "source": "apache", + "compressible": false, + "extensions": ["sit"] + }, + "application/x-stuffitx": { + "source": "apache", + "extensions": ["sitx"] + }, + "application/x-subrip": { + "source": "apache", + "extensions": ["srt"] + }, + "application/x-sv4cpio": { + "source": "apache", + "extensions": ["sv4cpio"] + }, + "application/x-sv4crc": { + "source": "apache", + "extensions": ["sv4crc"] + }, + "application/x-t3vm-image": { + "source": "apache", + "extensions": ["t3"] + }, + "application/x-tads": { + "source": "apache", + "extensions": ["gam"] + }, + "application/x-tar": { + "source": "apache", + "compressible": true, + "extensions": ["tar"] + }, + "application/x-tcl": { + "source": "apache", + "extensions": ["tcl","tk"] + }, + "application/x-tex": { + "source": "apache", + "extensions": ["tex"] + }, + "application/x-tex-tfm": { + "source": "apache", + "extensions": ["tfm"] + }, + "application/x-texinfo": { + "source": "apache", + "extensions": ["texinfo","texi"] + }, + "application/x-tgif": { + "source": "apache", + "extensions": ["obj"] + }, + "application/x-ustar": { + "source": "apache", + "extensions": ["ustar"] + }, + "application/x-virtualbox-hdd": { + "compressible": true, + "extensions": ["hdd"] + }, + "application/x-virtualbox-ova": { + "compressible": true, + "extensions": ["ova"] + }, + "application/x-virtualbox-ovf": { + "compressible": true, + "extensions": ["ovf"] + }, + "application/x-virtualbox-vbox": { + "compressible": true, + "extensions": ["vbox"] + }, + "application/x-virtualbox-vbox-extpack": { + "compressible": false, + "extensions": ["vbox-extpack"] + }, + "application/x-virtualbox-vdi": { + "compressible": true, + "extensions": ["vdi"] + }, + "application/x-virtualbox-vhd": { + "compressible": true, + "extensions": ["vhd"] + }, + "application/x-virtualbox-vmdk": { + "compressible": true, + "extensions": ["vmdk"] + }, + "application/x-wais-source": { + "source": "apache", + "extensions": ["src"] + }, + "application/x-web-app-manifest+json": { + "compressible": true, + "extensions": ["webapp"] + }, + "application/x-www-form-urlencoded": { + "source": "iana", + "compressible": true + }, + "application/x-x509-ca-cert": { + "source": "iana", + "extensions": ["der","crt","pem"] + }, + "application/x-x509-ca-ra-cert": { + "source": "iana" + }, + "application/x-x509-next-ca-cert": { + "source": "iana" + }, + "application/x-xfig": { + "source": "apache", + "extensions": ["fig"] + }, + "application/x-xliff+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xlf"] + }, + "application/x-xpinstall": { + "source": "apache", + "compressible": false, + "extensions": ["xpi"] + }, + "application/x-xz": { + "source": "apache", + "extensions": ["xz"] + }, + "application/x-zmachine": { + "source": "apache", + "extensions": ["z1","z2","z3","z4","z5","z6","z7","z8"] + }, + "application/x400-bp": { + "source": "iana" + }, + "application/xacml+xml": { + "source": "iana", + "compressible": true + }, + "application/xaml+xml": { + "source": "apache", + 
"compressible": true, + "extensions": ["xaml"] + }, + "application/xcap-att+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xav"] + }, + "application/xcap-caps+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xca"] + }, + "application/xcap-diff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xdf"] + }, + "application/xcap-el+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xel"] + }, + "application/xcap-error+xml": { + "source": "iana", + "compressible": true + }, + "application/xcap-ns+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xns"] + }, + "application/xcon-conference-info+xml": { + "source": "iana", + "compressible": true + }, + "application/xcon-conference-info-diff+xml": { + "source": "iana", + "compressible": true + }, + "application/xenc+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xenc"] + }, + "application/xhtml+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xhtml","xht"] + }, + "application/xhtml-voice+xml": { + "source": "apache", + "compressible": true + }, + "application/xliff+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xlf"] + }, + "application/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml","xsl","xsd","rng"] + }, + "application/xml-dtd": { + "source": "iana", + "compressible": true, + "extensions": ["dtd"] + }, + "application/xml-external-parsed-entity": { + "source": "iana" + }, + "application/xml-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/xmpp+xml": { + "source": "iana", + "compressible": true + }, + "application/xop+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xop"] + }, + "application/xproc+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xpl"] + }, + "application/xslt+xml": { + "source": "iana", + "compressible": true, + "extensions": ["xsl","xslt"] + }, + "application/xspf+xml": { + "source": "apache", + "compressible": true, + "extensions": ["xspf"] + }, + "application/xv+xml": { + "source": "iana", + "compressible": true, + "extensions": ["mxml","xhvml","xvml","xvm"] + }, + "application/yang": { + "source": "iana", + "extensions": ["yang"] + }, + "application/yang-data+json": { + "source": "iana", + "compressible": true + }, + "application/yang-data+xml": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+json": { + "source": "iana", + "compressible": true + }, + "application/yang-patch+xml": { + "source": "iana", + "compressible": true + }, + "application/yin+xml": { + "source": "iana", + "compressible": true, + "extensions": ["yin"] + }, + "application/zip": { + "source": "iana", + "compressible": false, + "extensions": ["zip"] + }, + "application/zlib": { + "source": "iana" + }, + "application/zstd": { + "source": "iana" + }, + "audio/1d-interleaved-parityfec": { + "source": "iana" + }, + "audio/32kadpcm": { + "source": "iana" + }, + "audio/3gpp": { + "source": "iana", + "compressible": false, + "extensions": ["3gpp"] + }, + "audio/3gpp2": { + "source": "iana" + }, + "audio/aac": { + "source": "iana" + }, + "audio/ac3": { + "source": "iana" + }, + "audio/adpcm": { + "source": "apache", + "extensions": ["adp"] + }, + "audio/amr": { + "source": "iana", + "extensions": ["amr"] + }, + "audio/amr-wb": { + "source": "iana" + }, + "audio/amr-wb+": { + "source": "iana" + }, + "audio/aptx": { + "source": "iana" + }, + "audio/asc": { + "source": "iana" + }, 
+ "audio/atrac-advanced-lossless": { + "source": "iana" + }, + "audio/atrac-x": { + "source": "iana" + }, + "audio/atrac3": { + "source": "iana" + }, + "audio/basic": { + "source": "iana", + "compressible": false, + "extensions": ["au","snd"] + }, + "audio/bv16": { + "source": "iana" + }, + "audio/bv32": { + "source": "iana" + }, + "audio/clearmode": { + "source": "iana" + }, + "audio/cn": { + "source": "iana" + }, + "audio/dat12": { + "source": "iana" + }, + "audio/dls": { + "source": "iana" + }, + "audio/dsr-es201108": { + "source": "iana" + }, + "audio/dsr-es202050": { + "source": "iana" + }, + "audio/dsr-es202211": { + "source": "iana" + }, + "audio/dsr-es202212": { + "source": "iana" + }, + "audio/dv": { + "source": "iana" + }, + "audio/dvi4": { + "source": "iana" + }, + "audio/eac3": { + "source": "iana" + }, + "audio/encaprtp": { + "source": "iana" + }, + "audio/evrc": { + "source": "iana" + }, + "audio/evrc-qcp": { + "source": "iana" + }, + "audio/evrc0": { + "source": "iana" + }, + "audio/evrc1": { + "source": "iana" + }, + "audio/evrcb": { + "source": "iana" + }, + "audio/evrcb0": { + "source": "iana" + }, + "audio/evrcb1": { + "source": "iana" + }, + "audio/evrcnw": { + "source": "iana" + }, + "audio/evrcnw0": { + "source": "iana" + }, + "audio/evrcnw1": { + "source": "iana" + }, + "audio/evrcwb": { + "source": "iana" + }, + "audio/evrcwb0": { + "source": "iana" + }, + "audio/evrcwb1": { + "source": "iana" + }, + "audio/evs": { + "source": "iana" + }, + "audio/flexfec": { + "source": "iana" + }, + "audio/fwdred": { + "source": "iana" + }, + "audio/g711-0": { + "source": "iana" + }, + "audio/g719": { + "source": "iana" + }, + "audio/g722": { + "source": "iana" + }, + "audio/g7221": { + "source": "iana" + }, + "audio/g723": { + "source": "iana" + }, + "audio/g726-16": { + "source": "iana" + }, + "audio/g726-24": { + "source": "iana" + }, + "audio/g726-32": { + "source": "iana" + }, + "audio/g726-40": { + "source": "iana" + }, + "audio/g728": { + "source": "iana" + }, + "audio/g729": { + "source": "iana" + }, + "audio/g7291": { + "source": "iana" + }, + "audio/g729d": { + "source": "iana" + }, + "audio/g729e": { + "source": "iana" + }, + "audio/gsm": { + "source": "iana" + }, + "audio/gsm-efr": { + "source": "iana" + }, + "audio/gsm-hr-08": { + "source": "iana" + }, + "audio/ilbc": { + "source": "iana" + }, + "audio/ip-mr_v2.5": { + "source": "iana" + }, + "audio/isac": { + "source": "apache" + }, + "audio/l16": { + "source": "iana" + }, + "audio/l20": { + "source": "iana" + }, + "audio/l24": { + "source": "iana", + "compressible": false + }, + "audio/l8": { + "source": "iana" + }, + "audio/lpc": { + "source": "iana" + }, + "audio/melp": { + "source": "iana" + }, + "audio/melp1200": { + "source": "iana" + }, + "audio/melp2400": { + "source": "iana" + }, + "audio/melp600": { + "source": "iana" + }, + "audio/mhas": { + "source": "iana" + }, + "audio/midi": { + "source": "apache", + "extensions": ["mid","midi","kar","rmi"] + }, + "audio/mobile-xmf": { + "source": "iana", + "extensions": ["mxmf"] + }, + "audio/mp3": { + "compressible": false, + "extensions": ["mp3"] + }, + "audio/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["m4a","mp4a"] + }, + "audio/mp4a-latm": { + "source": "iana" + }, + "audio/mpa": { + "source": "iana" + }, + "audio/mpa-robust": { + "source": "iana" + }, + "audio/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpga","mp2","mp2a","mp3","m2a","m3a"] + }, + "audio/mpeg4-generic": { + "source": "iana" + }, + 
"audio/musepack": { + "source": "apache" + }, + "audio/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["oga","ogg","spx","opus"] + }, + "audio/opus": { + "source": "iana" + }, + "audio/parityfec": { + "source": "iana" + }, + "audio/pcma": { + "source": "iana" + }, + "audio/pcma-wb": { + "source": "iana" + }, + "audio/pcmu": { + "source": "iana" + }, + "audio/pcmu-wb": { + "source": "iana" + }, + "audio/prs.sid": { + "source": "iana" + }, + "audio/qcelp": { + "source": "iana" + }, + "audio/raptorfec": { + "source": "iana" + }, + "audio/red": { + "source": "iana" + }, + "audio/rtp-enc-aescm128": { + "source": "iana" + }, + "audio/rtp-midi": { + "source": "iana" + }, + "audio/rtploopback": { + "source": "iana" + }, + "audio/rtx": { + "source": "iana" + }, + "audio/s3m": { + "source": "apache", + "extensions": ["s3m"] + }, + "audio/scip": { + "source": "iana" + }, + "audio/silk": { + "source": "apache", + "extensions": ["sil"] + }, + "audio/smv": { + "source": "iana" + }, + "audio/smv-qcp": { + "source": "iana" + }, + "audio/smv0": { + "source": "iana" + }, + "audio/sofa": { + "source": "iana" + }, + "audio/sp-midi": { + "source": "iana" + }, + "audio/speex": { + "source": "iana" + }, + "audio/t140c": { + "source": "iana" + }, + "audio/t38": { + "source": "iana" + }, + "audio/telephone-event": { + "source": "iana" + }, + "audio/tetra_acelp": { + "source": "iana" + }, + "audio/tetra_acelp_bb": { + "source": "iana" + }, + "audio/tone": { + "source": "iana" + }, + "audio/tsvcis": { + "source": "iana" + }, + "audio/uemclip": { + "source": "iana" + }, + "audio/ulpfec": { + "source": "iana" + }, + "audio/usac": { + "source": "iana" + }, + "audio/vdvi": { + "source": "iana" + }, + "audio/vmr-wb": { + "source": "iana" + }, + "audio/vnd.3gpp.iufp": { + "source": "iana" + }, + "audio/vnd.4sb": { + "source": "iana" + }, + "audio/vnd.audiokoz": { + "source": "iana" + }, + "audio/vnd.celp": { + "source": "iana" + }, + "audio/vnd.cisco.nse": { + "source": "iana" + }, + "audio/vnd.cmles.radio-events": { + "source": "iana" + }, + "audio/vnd.cns.anp1": { + "source": "iana" + }, + "audio/vnd.cns.inf1": { + "source": "iana" + }, + "audio/vnd.dece.audio": { + "source": "iana", + "extensions": ["uva","uvva"] + }, + "audio/vnd.digital-winds": { + "source": "iana", + "extensions": ["eol"] + }, + "audio/vnd.dlna.adts": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.1": { + "source": "iana" + }, + "audio/vnd.dolby.heaac.2": { + "source": "iana" + }, + "audio/vnd.dolby.mlp": { + "source": "iana" + }, + "audio/vnd.dolby.mps": { + "source": "iana" + }, + "audio/vnd.dolby.pl2": { + "source": "iana" + }, + "audio/vnd.dolby.pl2x": { + "source": "iana" + }, + "audio/vnd.dolby.pl2z": { + "source": "iana" + }, + "audio/vnd.dolby.pulse.1": { + "source": "iana" + }, + "audio/vnd.dra": { + "source": "iana", + "extensions": ["dra"] + }, + "audio/vnd.dts": { + "source": "iana", + "extensions": ["dts"] + }, + "audio/vnd.dts.hd": { + "source": "iana", + "extensions": ["dtshd"] + }, + "audio/vnd.dts.uhd": { + "source": "iana" + }, + "audio/vnd.dvb.file": { + "source": "iana" + }, + "audio/vnd.everad.plj": { + "source": "iana" + }, + "audio/vnd.hns.audio": { + "source": "iana" + }, + "audio/vnd.lucent.voice": { + "source": "iana", + "extensions": ["lvp"] + }, + "audio/vnd.ms-playready.media.pya": { + "source": "iana", + "extensions": ["pya"] + }, + "audio/vnd.nokia.mobile-xmf": { + "source": "iana" + }, + "audio/vnd.nortel.vbk": { + "source": "iana" + }, + "audio/vnd.nuera.ecelp4800": { + "source": "iana", + 
"extensions": ["ecelp4800"] + }, + "audio/vnd.nuera.ecelp7470": { + "source": "iana", + "extensions": ["ecelp7470"] + }, + "audio/vnd.nuera.ecelp9600": { + "source": "iana", + "extensions": ["ecelp9600"] + }, + "audio/vnd.octel.sbc": { + "source": "iana" + }, + "audio/vnd.presonus.multitrack": { + "source": "iana" + }, + "audio/vnd.qcelp": { + "source": "iana" + }, + "audio/vnd.rhetorex.32kadpcm": { + "source": "iana" + }, + "audio/vnd.rip": { + "source": "iana", + "extensions": ["rip"] + }, + "audio/vnd.rn-realaudio": { + "compressible": false + }, + "audio/vnd.sealedmedia.softseal.mpeg": { + "source": "iana" + }, + "audio/vnd.vmx.cvsd": { + "source": "iana" + }, + "audio/vnd.wave": { + "compressible": false + }, + "audio/vorbis": { + "source": "iana", + "compressible": false + }, + "audio/vorbis-config": { + "source": "iana" + }, + "audio/wav": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/wave": { + "compressible": false, + "extensions": ["wav"] + }, + "audio/webm": { + "source": "apache", + "compressible": false, + "extensions": ["weba"] + }, + "audio/x-aac": { + "source": "apache", + "compressible": false, + "extensions": ["aac"] + }, + "audio/x-aiff": { + "source": "apache", + "extensions": ["aif","aiff","aifc"] + }, + "audio/x-caf": { + "source": "apache", + "compressible": false, + "extensions": ["caf"] + }, + "audio/x-flac": { + "source": "apache", + "extensions": ["flac"] + }, + "audio/x-m4a": { + "source": "nginx", + "extensions": ["m4a"] + }, + "audio/x-matroska": { + "source": "apache", + "extensions": ["mka"] + }, + "audio/x-mpegurl": { + "source": "apache", + "extensions": ["m3u"] + }, + "audio/x-ms-wax": { + "source": "apache", + "extensions": ["wax"] + }, + "audio/x-ms-wma": { + "source": "apache", + "extensions": ["wma"] + }, + "audio/x-pn-realaudio": { + "source": "apache", + "extensions": ["ram","ra"] + }, + "audio/x-pn-realaudio-plugin": { + "source": "apache", + "extensions": ["rmp"] + }, + "audio/x-realaudio": { + "source": "nginx", + "extensions": ["ra"] + }, + "audio/x-tta": { + "source": "apache" + }, + "audio/x-wav": { + "source": "apache", + "extensions": ["wav"] + }, + "audio/xm": { + "source": "apache", + "extensions": ["xm"] + }, + "chemical/x-cdx": { + "source": "apache", + "extensions": ["cdx"] + }, + "chemical/x-cif": { + "source": "apache", + "extensions": ["cif"] + }, + "chemical/x-cmdf": { + "source": "apache", + "extensions": ["cmdf"] + }, + "chemical/x-cml": { + "source": "apache", + "extensions": ["cml"] + }, + "chemical/x-csml": { + "source": "apache", + "extensions": ["csml"] + }, + "chemical/x-pdb": { + "source": "apache" + }, + "chemical/x-xyz": { + "source": "apache", + "extensions": ["xyz"] + }, + "font/collection": { + "source": "iana", + "extensions": ["ttc"] + }, + "font/otf": { + "source": "iana", + "compressible": true, + "extensions": ["otf"] + }, + "font/sfnt": { + "source": "iana" + }, + "font/ttf": { + "source": "iana", + "compressible": true, + "extensions": ["ttf"] + }, + "font/woff": { + "source": "iana", + "extensions": ["woff"] + }, + "font/woff2": { + "source": "iana", + "extensions": ["woff2"] + }, + "image/aces": { + "source": "iana", + "extensions": ["exr"] + }, + "image/apng": { + "compressible": false, + "extensions": ["apng"] + }, + "image/avci": { + "source": "iana", + "extensions": ["avci"] + }, + "image/avcs": { + "source": "iana", + "extensions": ["avcs"] + }, + "image/avif": { + "source": "iana", + "compressible": false, + "extensions": ["avif"] + }, + "image/bmp": { + "source": "iana", + 
"compressible": true, + "extensions": ["bmp"] + }, + "image/cgm": { + "source": "iana", + "extensions": ["cgm"] + }, + "image/dicom-rle": { + "source": "iana", + "extensions": ["drle"] + }, + "image/emf": { + "source": "iana", + "extensions": ["emf"] + }, + "image/fits": { + "source": "iana", + "extensions": ["fits"] + }, + "image/g3fax": { + "source": "iana", + "extensions": ["g3"] + }, + "image/gif": { + "source": "iana", + "compressible": false, + "extensions": ["gif"] + }, + "image/heic": { + "source": "iana", + "extensions": ["heic"] + }, + "image/heic-sequence": { + "source": "iana", + "extensions": ["heics"] + }, + "image/heif": { + "source": "iana", + "extensions": ["heif"] + }, + "image/heif-sequence": { + "source": "iana", + "extensions": ["heifs"] + }, + "image/hej2k": { + "source": "iana", + "extensions": ["hej2"] + }, + "image/hsj2": { + "source": "iana", + "extensions": ["hsj2"] + }, + "image/ief": { + "source": "iana", + "extensions": ["ief"] + }, + "image/jls": { + "source": "iana", + "extensions": ["jls"] + }, + "image/jp2": { + "source": "iana", + "compressible": false, + "extensions": ["jp2","jpg2"] + }, + "image/jpeg": { + "source": "iana", + "compressible": false, + "extensions": ["jpeg","jpg","jpe"] + }, + "image/jph": { + "source": "iana", + "extensions": ["jph"] + }, + "image/jphc": { + "source": "iana", + "extensions": ["jhc"] + }, + "image/jpm": { + "source": "iana", + "compressible": false, + "extensions": ["jpm"] + }, + "image/jpx": { + "source": "iana", + "compressible": false, + "extensions": ["jpx","jpf"] + }, + "image/jxr": { + "source": "iana", + "extensions": ["jxr"] + }, + "image/jxra": { + "source": "iana", + "extensions": ["jxra"] + }, + "image/jxrs": { + "source": "iana", + "extensions": ["jxrs"] + }, + "image/jxs": { + "source": "iana", + "extensions": ["jxs"] + }, + "image/jxsc": { + "source": "iana", + "extensions": ["jxsc"] + }, + "image/jxsi": { + "source": "iana", + "extensions": ["jxsi"] + }, + "image/jxss": { + "source": "iana", + "extensions": ["jxss"] + }, + "image/ktx": { + "source": "iana", + "extensions": ["ktx"] + }, + "image/ktx2": { + "source": "iana", + "extensions": ["ktx2"] + }, + "image/naplps": { + "source": "iana" + }, + "image/pjpeg": { + "compressible": false + }, + "image/png": { + "source": "iana", + "compressible": false, + "extensions": ["png"] + }, + "image/prs.btif": { + "source": "iana", + "extensions": ["btif"] + }, + "image/prs.pti": { + "source": "iana", + "extensions": ["pti"] + }, + "image/pwg-raster": { + "source": "iana" + }, + "image/sgi": { + "source": "apache", + "extensions": ["sgi"] + }, + "image/svg+xml": { + "source": "iana", + "compressible": true, + "extensions": ["svg","svgz"] + }, + "image/t38": { + "source": "iana", + "extensions": ["t38"] + }, + "image/tiff": { + "source": "iana", + "compressible": false, + "extensions": ["tif","tiff"] + }, + "image/tiff-fx": { + "source": "iana", + "extensions": ["tfx"] + }, + "image/vnd.adobe.photoshop": { + "source": "iana", + "compressible": true, + "extensions": ["psd"] + }, + "image/vnd.airzip.accelerator.azv": { + "source": "iana", + "extensions": ["azv"] + }, + "image/vnd.cns.inf2": { + "source": "iana" + }, + "image/vnd.dece.graphic": { + "source": "iana", + "extensions": ["uvi","uvvi","uvg","uvvg"] + }, + "image/vnd.djvu": { + "source": "iana", + "extensions": ["djvu","djv"] + }, + "image/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "image/vnd.dwg": { + "source": "iana", + "extensions": ["dwg"] + }, + "image/vnd.dxf": { + "source": 
"iana", + "extensions": ["dxf"] + }, + "image/vnd.fastbidsheet": { + "source": "iana", + "extensions": ["fbs"] + }, + "image/vnd.fpx": { + "source": "iana", + "extensions": ["fpx"] + }, + "image/vnd.fst": { + "source": "iana", + "extensions": ["fst"] + }, + "image/vnd.fujixerox.edmics-mmr": { + "source": "iana", + "extensions": ["mmr"] + }, + "image/vnd.fujixerox.edmics-rlc": { + "source": "iana", + "extensions": ["rlc"] + }, + "image/vnd.globalgraphics.pgb": { + "source": "iana" + }, + "image/vnd.microsoft.icon": { + "source": "iana", + "compressible": true, + "extensions": ["ico"] + }, + "image/vnd.mix": { + "source": "iana" + }, + "image/vnd.mozilla.apng": { + "source": "iana" + }, + "image/vnd.ms-dds": { + "compressible": true, + "extensions": ["dds"] + }, + "image/vnd.ms-modi": { + "source": "iana", + "extensions": ["mdi"] + }, + "image/vnd.ms-photo": { + "source": "apache", + "extensions": ["wdp"] + }, + "image/vnd.net-fpx": { + "source": "iana", + "extensions": ["npx"] + }, + "image/vnd.pco.b16": { + "source": "iana", + "extensions": ["b16"] + }, + "image/vnd.radiance": { + "source": "iana" + }, + "image/vnd.sealed.png": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.gif": { + "source": "iana" + }, + "image/vnd.sealedmedia.softseal.jpg": { + "source": "iana" + }, + "image/vnd.svf": { + "source": "iana" + }, + "image/vnd.tencent.tap": { + "source": "iana", + "extensions": ["tap"] + }, + "image/vnd.valve.source.texture": { + "source": "iana", + "extensions": ["vtf"] + }, + "image/vnd.wap.wbmp": { + "source": "iana", + "extensions": ["wbmp"] + }, + "image/vnd.xiff": { + "source": "iana", + "extensions": ["xif"] + }, + "image/vnd.zbrush.pcx": { + "source": "iana", + "extensions": ["pcx"] + }, + "image/webp": { + "source": "apache", + "extensions": ["webp"] + }, + "image/wmf": { + "source": "iana", + "extensions": ["wmf"] + }, + "image/x-3ds": { + "source": "apache", + "extensions": ["3ds"] + }, + "image/x-cmu-raster": { + "source": "apache", + "extensions": ["ras"] + }, + "image/x-cmx": { + "source": "apache", + "extensions": ["cmx"] + }, + "image/x-freehand": { + "source": "apache", + "extensions": ["fh","fhc","fh4","fh5","fh7"] + }, + "image/x-icon": { + "source": "apache", + "compressible": true, + "extensions": ["ico"] + }, + "image/x-jng": { + "source": "nginx", + "extensions": ["jng"] + }, + "image/x-mrsid-image": { + "source": "apache", + "extensions": ["sid"] + }, + "image/x-ms-bmp": { + "source": "nginx", + "compressible": true, + "extensions": ["bmp"] + }, + "image/x-pcx": { + "source": "apache", + "extensions": ["pcx"] + }, + "image/x-pict": { + "source": "apache", + "extensions": ["pic","pct"] + }, + "image/x-portable-anymap": { + "source": "apache", + "extensions": ["pnm"] + }, + "image/x-portable-bitmap": { + "source": "apache", + "extensions": ["pbm"] + }, + "image/x-portable-graymap": { + "source": "apache", + "extensions": ["pgm"] + }, + "image/x-portable-pixmap": { + "source": "apache", + "extensions": ["ppm"] + }, + "image/x-rgb": { + "source": "apache", + "extensions": ["rgb"] + }, + "image/x-tga": { + "source": "apache", + "extensions": ["tga"] + }, + "image/x-xbitmap": { + "source": "apache", + "extensions": ["xbm"] + }, + "image/x-xcf": { + "compressible": false + }, + "image/x-xpixmap": { + "source": "apache", + "extensions": ["xpm"] + }, + "image/x-xwindowdump": { + "source": "apache", + "extensions": ["xwd"] + }, + "message/cpim": { + "source": "iana" + }, + "message/delivery-status": { + "source": "iana" + }, + "message/disposition-notification": 
{ + "source": "iana", + "extensions": [ + "disposition-notification" + ] + }, + "message/external-body": { + "source": "iana" + }, + "message/feedback-report": { + "source": "iana" + }, + "message/global": { + "source": "iana", + "extensions": ["u8msg"] + }, + "message/global-delivery-status": { + "source": "iana", + "extensions": ["u8dsn"] + }, + "message/global-disposition-notification": { + "source": "iana", + "extensions": ["u8mdn"] + }, + "message/global-headers": { + "source": "iana", + "extensions": ["u8hdr"] + }, + "message/http": { + "source": "iana", + "compressible": false + }, + "message/imdn+xml": { + "source": "iana", + "compressible": true + }, + "message/news": { + "source": "iana" + }, + "message/partial": { + "source": "iana", + "compressible": false + }, + "message/rfc822": { + "source": "iana", + "compressible": true, + "extensions": ["eml","mime"] + }, + "message/s-http": { + "source": "iana" + }, + "message/sip": { + "source": "iana" + }, + "message/sipfrag": { + "source": "iana" + }, + "message/tracking-status": { + "source": "iana" + }, + "message/vnd.si.simp": { + "source": "iana" + }, + "message/vnd.wfa.wsc": { + "source": "iana", + "extensions": ["wsc"] + }, + "model/3mf": { + "source": "iana", + "extensions": ["3mf"] + }, + "model/e57": { + "source": "iana" + }, + "model/gltf+json": { + "source": "iana", + "compressible": true, + "extensions": ["gltf"] + }, + "model/gltf-binary": { + "source": "iana", + "compressible": true, + "extensions": ["glb"] + }, + "model/iges": { + "source": "iana", + "compressible": false, + "extensions": ["igs","iges"] + }, + "model/mesh": { + "source": "iana", + "compressible": false, + "extensions": ["msh","mesh","silo"] + }, + "model/mtl": { + "source": "iana", + "extensions": ["mtl"] + }, + "model/obj": { + "source": "iana", + "extensions": ["obj"] + }, + "model/step": { + "source": "iana" + }, + "model/step+xml": { + "source": "iana", + "compressible": true, + "extensions": ["stpx"] + }, + "model/step+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpz"] + }, + "model/step-xml+zip": { + "source": "iana", + "compressible": false, + "extensions": ["stpxz"] + }, + "model/stl": { + "source": "iana", + "extensions": ["stl"] + }, + "model/vnd.collada+xml": { + "source": "iana", + "compressible": true, + "extensions": ["dae"] + }, + "model/vnd.dwf": { + "source": "iana", + "extensions": ["dwf"] + }, + "model/vnd.flatland.3dml": { + "source": "iana" + }, + "model/vnd.gdl": { + "source": "iana", + "extensions": ["gdl"] + }, + "model/vnd.gs-gdl": { + "source": "apache" + }, + "model/vnd.gs.gdl": { + "source": "iana" + }, + "model/vnd.gtw": { + "source": "iana", + "extensions": ["gtw"] + }, + "model/vnd.moml+xml": { + "source": "iana", + "compressible": true + }, + "model/vnd.mts": { + "source": "iana", + "extensions": ["mts"] + }, + "model/vnd.opengex": { + "source": "iana", + "extensions": ["ogex"] + }, + "model/vnd.parasolid.transmit.binary": { + "source": "iana", + "extensions": ["x_b"] + }, + "model/vnd.parasolid.transmit.text": { + "source": "iana", + "extensions": ["x_t"] + }, + "model/vnd.pytha.pyox": { + "source": "iana" + }, + "model/vnd.rosette.annotated-data-model": { + "source": "iana" + }, + "model/vnd.sap.vds": { + "source": "iana", + "extensions": ["vds"] + }, + "model/vnd.usdz+zip": { + "source": "iana", + "compressible": false, + "extensions": ["usdz"] + }, + "model/vnd.valve.source.compiled-map": { + "source": "iana", + "extensions": ["bsp"] + }, + "model/vnd.vtu": { + "source": "iana", + 
"extensions": ["vtu"] + }, + "model/vrml": { + "source": "iana", + "compressible": false, + "extensions": ["wrl","vrml"] + }, + "model/x3d+binary": { + "source": "apache", + "compressible": false, + "extensions": ["x3db","x3dbz"] + }, + "model/x3d+fastinfoset": { + "source": "iana", + "extensions": ["x3db"] + }, + "model/x3d+vrml": { + "source": "apache", + "compressible": false, + "extensions": ["x3dv","x3dvz"] + }, + "model/x3d+xml": { + "source": "iana", + "compressible": true, + "extensions": ["x3d","x3dz"] + }, + "model/x3d-vrml": { + "source": "iana", + "extensions": ["x3dv"] + }, + "multipart/alternative": { + "source": "iana", + "compressible": false + }, + "multipart/appledouble": { + "source": "iana" + }, + "multipart/byteranges": { + "source": "iana" + }, + "multipart/digest": { + "source": "iana" + }, + "multipart/encrypted": { + "source": "iana", + "compressible": false + }, + "multipart/form-data": { + "source": "iana", + "compressible": false + }, + "multipart/header-set": { + "source": "iana" + }, + "multipart/mixed": { + "source": "iana" + }, + "multipart/multilingual": { + "source": "iana" + }, + "multipart/parallel": { + "source": "iana" + }, + "multipart/related": { + "source": "iana", + "compressible": false + }, + "multipart/report": { + "source": "iana" + }, + "multipart/signed": { + "source": "iana", + "compressible": false + }, + "multipart/vnd.bint.med-plus": { + "source": "iana" + }, + "multipart/voice-message": { + "source": "iana" + }, + "multipart/x-mixed-replace": { + "source": "iana" + }, + "text/1d-interleaved-parityfec": { + "source": "iana" + }, + "text/cache-manifest": { + "source": "iana", + "compressible": true, + "extensions": ["appcache","manifest"] + }, + "text/calendar": { + "source": "iana", + "extensions": ["ics","ifb"] + }, + "text/calender": { + "compressible": true + }, + "text/cmd": { + "compressible": true + }, + "text/coffeescript": { + "extensions": ["coffee","litcoffee"] + }, + "text/cql": { + "source": "iana" + }, + "text/cql-expression": { + "source": "iana" + }, + "text/cql-identifier": { + "source": "iana" + }, + "text/css": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["css"] + }, + "text/csv": { + "source": "iana", + "compressible": true, + "extensions": ["csv"] + }, + "text/csv-schema": { + "source": "iana" + }, + "text/directory": { + "source": "iana" + }, + "text/dns": { + "source": "iana" + }, + "text/ecmascript": { + "source": "iana" + }, + "text/encaprtp": { + "source": "iana" + }, + "text/enriched": { + "source": "iana" + }, + "text/fhirpath": { + "source": "iana" + }, + "text/flexfec": { + "source": "iana" + }, + "text/fwdred": { + "source": "iana" + }, + "text/gff3": { + "source": "iana" + }, + "text/grammar-ref-list": { + "source": "iana" + }, + "text/html": { + "source": "iana", + "compressible": true, + "extensions": ["html","htm","shtml"] + }, + "text/jade": { + "extensions": ["jade"] + }, + "text/javascript": { + "source": "iana", + "compressible": true + }, + "text/jcr-cnd": { + "source": "iana" + }, + "text/jsx": { + "compressible": true, + "extensions": ["jsx"] + }, + "text/less": { + "compressible": true, + "extensions": ["less"] + }, + "text/markdown": { + "source": "iana", + "compressible": true, + "extensions": ["markdown","md"] + }, + "text/mathml": { + "source": "nginx", + "extensions": ["mml"] + }, + "text/mdx": { + "compressible": true, + "extensions": ["mdx"] + }, + "text/mizar": { + "source": "iana" + }, + "text/n3": { + "source": "iana", + "charset": "UTF-8", + 
"compressible": true, + "extensions": ["n3"] + }, + "text/parameters": { + "source": "iana", + "charset": "UTF-8" + }, + "text/parityfec": { + "source": "iana" + }, + "text/plain": { + "source": "iana", + "compressible": true, + "extensions": ["txt","text","conf","def","list","log","in","ini"] + }, + "text/provenance-notation": { + "source": "iana", + "charset": "UTF-8" + }, + "text/prs.fallenstein.rst": { + "source": "iana" + }, + "text/prs.lines.tag": { + "source": "iana", + "extensions": ["dsc"] + }, + "text/prs.prop.logic": { + "source": "iana" + }, + "text/raptorfec": { + "source": "iana" + }, + "text/red": { + "source": "iana" + }, + "text/rfc822-headers": { + "source": "iana" + }, + "text/richtext": { + "source": "iana", + "compressible": true, + "extensions": ["rtx"] + }, + "text/rtf": { + "source": "iana", + "compressible": true, + "extensions": ["rtf"] + }, + "text/rtp-enc-aescm128": { + "source": "iana" + }, + "text/rtploopback": { + "source": "iana" + }, + "text/rtx": { + "source": "iana" + }, + "text/sgml": { + "source": "iana", + "extensions": ["sgml","sgm"] + }, + "text/shaclc": { + "source": "iana" + }, + "text/shex": { + "source": "iana", + "extensions": ["shex"] + }, + "text/slim": { + "extensions": ["slim","slm"] + }, + "text/spdx": { + "source": "iana", + "extensions": ["spdx"] + }, + "text/strings": { + "source": "iana" + }, + "text/stylus": { + "extensions": ["stylus","styl"] + }, + "text/t140": { + "source": "iana" + }, + "text/tab-separated-values": { + "source": "iana", + "compressible": true, + "extensions": ["tsv"] + }, + "text/troff": { + "source": "iana", + "extensions": ["t","tr","roff","man","me","ms"] + }, + "text/turtle": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["ttl"] + }, + "text/ulpfec": { + "source": "iana" + }, + "text/uri-list": { + "source": "iana", + "compressible": true, + "extensions": ["uri","uris","urls"] + }, + "text/vcard": { + "source": "iana", + "compressible": true, + "extensions": ["vcard"] + }, + "text/vnd.a": { + "source": "iana" + }, + "text/vnd.abc": { + "source": "iana" + }, + "text/vnd.ascii-art": { + "source": "iana" + }, + "text/vnd.curl": { + "source": "iana", + "extensions": ["curl"] + }, + "text/vnd.curl.dcurl": { + "source": "apache", + "extensions": ["dcurl"] + }, + "text/vnd.curl.mcurl": { + "source": "apache", + "extensions": ["mcurl"] + }, + "text/vnd.curl.scurl": { + "source": "apache", + "extensions": ["scurl"] + }, + "text/vnd.debian.copyright": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.dmclientscript": { + "source": "iana" + }, + "text/vnd.dvb.subtitle": { + "source": "iana", + "extensions": ["sub"] + }, + "text/vnd.esmertec.theme-descriptor": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.familysearch.gedcom": { + "source": "iana", + "extensions": ["ged"] + }, + "text/vnd.ficlab.flt": { + "source": "iana" + }, + "text/vnd.fly": { + "source": "iana", + "extensions": ["fly"] + }, + "text/vnd.fmi.flexstor": { + "source": "iana", + "extensions": ["flx"] + }, + "text/vnd.gml": { + "source": "iana" + }, + "text/vnd.graphviz": { + "source": "iana", + "extensions": ["gv"] + }, + "text/vnd.hans": { + "source": "iana" + }, + "text/vnd.hgl": { + "source": "iana" + }, + "text/vnd.in3d.3dml": { + "source": "iana", + "extensions": ["3dml"] + }, + "text/vnd.in3d.spot": { + "source": "iana", + "extensions": ["spot"] + }, + "text/vnd.iptc.newsml": { + "source": "iana" + }, + "text/vnd.iptc.nitf": { + "source": "iana" + }, + "text/vnd.latex-z": { + "source": "iana" + }, + 
"text/vnd.motorola.reflex": { + "source": "iana" + }, + "text/vnd.ms-mediapackage": { + "source": "iana" + }, + "text/vnd.net2phone.commcenter.command": { + "source": "iana" + }, + "text/vnd.radisys.msml-basic-layout": { + "source": "iana" + }, + "text/vnd.senx.warpscript": { + "source": "iana" + }, + "text/vnd.si.uricatalogue": { + "source": "iana" + }, + "text/vnd.sosi": { + "source": "iana" + }, + "text/vnd.sun.j2me.app-descriptor": { + "source": "iana", + "charset": "UTF-8", + "extensions": ["jad"] + }, + "text/vnd.trolltech.linguist": { + "source": "iana", + "charset": "UTF-8" + }, + "text/vnd.wap.si": { + "source": "iana" + }, + "text/vnd.wap.sl": { + "source": "iana" + }, + "text/vnd.wap.wml": { + "source": "iana", + "extensions": ["wml"] + }, + "text/vnd.wap.wmlscript": { + "source": "iana", + "extensions": ["wmls"] + }, + "text/vtt": { + "source": "iana", + "charset": "UTF-8", + "compressible": true, + "extensions": ["vtt"] + }, + "text/x-asm": { + "source": "apache", + "extensions": ["s","asm"] + }, + "text/x-c": { + "source": "apache", + "extensions": ["c","cc","cxx","cpp","h","hh","dic"] + }, + "text/x-component": { + "source": "nginx", + "extensions": ["htc"] + }, + "text/x-fortran": { + "source": "apache", + "extensions": ["f","for","f77","f90"] + }, + "text/x-gwt-rpc": { + "compressible": true + }, + "text/x-handlebars-template": { + "extensions": ["hbs"] + }, + "text/x-java-source": { + "source": "apache", + "extensions": ["java"] + }, + "text/x-jquery-tmpl": { + "compressible": true + }, + "text/x-lua": { + "extensions": ["lua"] + }, + "text/x-markdown": { + "compressible": true, + "extensions": ["mkd"] + }, + "text/x-nfo": { + "source": "apache", + "extensions": ["nfo"] + }, + "text/x-opml": { + "source": "apache", + "extensions": ["opml"] + }, + "text/x-org": { + "compressible": true, + "extensions": ["org"] + }, + "text/x-pascal": { + "source": "apache", + "extensions": ["p","pas"] + }, + "text/x-processing": { + "compressible": true, + "extensions": ["pde"] + }, + "text/x-sass": { + "extensions": ["sass"] + }, + "text/x-scss": { + "extensions": ["scss"] + }, + "text/x-setext": { + "source": "apache", + "extensions": ["etx"] + }, + "text/x-sfv": { + "source": "apache", + "extensions": ["sfv"] + }, + "text/x-suse-ymp": { + "compressible": true, + "extensions": ["ymp"] + }, + "text/x-uuencode": { + "source": "apache", + "extensions": ["uu"] + }, + "text/x-vcalendar": { + "source": "apache", + "extensions": ["vcs"] + }, + "text/x-vcard": { + "source": "apache", + "extensions": ["vcf"] + }, + "text/xml": { + "source": "iana", + "compressible": true, + "extensions": ["xml"] + }, + "text/xml-external-parsed-entity": { + "source": "iana" + }, + "text/yaml": { + "compressible": true, + "extensions": ["yaml","yml"] + }, + "video/1d-interleaved-parityfec": { + "source": "iana" + }, + "video/3gpp": { + "source": "iana", + "extensions": ["3gp","3gpp"] + }, + "video/3gpp-tt": { + "source": "iana" + }, + "video/3gpp2": { + "source": "iana", + "extensions": ["3g2"] + }, + "video/av1": { + "source": "iana" + }, + "video/bmpeg": { + "source": "iana" + }, + "video/bt656": { + "source": "iana" + }, + "video/celb": { + "source": "iana" + }, + "video/dv": { + "source": "iana" + }, + "video/encaprtp": { + "source": "iana" + }, + "video/ffv1": { + "source": "iana" + }, + "video/flexfec": { + "source": "iana" + }, + "video/h261": { + "source": "iana", + "extensions": ["h261"] + }, + "video/h263": { + "source": "iana", + "extensions": ["h263"] + }, + "video/h263-1998": { + "source": "iana" + }, 
+ "video/h263-2000": { + "source": "iana" + }, + "video/h264": { + "source": "iana", + "extensions": ["h264"] + }, + "video/h264-rcdo": { + "source": "iana" + }, + "video/h264-svc": { + "source": "iana" + }, + "video/h265": { + "source": "iana" + }, + "video/iso.segment": { + "source": "iana", + "extensions": ["m4s"] + }, + "video/jpeg": { + "source": "iana", + "extensions": ["jpgv"] + }, + "video/jpeg2000": { + "source": "iana" + }, + "video/jpm": { + "source": "apache", + "extensions": ["jpm","jpgm"] + }, + "video/jxsv": { + "source": "iana" + }, + "video/mj2": { + "source": "iana", + "extensions": ["mj2","mjp2"] + }, + "video/mp1s": { + "source": "iana" + }, + "video/mp2p": { + "source": "iana" + }, + "video/mp2t": { + "source": "iana", + "extensions": ["ts"] + }, + "video/mp4": { + "source": "iana", + "compressible": false, + "extensions": ["mp4","mp4v","mpg4"] + }, + "video/mp4v-es": { + "source": "iana" + }, + "video/mpeg": { + "source": "iana", + "compressible": false, + "extensions": ["mpeg","mpg","mpe","m1v","m2v"] + }, + "video/mpeg4-generic": { + "source": "iana" + }, + "video/mpv": { + "source": "iana" + }, + "video/nv": { + "source": "iana" + }, + "video/ogg": { + "source": "iana", + "compressible": false, + "extensions": ["ogv"] + }, + "video/parityfec": { + "source": "iana" + }, + "video/pointer": { + "source": "iana" + }, + "video/quicktime": { + "source": "iana", + "compressible": false, + "extensions": ["qt","mov"] + }, + "video/raptorfec": { + "source": "iana" + }, + "video/raw": { + "source": "iana" + }, + "video/rtp-enc-aescm128": { + "source": "iana" + }, + "video/rtploopback": { + "source": "iana" + }, + "video/rtx": { + "source": "iana" + }, + "video/scip": { + "source": "iana" + }, + "video/smpte291": { + "source": "iana" + }, + "video/smpte292m": { + "source": "iana" + }, + "video/ulpfec": { + "source": "iana" + }, + "video/vc1": { + "source": "iana" + }, + "video/vc2": { + "source": "iana" + }, + "video/vnd.cctv": { + "source": "iana" + }, + "video/vnd.dece.hd": { + "source": "iana", + "extensions": ["uvh","uvvh"] + }, + "video/vnd.dece.mobile": { + "source": "iana", + "extensions": ["uvm","uvvm"] + }, + "video/vnd.dece.mp4": { + "source": "iana" + }, + "video/vnd.dece.pd": { + "source": "iana", + "extensions": ["uvp","uvvp"] + }, + "video/vnd.dece.sd": { + "source": "iana", + "extensions": ["uvs","uvvs"] + }, + "video/vnd.dece.video": { + "source": "iana", + "extensions": ["uvv","uvvv"] + }, + "video/vnd.directv.mpeg": { + "source": "iana" + }, + "video/vnd.directv.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dlna.mpeg-tts": { + "source": "iana" + }, + "video/vnd.dvb.file": { + "source": "iana", + "extensions": ["dvb"] + }, + "video/vnd.fvt": { + "source": "iana", + "extensions": ["fvt"] + }, + "video/vnd.hns.video": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.1dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-1010": { + "source": "iana" + }, + "video/vnd.iptvforum.2dparityfec-2005": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsavc": { + "source": "iana" + }, + "video/vnd.iptvforum.ttsmpeg2": { + "source": "iana" + }, + "video/vnd.motorola.video": { + "source": "iana" + }, + "video/vnd.motorola.videop": { + "source": "iana" + }, + "video/vnd.mpegurl": { + "source": "iana", + "extensions": ["mxu","m4u"] + }, + "video/vnd.ms-playready.media.pyv": { + "source": "iana", + "extensions": ["pyv"] + }, + "video/vnd.nokia.interleaved-multimedia": { + 
"source": "iana" + }, + "video/vnd.nokia.mp4vr": { + "source": "iana" + }, + "video/vnd.nokia.videovoip": { + "source": "iana" + }, + "video/vnd.objectvideo": { + "source": "iana" + }, + "video/vnd.radgamettools.bink": { + "source": "iana" + }, + "video/vnd.radgamettools.smacker": { + "source": "iana" + }, + "video/vnd.sealed.mpeg1": { + "source": "iana" + }, + "video/vnd.sealed.mpeg4": { + "source": "iana" + }, + "video/vnd.sealed.swf": { + "source": "iana" + }, + "video/vnd.sealedmedia.softseal.mov": { + "source": "iana" + }, + "video/vnd.uvvu.mp4": { + "source": "iana", + "extensions": ["uvu","uvvu"] + }, + "video/vnd.vivo": { + "source": "iana", + "extensions": ["viv"] + }, + "video/vnd.youtube.yt": { + "source": "iana" + }, + "video/vp8": { + "source": "iana" + }, + "video/vp9": { + "source": "iana" + }, + "video/webm": { + "source": "apache", + "compressible": false, + "extensions": ["webm"] + }, + "video/x-f4v": { + "source": "apache", + "extensions": ["f4v"] + }, + "video/x-fli": { + "source": "apache", + "extensions": ["fli"] + }, + "video/x-flv": { + "source": "apache", + "compressible": false, + "extensions": ["flv"] + }, + "video/x-m4v": { + "source": "apache", + "extensions": ["m4v"] + }, + "video/x-matroska": { + "source": "apache", + "compressible": false, + "extensions": ["mkv","mk3d","mks"] + }, + "video/x-mng": { + "source": "apache", + "extensions": ["mng"] + }, + "video/x-ms-asf": { + "source": "apache", + "extensions": ["asf","asx"] + }, + "video/x-ms-vob": { + "source": "apache", + "extensions": ["vob"] + }, + "video/x-ms-wm": { + "source": "apache", + "extensions": ["wm"] + }, + "video/x-ms-wmv": { + "source": "apache", + "compressible": false, + "extensions": ["wmv"] + }, + "video/x-ms-wmx": { + "source": "apache", + "extensions": ["wmx"] + }, + "video/x-ms-wvx": { + "source": "apache", + "extensions": ["wvx"] + }, + "video/x-msvideo": { + "source": "apache", + "extensions": ["avi"] + }, + "video/x-sgi-movie": { + "source": "apache", + "extensions": ["movie"] + }, + "video/x-smv": { + "source": "apache", + "extensions": ["smv"] + }, + "x-conference/x-cooltalk": { + "source": "apache", + "extensions": ["ice"] + }, + "x-shader/x-fragment": { + "compressible": true + }, + "x-shader/x-vertex": { + "compressible": true + } +} diff --git a/node_modules/mime-db/index.js b/node_modules/mime-db/index.js new file mode 100644 index 0000000000..ec2be30de1 --- /dev/null +++ b/node_modules/mime-db/index.js @@ -0,0 +1,12 @@ +/*! + * mime-db + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2022 Douglas Christopher Wilson + * MIT Licensed + */ + +/** + * Module exports. 
+ */ + +module.exports = require('./db.json') diff --git a/node_modules/mime-db/package.json b/node_modules/mime-db/package.json new file mode 100644 index 0000000000..32c14b8468 --- /dev/null +++ b/node_modules/mime-db/package.json @@ -0,0 +1,60 @@ +{ + "name": "mime-db", + "description": "Media Type Database", + "version": "1.52.0", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)", + "Robert Kieffer (http://github.com/broofa)" + ], + "license": "MIT", + "keywords": [ + "mime", + "db", + "type", + "types", + "database", + "charset", + "charsets" + ], + "repository": "jshttp/mime-db", + "devDependencies": { + "bluebird": "3.7.2", + "co": "4.6.0", + "cogent": "1.0.1", + "csv-parse": "4.16.3", + "eslint": "7.32.0", + "eslint-config-standard": "15.0.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.1.1", + "eslint-plugin-standard": "4.1.0", + "gnode": "0.1.2", + "media-typer": "1.1.0", + "mocha": "9.2.1", + "nyc": "15.1.0", + "raw-body": "2.5.0", + "stream-to-array": "2.3.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "README.md", + "db.json", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "build": "node scripts/build", + "fetch": "node scripts/fetch-apache && gnode scripts/fetch-iana && node scripts/fetch-nginx", + "lint": "eslint .", + "test": "mocha --reporter spec --bail --check-leaks test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "update": "npm run fetch && npm run build", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/mime-types/HISTORY.md b/node_modules/mime-types/HISTORY.md new file mode 100644 index 0000000000..c5043b75b9 --- /dev/null +++ b/node_modules/mime-types/HISTORY.md @@ -0,0 +1,397 @@ +2.1.35 / 2022-03-12 +=================== + + * deps: mime-db@1.52.0 + - Add extensions from IANA for more `image/*` types + - Add extension `.asc` to `application/pgp-keys` + - Add extensions to various XML types + - Add new upstream MIME types + +2.1.34 / 2021-11-08 +=================== + + * deps: mime-db@1.51.0 + - Add new upstream MIME types + +2.1.33 / 2021-10-01 +=================== + + * deps: mime-db@1.50.0 + - Add deprecated iWorks mime types and extensions + - Add new upstream MIME types + +2.1.32 / 2021-07-27 +=================== + + * deps: mime-db@1.49.0 + - Add extension `.trig` to `application/trig` + - Add new upstream MIME types + +2.1.31 / 2021-06-01 +=================== + + * deps: mime-db@1.48.0 + - Add extension `.mvt` to `application/vnd.mapbox-vector-tile` + - Add new upstream MIME types + +2.1.30 / 2021-04-02 +=================== + + * deps: mime-db@1.47.0 + - Add extension `.amr` to `audio/amr` + - Remove ambigious extensions from IANA for `application/*+xml` types + - Update primary extension to `.es` for `application/ecmascript` + +2.1.29 / 2021-02-17 +=================== + + * deps: mime-db@1.46.0 + - Add extension `.amr` to `audio/amr` + - Add extension `.m4s` to `video/iso.segment` + - Add extension `.opus` to `audio/ogg` + - Add new upstream MIME types + +2.1.28 / 2021-01-01 +=================== + + * deps: mime-db@1.45.0 + - Add `application/ubjson` with extension `.ubj` + - Add `image/avif` with extension `.avif` + - Add `image/ktx2` with extension `.ktx2` + - Add extension `.dbf` to `application/vnd.dbf` + - Add extension `.rar` to `application/vnd.rar` + 
- Add extension `.td` to `application/urc-targetdesc+xml` + - Add new upstream MIME types + - Fix extension of `application/vnd.apple.keynote` to be `.key` + +2.1.27 / 2020-04-23 +=================== + + * deps: mime-db@1.44.0 + - Add charsets from IANA + - Add extension `.cjs` to `application/node` + - Add new upstream MIME types + +2.1.26 / 2020-01-05 +=================== + + * deps: mime-db@1.43.0 + - Add `application/x-keepass2` with extension `.kdbx` + - Add extension `.mxmf` to `audio/mobile-xmf` + - Add extensions from IANA for `application/*+xml` types + - Add new upstream MIME types + +2.1.25 / 2019-11-12 +=================== + + * deps: mime-db@1.42.0 + - Add new upstream MIME types + - Add `application/toml` with extension `.toml` + - Add `image/vnd.ms-dds` with extension `.dds` + +2.1.24 / 2019-04-20 +=================== + + * deps: mime-db@1.40.0 + - Add extensions from IANA for `model/*` types + - Add `text/mdx` with extension `.mdx` + +2.1.23 / 2019-04-17 +=================== + + * deps: mime-db@~1.39.0 + - Add extensions `.siv` and `.sieve` to `application/sieve` + - Add new upstream MIME types + +2.1.22 / 2019-02-14 +=================== + + * deps: mime-db@~1.38.0 + - Add extension `.nq` to `application/n-quads` + - Add extension `.nt` to `application/n-triples` + - Add new upstream MIME types + +2.1.21 / 2018-10-19 +=================== + + * deps: mime-db@~1.37.0 + - Add extensions to HEIC image types + - Add new upstream MIME types + +2.1.20 / 2018-08-26 +=================== + + * deps: mime-db@~1.36.0 + - Add Apple file extensions from IANA + - Add extensions from IANA for `image/*` types + - Add new upstream MIME types + +2.1.19 / 2018-07-17 +=================== + + * deps: mime-db@~1.35.0 + - Add extension `.csl` to `application/vnd.citationstyles.style+xml` + - Add extension `.es` to `application/ecmascript` + - Add extension `.owl` to `application/rdf+xml` + - Add new upstream MIME types + - Add UTF-8 as default charset for `text/turtle` + +2.1.18 / 2018-02-16 +=================== + + * deps: mime-db@~1.33.0 + - Add `application/raml+yaml` with extension `.raml` + - Add `application/wasm` with extension `.wasm` + - Add `text/shex` with extension `.shex` + - Add extensions for JPEG-2000 images + - Add extensions from IANA for `message/*` types + - Add new upstream MIME types + - Update font MIME types + - Update `text/hjson` to registered `application/hjson` + +2.1.17 / 2017-09-01 +=================== + + * deps: mime-db@~1.30.0 + - Add `application/vnd.ms-outlook` + - Add `application/x-arj` + - Add extension `.mjs` to `application/javascript` + - Add glTF types and extensions + - Add new upstream MIME types + - Add `text/x-org` + - Add VirtualBox MIME types + - Fix `source` records for `video/*` types that are IANA + - Update `font/opentype` to registered `font/otf` + +2.1.16 / 2017-07-24 +=================== + + * deps: mime-db@~1.29.0 + - Add `application/fido.trusted-apps+json` + - Add extension `.wadl` to `application/vnd.sun.wadl+xml` + - Add extension `.gz` to `application/gzip` + - Add new upstream MIME types + - Update extensions `.md` and `.markdown` to be `text/markdown` + +2.1.15 / 2017-03-23 +=================== + + * deps: mime-db@~1.27.0 + - Add new mime types + - Add `image/apng` + +2.1.14 / 2017-01-14 +=================== + + * deps: mime-db@~1.26.0 + - Add new mime types + +2.1.13 / 2016-11-18 +=================== + + * deps: mime-db@~1.25.0 + - Add new mime types + +2.1.12 / 2016-09-18 +=================== + + * deps: mime-db@~1.24.0 + - Add new 
mime types + - Add `audio/mp3` + +2.1.11 / 2016-05-01 +=================== + + * deps: mime-db@~1.23.0 + - Add new mime types + +2.1.10 / 2016-02-15 +=================== + + * deps: mime-db@~1.22.0 + - Add new mime types + - Fix extension of `application/dash+xml` + - Update primary extension for `audio/mp4` + +2.1.9 / 2016-01-06 +================== + + * deps: mime-db@~1.21.0 + - Add new mime types + +2.1.8 / 2015-11-30 +================== + + * deps: mime-db@~1.20.0 + - Add new mime types + +2.1.7 / 2015-09-20 +================== + + * deps: mime-db@~1.19.0 + - Add new mime types + +2.1.6 / 2015-09-03 +================== + + * deps: mime-db@~1.18.0 + - Add new mime types + +2.1.5 / 2015-08-20 +================== + + * deps: mime-db@~1.17.0 + - Add new mime types + +2.1.4 / 2015-07-30 +================== + + * deps: mime-db@~1.16.0 + - Add new mime types + +2.1.3 / 2015-07-13 +================== + + * deps: mime-db@~1.15.0 + - Add new mime types + +2.1.2 / 2015-06-25 +================== + + * deps: mime-db@~1.14.0 + - Add new mime types + +2.1.1 / 2015-06-08 +================== + + * perf: fix deopt during mapping + +2.1.0 / 2015-06-07 +================== + + * Fix incorrectly treating extension-less file name as extension + - i.e. `'path/to/json'` will no longer return `application/json` + * Fix `.charset(type)` to accept parameters + * Fix `.charset(type)` to match case-insensitive + * Improve generation of extension to MIME mapping + * Refactor internals for readability and no argument reassignment + * Prefer `application/*` MIME types from the same source + * Prefer any type over `application/octet-stream` + * deps: mime-db@~1.13.0 + - Add nginx as a source + - Add new mime types + +2.0.14 / 2015-06-06 +=================== + + * deps: mime-db@~1.12.0 + - Add new mime types + +2.0.13 / 2015-05-31 +=================== + + * deps: mime-db@~1.11.0 + - Add new mime types + +2.0.12 / 2015-05-19 +=================== + + * deps: mime-db@~1.10.0 + - Add new mime types + +2.0.11 / 2015-05-05 +=================== + + * deps: mime-db@~1.9.1 + - Add new mime types + +2.0.10 / 2015-03-13 +=================== + + * deps: mime-db@~1.8.0 + - Add new mime types + +2.0.9 / 2015-02-09 +================== + + * deps: mime-db@~1.7.0 + - Add new mime types + - Community extensions ownership transferred from `node-mime` + +2.0.8 / 2015-01-29 +================== + + * deps: mime-db@~1.6.0 + - Add new mime types + +2.0.7 / 2014-12-30 +================== + + * deps: mime-db@~1.5.0 + - Add new mime types + - Fix various invalid MIME type entries + +2.0.6 / 2014-12-30 +================== + + * deps: mime-db@~1.4.0 + - Add new mime types + - Fix various invalid MIME type entries + - Remove example template MIME types + +2.0.5 / 2014-12-29 +================== + + * deps: mime-db@~1.3.1 + - Fix missing extensions + +2.0.4 / 2014-12-10 +================== + + * deps: mime-db@~1.3.0 + - Add new mime types + +2.0.3 / 2014-11-09 +================== + + * deps: mime-db@~1.2.0 + - Add new mime types + +2.0.2 / 2014-09-28 +================== + + * deps: mime-db@~1.1.0 + - Add new mime types + - Update charsets + +2.0.1 / 2014-09-07 +================== + + * Support Node.js 0.6 + +2.0.0 / 2014-09-02 +================== + + * Use `mime-db` + * Remove `.define()` + +1.0.2 / 2014-08-04 +================== + + * Set charset=utf-8 for `text/javascript` + +1.0.1 / 2014-06-24 +================== + + * Add `text/jsx` type + +1.0.0 / 2014-05-12 +================== + + * Return `false` for unknown types + * Set charset=utf-8 for 
`application/json` + +0.1.0 / 2014-05-02 +================== + + * Initial release diff --git a/node_modules/mime-types/LICENSE b/node_modules/mime-types/LICENSE new file mode 100644 index 0000000000..06166077be --- /dev/null +++ b/node_modules/mime-types/LICENSE @@ -0,0 +1,23 @@ +(The MIT License) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/mime-types/README.md b/node_modules/mime-types/README.md new file mode 100644 index 0000000000..48d2fb4772 --- /dev/null +++ b/node_modules/mime-types/README.md @@ -0,0 +1,113 @@ +# mime-types + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +The ultimate javascript content-type utility. + +Similar to [the `mime@1.x` module](https://www.npmjs.com/package/mime), except: + +- __No fallbacks.__ Instead of naively returning the first available type, + `mime-types` simply returns `false`, so do + `var type = mime.lookup('unrecognized') || 'application/octet-stream'`. +- No `new Mime()` business, so you could do `var lookup = require('mime-types').lookup`. +- No `.define()` functionality +- Bug fixes for `.lookup(path)` + +Otherwise, the API is compatible with `mime` 1.x. + +## Install + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install mime-types +``` + +## Adding Types + +All mime types are based on [mime-db](https://www.npmjs.com/package/mime-db), +so open a PR there if you'd like to add mime types. + +## API + +```js +var mime = require('mime-types') +``` + +All functions return `false` if input is invalid or not found. + +### mime.lookup(path) + +Lookup the content-type associated with a file. + +```js +mime.lookup('json') // 'application/json' +mime.lookup('.md') // 'text/markdown' +mime.lookup('file.html') // 'text/html' +mime.lookup('folder/file.js') // 'application/javascript' +mime.lookup('folder/.htaccess') // false + +mime.lookup('cats') // false +``` + +### mime.contentType(type) + +Create a full content-type header given a content-type or extension. 
+When given an extension, `mime.lookup` is used to get the matching +content-type, otherwise the given content-type is used. Then if the +content-type does not already have a `charset` parameter, `mime.charset` +is used to get the default charset and add to the returned content-type. + +```js +mime.contentType('markdown') // 'text/x-markdown; charset=utf-8' +mime.contentType('file.json') // 'application/json; charset=utf-8' +mime.contentType('text/html') // 'text/html; charset=utf-8' +mime.contentType('text/html; charset=iso-8859-1') // 'text/html; charset=iso-8859-1' + +// from a full path +mime.contentType(path.extname('/path/to/file.json')) // 'application/json; charset=utf-8' +``` + +### mime.extension(type) + +Get the default extension for a content-type. + +```js +mime.extension('application/octet-stream') // 'bin' +``` + +### mime.charset(type) + +Lookup the implied default charset of a content-type. + +```js +mime.charset('text/markdown') // 'UTF-8' +``` + +### var type = mime.types[extension] + +A map of content-types by extension. + +### [extensions...] = mime.extensions[type] + +A map of extensions by content-type. + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/mime-types/master?label=ci +[ci-url]: https://github.com/jshttp/mime-types/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/mime-types/master +[coveralls-url]: https://coveralls.io/r/jshttp/mime-types?branch=master +[node-version-image]: https://badgen.net/npm/node/mime-types +[node-version-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/mime-types +[npm-url]: https://npmjs.org/package/mime-types +[npm-version-image]: https://badgen.net/npm/v/mime-types diff --git a/node_modules/mime-types/index.js b/node_modules/mime-types/index.js new file mode 100644 index 0000000000..b9f34d5991 --- /dev/null +++ b/node_modules/mime-types/index.js @@ -0,0 +1,188 @@ +/*! + * mime-types + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var db = require('mime-db') +var extname = require('path').extname + +/** + * Module variables. + * @private + */ + +var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ +var TEXT_TYPE_REGEXP = /^text\//i + +/** + * Module exports. + * @public + */ + +exports.charset = charset +exports.charsets = { lookup: charset } +exports.contentType = contentType +exports.extension = extension +exports.extensions = Object.create(null) +exports.lookup = lookup +exports.types = Object.create(null) + +// Populate the extensions/types maps +populateMaps(exports.extensions, exports.types) + +/** + * Get the default charset for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function charset (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + var mime = match && db[match[1].toLowerCase()] + + if (mime && mime.charset) { + return mime.charset + } + + // default text/* to utf-8 + if (match && TEXT_TYPE_REGEXP.test(match[1])) { + return 'UTF-8' + } + + return false +} + +/** + * Create a full Content-Type header given a MIME type or extension. + * + * @param {string} str + * @return {boolean|string} + */ + +function contentType (str) { + // TODO: should this even be in this module? 
+ if (!str || typeof str !== 'string') { + return false + } + + var mime = str.indexOf('/') === -1 + ? exports.lookup(str) + : str + + if (!mime) { + return false + } + + // TODO: use content-type or other module + if (mime.indexOf('charset') === -1) { + var charset = exports.charset(mime) + if (charset) mime += '; charset=' + charset.toLowerCase() + } + + return mime +} + +/** + * Get the default extension for a MIME type. + * + * @param {string} type + * @return {boolean|string} + */ + +function extension (type) { + if (!type || typeof type !== 'string') { + return false + } + + // TODO: use media-typer + var match = EXTRACT_TYPE_REGEXP.exec(type) + + // get extensions + var exts = match && exports.extensions[match[1].toLowerCase()] + + if (!exts || !exts.length) { + return false + } + + return exts[0] +} + +/** + * Lookup the MIME type for a file path/extension. + * + * @param {string} path + * @return {boolean|string} + */ + +function lookup (path) { + if (!path || typeof path !== 'string') { + return false + } + + // get the extension ("ext" or ".ext" or full path) + var extension = extname('x.' + path) + .toLowerCase() + .substr(1) + + if (!extension) { + return false + } + + return exports.types[extension] || false +} + +/** + * Populate the extensions and types maps. + * @private + */ + +function populateMaps (extensions, types) { + // source preference (least -> most) + var preference = ['nginx', 'apache', undefined, 'iana'] + + Object.keys(db).forEach(function forEachMimeType (type) { + var mime = db[type] + var exts = mime.extensions + + if (!exts || !exts.length) { + return + } + + // mime -> extensions + extensions[type] = exts + + // extension -> mime + for (var i = 0; i < exts.length; i++) { + var extension = exts[i] + + if (types[extension]) { + var from = preference.indexOf(db[types[extension]].source) + var to = preference.indexOf(mime.source) + + if (types[extension] !== 'application/octet-stream' && + (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { + // skip the remapping + continue + } + } + + // set the extension -> mime + types[extension] = type + } + }) +} diff --git a/node_modules/mime-types/package.json b/node_modules/mime-types/package.json new file mode 100644 index 0000000000..bbef696450 --- /dev/null +++ b/node_modules/mime-types/package.json @@ -0,0 +1,44 @@ +{ + "name": "mime-types", + "description": "The ultimate javascript content-type utility.", + "version": "2.1.35", + "contributors": [ + "Douglas Christopher Wilson ", + "Jeremiah Senkpiel (https://searchbeam.jit.su)", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "keywords": [ + "mime", + "types" + ], + "repository": "jshttp/mime-types", + "dependencies": { + "mime-db": "1.52.0" + }, + "devDependencies": { + "eslint": "7.32.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.25.4", + "eslint-plugin-markdown": "2.2.1", + "eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "5.2.0", + "eslint-plugin-standard": "4.1.0", + "mocha": "9.2.2", + "nyc": "15.1.0" + }, + "files": [ + "HISTORY.md", + "LICENSE", + "index.js" + ], + "engines": { + "node": ">= 0.6" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --reporter spec test/test.js", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test" + } +} diff --git a/node_modules/psl/.env b/node_modules/psl/.env new file mode 100644 index 0000000000..e69de29bb2 diff --git a/node_modules/psl/LICENSE 
b/node_modules/psl/LICENSE new file mode 100644 index 0000000000..78d792eda6 --- /dev/null +++ b/node_modules/psl/LICENSE @@ -0,0 +1,9 @@ +The MIT License (MIT) + +Copyright (c) 2017 Lupo Montero lupomontero@gmail.com + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/psl/README.md b/node_modules/psl/README.md new file mode 100644 index 0000000000..e05710a2b6 --- /dev/null +++ b/node_modules/psl/README.md @@ -0,0 +1,211 @@ +# psl (Public Suffix List) + +[![Node.js CI](https://github.com/lupomontero/psl/actions/workflows/node.js.yml/badge.svg)](https://github.com/lupomontero/psl/actions/workflows/node.js.yml) + +`psl` is a `JavaScript` domain name parser based on the +[Public Suffix List](https://publicsuffix.org/). + +This implementation is tested against the +[test data hosted by Mozilla](http://mxr.mozilla.org/mozilla-central/source/netwerk/test/unit/data/test_psl.txt?raw=1) +and kindly provided by [Comodo](https://www.comodo.com/). + +Cross browser testing provided by +[BrowserStack](https://www.browserstack.com/) + +## What is the Public Suffix List? + +The Public Suffix List is a cross-vendor initiative to provide an accurate list +of domain name suffixes. + +The Public Suffix List is an initiative of the Mozilla Project, but is +maintained as a community resource. It is available for use in any software, +but was originally created to meet the needs of browser manufacturers. + +A "public suffix" is one under which Internet users can directly register names. +Some examples of public suffixes are ".com", ".co.uk" and "pvt.k12.wy.us". The +Public Suffix List is a list of all known public suffixes. + +Source: http://publicsuffix.org + + +## Installation + +### Node.js + +```sh +npm install --save psl +``` + +### Browser + +Download [psl.min.js](https://raw.githubusercontent.com/lupomontero/psl/master/dist/psl.min.js) +and include it in a script tag. + +```html + +``` + +This script is browserified and wrapped in a [umd](https://github.com/umdjs/umd) +wrapper so you should be able to use it standalone or together with a module +loader. + +## API + +### `psl.parse(domain)` + +Parse domain based on Public Suffix List. Returns an `Object` with the following +properties: + +* `tld`: Top level domain (this is the _public suffix_). +* `sld`: Second level domain (the first private part of the domain name). +* `domain`: The domain name is the `sld` + `tld`. +* `subdomain`: Optional parts left of the domain. 
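As a quick orientation before the canonical examples below, here is a minimal sketch (not part of the upstream psl docs; the `describe` helper is hypothetical) showing how the fields above fit together:

```js
// Minimal sketch only: `describe` is a made-up helper built on the
// documented psl.parse() fields described above.
var psl = require('psl');

function describe (hostname) {
  var parsed = psl.parse(hostname);
  return {
    publicSuffix: parsed.tld,           // e.g. 'co.uk'
    registrable: parsed.domain,         // `sld` + '.' + `tld`, e.g. 'example.co.uk'
    hasSubdomain: parsed.subdomain !== null
  };
}

describe('a.b.example.co.uk');
// { publicSuffix: 'co.uk', registrable: 'example.co.uk', hasSubdomain: true }
```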
+ +#### Example: + +```js +var psl = require('psl'); + +// Parse domain without subdomain +var parsed = psl.parse('google.com'); +console.log(parsed.tld); // 'com' +console.log(parsed.sld); // 'google' +console.log(parsed.domain); // 'google.com' +console.log(parsed.subdomain); // null + +// Parse domain with subdomain +var parsed = psl.parse('www.google.com'); +console.log(parsed.tld); // 'com' +console.log(parsed.sld); // 'google' +console.log(parsed.domain); // 'google.com' +console.log(parsed.subdomain); // 'www' + +// Parse domain with nested subdomains +var parsed = psl.parse('a.b.c.d.foo.com'); +console.log(parsed.tld); // 'com' +console.log(parsed.sld); // 'foo' +console.log(parsed.domain); // 'foo.com' +console.log(parsed.subdomain); // 'a.b.c.d' +``` + +### `psl.get(domain)` + +Get domain name, `sld` + `tld`. Returns `null` if not valid. + +#### Example: + +```js +var psl = require('psl'); + +// null input. +psl.get(null); // null + +// Mixed case. +psl.get('COM'); // null +psl.get('example.COM'); // 'example.com' +psl.get('WwW.example.COM'); // 'example.com' + +// Unlisted TLD. +psl.get('example'); // null +psl.get('example.example'); // 'example.example' +psl.get('b.example.example'); // 'example.example' +psl.get('a.b.example.example'); // 'example.example' + +// TLD with only 1 rule. +psl.get('biz'); // null +psl.get('domain.biz'); // 'domain.biz' +psl.get('b.domain.biz'); // 'domain.biz' +psl.get('a.b.domain.biz'); // 'domain.biz' + +// TLD with some 2-level rules. +psl.get('uk.com'); // null +psl.get('example.uk.com'); // 'example.uk.com' +psl.get('b.example.uk.com'); // 'example.uk.com' + +// More complex TLD. +psl.get('c.kobe.jp'); // null +psl.get('b.c.kobe.jp'); // 'b.c.kobe.jp' +psl.get('a.b.c.kobe.jp'); // 'b.c.kobe.jp' +psl.get('city.kobe.jp'); // 'city.kobe.jp' +psl.get('www.city.kobe.jp'); // 'city.kobe.jp' + +// IDN labels. +psl.get('食狮.com.cn'); // '食狮.com.cn' +psl.get('食狮.公司.cn'); // '食狮.公司.cn' +psl.get('www.食狮.公司.cn'); // '食狮.公司.cn' + +// Same as above, but punycoded. +psl.get('xn--85x722f.com.cn'); // 'xn--85x722f.com.cn' +psl.get('xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn' +psl.get('www.xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn' +``` + +### `psl.isValid(domain)` + +Check whether a domain has a valid Public Suffix. Returns a `Boolean`. + +#### Example + +```js +var psl = require('psl'); + +psl.isValid('google.com'); // true +psl.isValid('www.google.com'); // true +psl.isValid('x.yz'); // false +``` + + +## Testing and Building + +Tests are written using [`mocha`](https://mochajs.org/) and can be +run in two different environments: `node` and `phantomjs`. + +```sh +# This will run `eslint`, `mocha` and `karma`. +npm test + +# Individual test environments +# Run tests in node only. +./node_modules/.bin/mocha test +# Run tests in phantomjs only. +./node_modules/.bin/karma start ./karma.conf.js --single-run + +# Build data (parse raw list) and create dist files +npm run build +``` + +Feel free to fork if you see possible improvements! + + +## Acknowledgements + +* Mozilla Foundation's [Public Suffix List](https://publicsuffix.org/) +* Thanks to Rob Stradling of [Comodo](https://www.comodo.com/) for providing + test data. 
+* Inspired by [weppos/publicsuffix-ruby](https://github.com/weppos/publicsuffix-ruby) + + +## License + +The MIT License (MIT) + +Copyright (c) 2017 Lupo Montero + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/psl/browserstack-logo.svg b/node_modules/psl/browserstack-logo.svg new file mode 100644 index 0000000000..195f64d2fe --- /dev/null +++ b/node_modules/psl/browserstack-logo.svg @@ -0,0 +1,90 @@ + + + + +Browserstack-logo-white + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/node_modules/psl/data/rules.json b/node_modules/psl/data/rules.json new file mode 100644 index 0000000000..aaae4173cf --- /dev/null +++ b/node_modules/psl/data/rules.json @@ -0,0 +1,9376 @@ +[ +"ac", +"com.ac", +"edu.ac", +"gov.ac", +"net.ac", +"mil.ac", +"org.ac", +"ad", +"nom.ad", +"ae", +"co.ae", +"net.ae", +"org.ae", +"sch.ae", +"ac.ae", +"gov.ae", +"mil.ae", +"aero", +"accident-investigation.aero", +"accident-prevention.aero", +"aerobatic.aero", +"aeroclub.aero", +"aerodrome.aero", +"agents.aero", +"aircraft.aero", +"airline.aero", +"airport.aero", +"air-surveillance.aero", +"airtraffic.aero", +"air-traffic-control.aero", +"ambulance.aero", +"amusement.aero", +"association.aero", +"author.aero", +"ballooning.aero", +"broker.aero", +"caa.aero", +"cargo.aero", +"catering.aero", +"certification.aero", +"championship.aero", +"charter.aero", +"civilaviation.aero", +"club.aero", +"conference.aero", +"consultant.aero", +"consulting.aero", +"control.aero", +"council.aero", +"crew.aero", +"design.aero", +"dgca.aero", +"educator.aero", +"emergency.aero", +"engine.aero", +"engineer.aero", +"entertainment.aero", +"equipment.aero", +"exchange.aero", +"express.aero", +"federation.aero", +"flight.aero", +"fuel.aero", +"gliding.aero", +"government.aero", +"groundhandling.aero", +"group.aero", +"hanggliding.aero", +"homebuilt.aero", +"insurance.aero", +"journal.aero", +"journalist.aero", +"leasing.aero", +"logistics.aero", +"magazine.aero", +"maintenance.aero", +"media.aero", +"microlight.aero", +"modelling.aero", +"navigation.aero", +"parachuting.aero", +"paragliding.aero", +"passenger-association.aero", +"pilot.aero", +"press.aero", +"production.aero", +"recreation.aero", +"repbody.aero", +"res.aero", +"research.aero", +"rotorcraft.aero", +"safety.aero", +"scientist.aero", +"services.aero", +"show.aero", +"skydiving.aero", +"software.aero", +"student.aero", +"trader.aero", +"trading.aero", +"trainer.aero", +"union.aero", +"workinggroup.aero", +"works.aero", +"af", +"gov.af", +"com.af", +"org.af", 
+"net.af", +"edu.af", +"ag", +"com.ag", +"org.ag", +"net.ag", +"co.ag", +"nom.ag", +"ai", +"off.ai", +"com.ai", +"net.ai", +"org.ai", +"al", +"com.al", +"edu.al", +"gov.al", +"mil.al", +"net.al", +"org.al", +"am", +"co.am", +"com.am", +"commune.am", +"net.am", +"org.am", +"ao", +"ed.ao", +"gv.ao", +"og.ao", +"co.ao", +"pb.ao", +"it.ao", +"aq", +"ar", +"bet.ar", +"com.ar", +"coop.ar", +"edu.ar", +"gob.ar", +"gov.ar", +"int.ar", +"mil.ar", +"musica.ar", +"mutual.ar", +"net.ar", +"org.ar", +"senasa.ar", +"tur.ar", +"arpa", +"e164.arpa", +"in-addr.arpa", +"ip6.arpa", +"iris.arpa", +"uri.arpa", +"urn.arpa", +"as", +"gov.as", +"asia", +"at", +"ac.at", +"co.at", +"gv.at", +"or.at", +"sth.ac.at", +"au", +"com.au", +"net.au", +"org.au", +"edu.au", +"gov.au", +"asn.au", +"id.au", +"info.au", +"conf.au", +"oz.au", +"act.au", +"nsw.au", +"nt.au", +"qld.au", +"sa.au", +"tas.au", +"vic.au", +"wa.au", +"act.edu.au", +"catholic.edu.au", +"nsw.edu.au", +"nt.edu.au", +"qld.edu.au", +"sa.edu.au", +"tas.edu.au", +"vic.edu.au", +"wa.edu.au", +"qld.gov.au", +"sa.gov.au", +"tas.gov.au", +"vic.gov.au", +"wa.gov.au", +"schools.nsw.edu.au", +"aw", +"com.aw", +"ax", +"az", +"com.az", +"net.az", +"int.az", +"gov.az", +"org.az", +"edu.az", +"info.az", +"pp.az", +"mil.az", +"name.az", +"pro.az", +"biz.az", +"ba", +"com.ba", +"edu.ba", +"gov.ba", +"mil.ba", +"net.ba", +"org.ba", +"bb", +"biz.bb", +"co.bb", +"com.bb", +"edu.bb", +"gov.bb", +"info.bb", +"net.bb", +"org.bb", +"store.bb", +"tv.bb", +"*.bd", +"be", +"ac.be", +"bf", +"gov.bf", +"bg", +"a.bg", +"b.bg", +"c.bg", +"d.bg", +"e.bg", +"f.bg", +"g.bg", +"h.bg", +"i.bg", +"j.bg", +"k.bg", +"l.bg", +"m.bg", +"n.bg", +"o.bg", +"p.bg", +"q.bg", +"r.bg", +"s.bg", +"t.bg", +"u.bg", +"v.bg", +"w.bg", +"x.bg", +"y.bg", +"z.bg", +"0.bg", +"1.bg", +"2.bg", +"3.bg", +"4.bg", +"5.bg", +"6.bg", +"7.bg", +"8.bg", +"9.bg", +"bh", +"com.bh", +"edu.bh", +"net.bh", +"org.bh", +"gov.bh", +"bi", +"co.bi", +"com.bi", +"edu.bi", +"or.bi", +"org.bi", +"biz", +"bj", +"asso.bj", +"barreau.bj", +"gouv.bj", +"bm", +"com.bm", +"edu.bm", +"gov.bm", +"net.bm", +"org.bm", +"bn", +"com.bn", +"edu.bn", +"gov.bn", +"net.bn", +"org.bn", +"bo", +"com.bo", +"edu.bo", +"gob.bo", +"int.bo", +"org.bo", +"net.bo", +"mil.bo", +"tv.bo", +"web.bo", +"academia.bo", +"agro.bo", +"arte.bo", +"blog.bo", +"bolivia.bo", +"ciencia.bo", +"cooperativa.bo", +"democracia.bo", +"deporte.bo", +"ecologia.bo", +"economia.bo", +"empresa.bo", +"indigena.bo", +"industria.bo", +"info.bo", +"medicina.bo", +"movimiento.bo", +"musica.bo", +"natural.bo", +"nombre.bo", +"noticias.bo", +"patria.bo", +"politica.bo", +"profesional.bo", +"plurinacional.bo", +"pueblo.bo", +"revista.bo", +"salud.bo", +"tecnologia.bo", +"tksat.bo", +"transporte.bo", +"wiki.bo", +"br", +"9guacu.br", +"abc.br", +"adm.br", +"adv.br", +"agr.br", +"aju.br", +"am.br", +"anani.br", +"aparecida.br", +"app.br", +"arq.br", +"art.br", +"ato.br", +"b.br", +"barueri.br", +"belem.br", +"bhz.br", +"bib.br", +"bio.br", +"blog.br", +"bmd.br", +"boavista.br", +"bsb.br", +"campinagrande.br", +"campinas.br", +"caxias.br", +"cim.br", +"cng.br", +"cnt.br", +"com.br", +"contagem.br", +"coop.br", +"coz.br", +"cri.br", +"cuiaba.br", +"curitiba.br", +"def.br", +"des.br", +"det.br", +"dev.br", +"ecn.br", +"eco.br", +"edu.br", +"emp.br", +"enf.br", +"eng.br", +"esp.br", +"etc.br", +"eti.br", +"far.br", +"feira.br", +"flog.br", +"floripa.br", +"fm.br", +"fnd.br", +"fortal.br", +"fot.br", +"foz.br", +"fst.br", +"g12.br", +"geo.br", +"ggf.br", +"goiania.br", +"gov.br", +"ac.gov.br", 
+"al.gov.br", +"am.gov.br", +"ap.gov.br", +"ba.gov.br", +"ce.gov.br", +"df.gov.br", +"es.gov.br", +"go.gov.br", +"ma.gov.br", +"mg.gov.br", +"ms.gov.br", +"mt.gov.br", +"pa.gov.br", +"pb.gov.br", +"pe.gov.br", +"pi.gov.br", +"pr.gov.br", +"rj.gov.br", +"rn.gov.br", +"ro.gov.br", +"rr.gov.br", +"rs.gov.br", +"sc.gov.br", +"se.gov.br", +"sp.gov.br", +"to.gov.br", +"gru.br", +"imb.br", +"ind.br", +"inf.br", +"jab.br", +"jampa.br", +"jdf.br", +"joinville.br", +"jor.br", +"jus.br", +"leg.br", +"lel.br", +"log.br", +"londrina.br", +"macapa.br", +"maceio.br", +"manaus.br", +"maringa.br", +"mat.br", +"med.br", +"mil.br", +"morena.br", +"mp.br", +"mus.br", +"natal.br", +"net.br", +"niteroi.br", +"*.nom.br", +"not.br", +"ntr.br", +"odo.br", +"ong.br", +"org.br", +"osasco.br", +"palmas.br", +"poa.br", +"ppg.br", +"pro.br", +"psc.br", +"psi.br", +"pvh.br", +"qsl.br", +"radio.br", +"rec.br", +"recife.br", +"rep.br", +"ribeirao.br", +"rio.br", +"riobranco.br", +"riopreto.br", +"salvador.br", +"sampa.br", +"santamaria.br", +"santoandre.br", +"saobernardo.br", +"saogonca.br", +"seg.br", +"sjc.br", +"slg.br", +"slz.br", +"sorocaba.br", +"srv.br", +"taxi.br", +"tc.br", +"tec.br", +"teo.br", +"the.br", +"tmp.br", +"trd.br", +"tur.br", +"tv.br", +"udi.br", +"vet.br", +"vix.br", +"vlog.br", +"wiki.br", +"zlg.br", +"bs", +"com.bs", +"net.bs", +"org.bs", +"edu.bs", +"gov.bs", +"bt", +"com.bt", +"edu.bt", +"gov.bt", +"net.bt", +"org.bt", +"bv", +"bw", +"co.bw", +"org.bw", +"by", +"gov.by", +"mil.by", +"com.by", +"of.by", +"bz", +"com.bz", +"net.bz", +"org.bz", +"edu.bz", +"gov.bz", +"ca", +"ab.ca", +"bc.ca", +"mb.ca", +"nb.ca", +"nf.ca", +"nl.ca", +"ns.ca", +"nt.ca", +"nu.ca", +"on.ca", +"pe.ca", +"qc.ca", +"sk.ca", +"yk.ca", +"gc.ca", +"cat", +"cc", +"cd", +"gov.cd", +"cf", +"cg", +"ch", +"ci", +"org.ci", +"or.ci", +"com.ci", +"co.ci", +"edu.ci", +"ed.ci", +"ac.ci", +"net.ci", +"go.ci", +"asso.ci", +"aéroport.ci", +"int.ci", +"presse.ci", +"md.ci", +"gouv.ci", +"*.ck", +"!www.ck", +"cl", +"co.cl", +"gob.cl", +"gov.cl", +"mil.cl", +"cm", +"co.cm", +"com.cm", +"gov.cm", +"net.cm", +"cn", +"ac.cn", +"com.cn", +"edu.cn", +"gov.cn", +"net.cn", +"org.cn", +"mil.cn", +"公司.cn", +"网络.cn", +"網絡.cn", +"ah.cn", +"bj.cn", +"cq.cn", +"fj.cn", +"gd.cn", +"gs.cn", +"gz.cn", +"gx.cn", +"ha.cn", +"hb.cn", +"he.cn", +"hi.cn", +"hl.cn", +"hn.cn", +"jl.cn", +"js.cn", +"jx.cn", +"ln.cn", +"nm.cn", +"nx.cn", +"qh.cn", +"sc.cn", +"sd.cn", +"sh.cn", +"sn.cn", +"sx.cn", +"tj.cn", +"xj.cn", +"xz.cn", +"yn.cn", +"zj.cn", +"hk.cn", +"mo.cn", +"tw.cn", +"co", +"arts.co", +"com.co", +"edu.co", +"firm.co", +"gov.co", +"info.co", +"int.co", +"mil.co", +"net.co", +"nom.co", +"org.co", +"rec.co", +"web.co", +"com", +"coop", +"cr", +"ac.cr", +"co.cr", +"ed.cr", +"fi.cr", +"go.cr", +"or.cr", +"sa.cr", +"cu", +"com.cu", +"edu.cu", +"org.cu", +"net.cu", +"gov.cu", +"inf.cu", +"cv", +"com.cv", +"edu.cv", +"int.cv", +"nome.cv", +"org.cv", +"cw", +"com.cw", +"edu.cw", +"net.cw", +"org.cw", +"cx", +"gov.cx", +"cy", +"ac.cy", +"biz.cy", +"com.cy", +"ekloges.cy", +"gov.cy", +"ltd.cy", +"mil.cy", +"net.cy", +"org.cy", +"press.cy", +"pro.cy", +"tm.cy", +"cz", +"de", +"dj", +"dk", +"dm", +"com.dm", +"net.dm", +"org.dm", +"edu.dm", +"gov.dm", +"do", +"art.do", +"com.do", +"edu.do", +"gob.do", +"gov.do", +"mil.do", +"net.do", +"org.do", +"sld.do", +"web.do", +"dz", +"art.dz", +"asso.dz", +"com.dz", +"edu.dz", +"gov.dz", +"org.dz", +"net.dz", +"pol.dz", +"soc.dz", +"tm.dz", +"ec", +"com.ec", +"info.ec", +"net.ec", +"fin.ec", +"k12.ec", +"med.ec", +"pro.ec", 
+"org.ec", +"edu.ec", +"gov.ec", +"gob.ec", +"mil.ec", +"edu", +"ee", +"edu.ee", +"gov.ee", +"riik.ee", +"lib.ee", +"med.ee", +"com.ee", +"pri.ee", +"aip.ee", +"org.ee", +"fie.ee", +"eg", +"com.eg", +"edu.eg", +"eun.eg", +"gov.eg", +"mil.eg", +"name.eg", +"net.eg", +"org.eg", +"sci.eg", +"*.er", +"es", +"com.es", +"nom.es", +"org.es", +"gob.es", +"edu.es", +"et", +"com.et", +"gov.et", +"org.et", +"edu.et", +"biz.et", +"name.et", +"info.et", +"net.et", +"eu", +"fi", +"aland.fi", +"fj", +"ac.fj", +"biz.fj", +"com.fj", +"gov.fj", +"info.fj", +"mil.fj", +"name.fj", +"net.fj", +"org.fj", +"pro.fj", +"*.fk", +"com.fm", +"edu.fm", +"net.fm", +"org.fm", +"fm", +"fo", +"fr", +"asso.fr", +"com.fr", +"gouv.fr", +"nom.fr", +"prd.fr", +"tm.fr", +"aeroport.fr", +"avocat.fr", +"avoues.fr", +"cci.fr", +"chambagri.fr", +"chirurgiens-dentistes.fr", +"experts-comptables.fr", +"geometre-expert.fr", +"greta.fr", +"huissier-justice.fr", +"medecin.fr", +"notaires.fr", +"pharmacien.fr", +"port.fr", +"veterinaire.fr", +"ga", +"gb", +"edu.gd", +"gov.gd", +"gd", +"ge", +"com.ge", +"edu.ge", +"gov.ge", +"org.ge", +"mil.ge", +"net.ge", +"pvt.ge", +"gf", +"gg", +"co.gg", +"net.gg", +"org.gg", +"gh", +"com.gh", +"edu.gh", +"gov.gh", +"org.gh", +"mil.gh", +"gi", +"com.gi", +"ltd.gi", +"gov.gi", +"mod.gi", +"edu.gi", +"org.gi", +"gl", +"co.gl", +"com.gl", +"edu.gl", +"net.gl", +"org.gl", +"gm", +"gn", +"ac.gn", +"com.gn", +"edu.gn", +"gov.gn", +"org.gn", +"net.gn", +"gov", +"gp", +"com.gp", +"net.gp", +"mobi.gp", +"edu.gp", +"org.gp", +"asso.gp", +"gq", +"gr", +"com.gr", +"edu.gr", +"net.gr", +"org.gr", +"gov.gr", +"gs", +"gt", +"com.gt", +"edu.gt", +"gob.gt", +"ind.gt", +"mil.gt", +"net.gt", +"org.gt", +"gu", +"com.gu", +"edu.gu", +"gov.gu", +"guam.gu", +"info.gu", +"net.gu", +"org.gu", +"web.gu", +"gw", +"gy", +"co.gy", +"com.gy", +"edu.gy", +"gov.gy", +"net.gy", +"org.gy", +"hk", +"com.hk", +"edu.hk", +"gov.hk", +"idv.hk", +"net.hk", +"org.hk", +"公司.hk", +"教育.hk", +"敎育.hk", +"政府.hk", +"個人.hk", +"个��.hk", +"箇人.hk", +"網络.hk", +"网络.hk", +"组織.hk", +"網絡.hk", +"网絡.hk", +"组织.hk", +"組織.hk", +"組织.hk", +"hm", +"hn", +"com.hn", +"edu.hn", +"org.hn", +"net.hn", +"mil.hn", +"gob.hn", +"hr", +"iz.hr", +"from.hr", +"name.hr", +"com.hr", +"ht", +"com.ht", +"shop.ht", +"firm.ht", +"info.ht", +"adult.ht", +"net.ht", +"pro.ht", +"org.ht", +"med.ht", +"art.ht", +"coop.ht", +"pol.ht", +"asso.ht", +"edu.ht", +"rel.ht", +"gouv.ht", +"perso.ht", +"hu", +"co.hu", +"info.hu", +"org.hu", +"priv.hu", +"sport.hu", +"tm.hu", +"2000.hu", +"agrar.hu", +"bolt.hu", +"casino.hu", +"city.hu", +"erotica.hu", +"erotika.hu", +"film.hu", +"forum.hu", +"games.hu", +"hotel.hu", +"ingatlan.hu", +"jogasz.hu", +"konyvelo.hu", +"lakas.hu", +"media.hu", +"news.hu", +"reklam.hu", +"sex.hu", +"shop.hu", +"suli.hu", +"szex.hu", +"tozsde.hu", +"utazas.hu", +"video.hu", +"id", +"ac.id", +"biz.id", +"co.id", +"desa.id", +"go.id", +"mil.id", +"my.id", +"net.id", +"or.id", +"ponpes.id", +"sch.id", +"web.id", +"ie", +"gov.ie", +"il", +"ac.il", +"co.il", +"gov.il", +"idf.il", +"k12.il", +"muni.il", +"net.il", +"org.il", +"im", +"ac.im", +"co.im", +"com.im", +"ltd.co.im", +"net.im", +"org.im", +"plc.co.im", +"tt.im", +"tv.im", +"in", +"co.in", +"firm.in", +"net.in", +"org.in", +"gen.in", +"ind.in", +"nic.in", +"ac.in", +"edu.in", +"res.in", +"gov.in", +"mil.in", +"info", +"int", +"eu.int", +"io", +"com.io", +"iq", +"gov.iq", +"edu.iq", +"mil.iq", +"com.iq", +"org.iq", +"net.iq", +"ir", +"ac.ir", +"co.ir", +"gov.ir", +"id.ir", +"net.ir", +"org.ir", +"sch.ir", +"ایران.ir", 
+"ايران.ir", +"is", +"net.is", +"com.is", +"edu.is", +"gov.is", +"org.is", +"int.is", +"it", +"gov.it", +"edu.it", +"abr.it", +"abruzzo.it", +"aosta-valley.it", +"aostavalley.it", +"bas.it", +"basilicata.it", +"cal.it", +"calabria.it", +"cam.it", +"campania.it", +"emilia-romagna.it", +"emiliaromagna.it", +"emr.it", +"friuli-v-giulia.it", +"friuli-ve-giulia.it", +"friuli-vegiulia.it", +"friuli-venezia-giulia.it", +"friuli-veneziagiulia.it", +"friuli-vgiulia.it", +"friuliv-giulia.it", +"friulive-giulia.it", +"friulivegiulia.it", +"friulivenezia-giulia.it", +"friuliveneziagiulia.it", +"friulivgiulia.it", +"fvg.it", +"laz.it", +"lazio.it", +"lig.it", +"liguria.it", +"lom.it", +"lombardia.it", +"lombardy.it", +"lucania.it", +"mar.it", +"marche.it", +"mol.it", +"molise.it", +"piedmont.it", +"piemonte.it", +"pmn.it", +"pug.it", +"puglia.it", +"sar.it", +"sardegna.it", +"sardinia.it", +"sic.it", +"sicilia.it", +"sicily.it", +"taa.it", +"tos.it", +"toscana.it", +"trentin-sud-tirol.it", +"trentin-süd-tirol.it", +"trentin-sudtirol.it", +"trentin-südtirol.it", +"trentin-sued-tirol.it", +"trentin-suedtirol.it", +"trentino-a-adige.it", +"trentino-aadige.it", +"trentino-alto-adige.it", +"trentino-altoadige.it", +"trentino-s-tirol.it", +"trentino-stirol.it", +"trentino-sud-tirol.it", +"trentino-süd-tirol.it", +"trentino-sudtirol.it", +"trentino-südtirol.it", +"trentino-sued-tirol.it", +"trentino-suedtirol.it", +"trentino.it", +"trentinoa-adige.it", +"trentinoaadige.it", +"trentinoalto-adige.it", +"trentinoaltoadige.it", +"trentinos-tirol.it", +"trentinostirol.it", +"trentinosud-tirol.it", +"trentinosüd-tirol.it", +"trentinosudtirol.it", +"trentinosüdtirol.it", +"trentinosued-tirol.it", +"trentinosuedtirol.it", +"trentinsud-tirol.it", +"trentinsüd-tirol.it", +"trentinsudtirol.it", +"trentinsüdtirol.it", +"trentinsued-tirol.it", +"trentinsuedtirol.it", +"tuscany.it", +"umb.it", +"umbria.it", +"val-d-aosta.it", +"val-daosta.it", +"vald-aosta.it", +"valdaosta.it", +"valle-aosta.it", +"valle-d-aosta.it", +"valle-daosta.it", +"valleaosta.it", +"valled-aosta.it", +"valledaosta.it", +"vallee-aoste.it", +"vallée-aoste.it", +"vallee-d-aoste.it", +"vallée-d-aoste.it", +"valleeaoste.it", +"valléeaoste.it", +"valleedaoste.it", +"valléedaoste.it", +"vao.it", +"vda.it", +"ven.it", +"veneto.it", +"ag.it", +"agrigento.it", +"al.it", +"alessandria.it", +"alto-adige.it", +"altoadige.it", +"an.it", +"ancona.it", +"andria-barletta-trani.it", +"andria-trani-barletta.it", +"andriabarlettatrani.it", +"andriatranibarletta.it", +"ao.it", +"aosta.it", +"aoste.it", +"ap.it", +"aq.it", +"aquila.it", +"ar.it", +"arezzo.it", +"ascoli-piceno.it", +"ascolipiceno.it", +"asti.it", +"at.it", +"av.it", +"avellino.it", +"ba.it", +"balsan-sudtirol.it", +"balsan-südtirol.it", +"balsan-suedtirol.it", +"balsan.it", +"bari.it", +"barletta-trani-andria.it", +"barlettatraniandria.it", +"belluno.it", +"benevento.it", +"bergamo.it", +"bg.it", +"bi.it", +"biella.it", +"bl.it", +"bn.it", +"bo.it", +"bologna.it", +"bolzano-altoadige.it", +"bolzano.it", +"bozen-sudtirol.it", +"bozen-südtirol.it", +"bozen-suedtirol.it", +"bozen.it", +"br.it", +"brescia.it", +"brindisi.it", +"bs.it", +"bt.it", +"bulsan-sudtirol.it", +"bulsan-südtirol.it", +"bulsan-suedtirol.it", +"bulsan.it", +"bz.it", +"ca.it", +"cagliari.it", +"caltanissetta.it", +"campidano-medio.it", +"campidanomedio.it", +"campobasso.it", +"carbonia-iglesias.it", +"carboniaiglesias.it", +"carrara-massa.it", +"carraramassa.it", +"caserta.it", +"catania.it", +"catanzaro.it", +"cb.it", +"ce.it", 
+"cesena-forli.it", +"cesena-forlì.it", +"cesenaforli.it", +"cesenaforlì.it", +"ch.it", +"chieti.it", +"ci.it", +"cl.it", +"cn.it", +"co.it", +"como.it", +"cosenza.it", +"cr.it", +"cremona.it", +"crotone.it", +"cs.it", +"ct.it", +"cuneo.it", +"cz.it", +"dell-ogliastra.it", +"dellogliastra.it", +"en.it", +"enna.it", +"fc.it", +"fe.it", +"fermo.it", +"ferrara.it", +"fg.it", +"fi.it", +"firenze.it", +"florence.it", +"fm.it", +"foggia.it", +"forli-cesena.it", +"forlì-cesena.it", +"forlicesena.it", +"forlìcesena.it", +"fr.it", +"frosinone.it", +"ge.it", +"genoa.it", +"genova.it", +"go.it", +"gorizia.it", +"gr.it", +"grosseto.it", +"iglesias-carbonia.it", +"iglesiascarbonia.it", +"im.it", +"imperia.it", +"is.it", +"isernia.it", +"kr.it", +"la-spezia.it", +"laquila.it", +"laspezia.it", +"latina.it", +"lc.it", +"le.it", +"lecce.it", +"lecco.it", +"li.it", +"livorno.it", +"lo.it", +"lodi.it", +"lt.it", +"lu.it", +"lucca.it", +"macerata.it", +"mantova.it", +"massa-carrara.it", +"massacarrara.it", +"matera.it", +"mb.it", +"mc.it", +"me.it", +"medio-campidano.it", +"mediocampidano.it", +"messina.it", +"mi.it", +"milan.it", +"milano.it", +"mn.it", +"mo.it", +"modena.it", +"monza-brianza.it", +"monza-e-della-brianza.it", +"monza.it", +"monzabrianza.it", +"monzaebrianza.it", +"monzaedellabrianza.it", +"ms.it", +"mt.it", +"na.it", +"naples.it", +"napoli.it", +"no.it", +"novara.it", +"nu.it", +"nuoro.it", +"og.it", +"ogliastra.it", +"olbia-tempio.it", +"olbiatempio.it", +"or.it", +"oristano.it", +"ot.it", +"pa.it", +"padova.it", +"padua.it", +"palermo.it", +"parma.it", +"pavia.it", +"pc.it", +"pd.it", +"pe.it", +"perugia.it", +"pesaro-urbino.it", +"pesarourbino.it", +"pescara.it", +"pg.it", +"pi.it", +"piacenza.it", +"pisa.it", +"pistoia.it", +"pn.it", +"po.it", +"pordenone.it", +"potenza.it", +"pr.it", +"prato.it", +"pt.it", +"pu.it", +"pv.it", +"pz.it", +"ra.it", +"ragusa.it", +"ravenna.it", +"rc.it", +"re.it", +"reggio-calabria.it", +"reggio-emilia.it", +"reggiocalabria.it", +"reggioemilia.it", +"rg.it", +"ri.it", +"rieti.it", +"rimini.it", +"rm.it", +"rn.it", +"ro.it", +"roma.it", +"rome.it", +"rovigo.it", +"sa.it", +"salerno.it", +"sassari.it", +"savona.it", +"si.it", +"siena.it", +"siracusa.it", +"so.it", +"sondrio.it", +"sp.it", +"sr.it", +"ss.it", +"suedtirol.it", +"südtirol.it", +"sv.it", +"ta.it", +"taranto.it", +"te.it", +"tempio-olbia.it", +"tempioolbia.it", +"teramo.it", +"terni.it", +"tn.it", +"to.it", +"torino.it", +"tp.it", +"tr.it", +"trani-andria-barletta.it", +"trani-barletta-andria.it", +"traniandriabarletta.it", +"tranibarlettaandria.it", +"trapani.it", +"trento.it", +"treviso.it", +"trieste.it", +"ts.it", +"turin.it", +"tv.it", +"ud.it", +"udine.it", +"urbino-pesaro.it", +"urbinopesaro.it", +"va.it", +"varese.it", +"vb.it", +"vc.it", +"ve.it", +"venezia.it", +"venice.it", +"verbania.it", +"vercelli.it", +"verona.it", +"vi.it", +"vibo-valentia.it", +"vibovalentia.it", +"vicenza.it", +"viterbo.it", +"vr.it", +"vs.it", +"vt.it", +"vv.it", +"je", +"co.je", +"net.je", +"org.je", +"*.jm", +"jo", +"com.jo", +"org.jo", +"net.jo", +"edu.jo", +"sch.jo", +"gov.jo", +"mil.jo", +"name.jo", +"jobs", +"jp", +"ac.jp", +"ad.jp", +"co.jp", +"ed.jp", +"go.jp", +"gr.jp", +"lg.jp", +"ne.jp", +"or.jp", +"aichi.jp", +"akita.jp", +"aomori.jp", +"chiba.jp", +"ehime.jp", +"fukui.jp", +"fukuoka.jp", +"fukushima.jp", +"gifu.jp", +"gunma.jp", +"hiroshima.jp", +"hokkaido.jp", +"hyogo.jp", +"ibaraki.jp", +"ishikawa.jp", +"iwate.jp", +"kagawa.jp", +"kagoshima.jp", +"kanagawa.jp", +"kochi.jp", +"kumamoto.jp", 
+"kyoto.jp", +"mie.jp", +"miyagi.jp", +"miyazaki.jp", +"nagano.jp", +"nagasaki.jp", +"nara.jp", +"niigata.jp", +"oita.jp", +"okayama.jp", +"okinawa.jp", +"osaka.jp", +"saga.jp", +"saitama.jp", +"shiga.jp", +"shimane.jp", +"shizuoka.jp", +"tochigi.jp", +"tokushima.jp", +"tokyo.jp", +"tottori.jp", +"toyama.jp", +"wakayama.jp", +"yamagata.jp", +"yamaguchi.jp", +"yamanashi.jp", +"栃木.jp", +"愛知.jp", +"愛媛.jp", +"兵庫.jp", +"熊本.jp", +"茨城.jp", +"北海道.jp", +"千葉.jp", +"和歌山.jp", +"長崎.jp", +"長野.jp", +"新潟.jp", +"青森.jp", +"静岡.jp", +"東京.jp", +"石川.jp", +"埼玉.jp", +"三重.jp", +"京都.jp", +"佐賀.jp", +"大分.jp", +"大阪.jp", +"奈良.jp", +"宮城.jp", +"宮崎.jp", +"富山.jp", +"山口.jp", +"山形.jp", +"山梨.jp", +"岩手.jp", +"岐阜.jp", +"岡山.jp", +"島根.jp", +"広島.jp", +"徳島.jp", +"沖縄.jp", +"滋賀.jp", +"神奈川.jp", +"福井.jp", +"福岡.jp", +"福島.jp", +"秋田.jp", +"群馬.jp", +"香川.jp", +"高知.jp", +"鳥取.jp", +"鹿児島.jp", +"*.kawasaki.jp", +"*.kitakyushu.jp", +"*.kobe.jp", +"*.nagoya.jp", +"*.sapporo.jp", +"*.sendai.jp", +"*.yokohama.jp", +"!city.kawasaki.jp", +"!city.kitakyushu.jp", +"!city.kobe.jp", +"!city.nagoya.jp", +"!city.sapporo.jp", +"!city.sendai.jp", +"!city.yokohama.jp", +"aisai.aichi.jp", +"ama.aichi.jp", +"anjo.aichi.jp", +"asuke.aichi.jp", +"chiryu.aichi.jp", +"chita.aichi.jp", +"fuso.aichi.jp", +"gamagori.aichi.jp", +"handa.aichi.jp", +"hazu.aichi.jp", +"hekinan.aichi.jp", +"higashiura.aichi.jp", +"ichinomiya.aichi.jp", +"inazawa.aichi.jp", +"inuyama.aichi.jp", +"isshiki.aichi.jp", +"iwakura.aichi.jp", +"kanie.aichi.jp", +"kariya.aichi.jp", +"kasugai.aichi.jp", +"kira.aichi.jp", +"kiyosu.aichi.jp", +"komaki.aichi.jp", +"konan.aichi.jp", +"kota.aichi.jp", +"mihama.aichi.jp", +"miyoshi.aichi.jp", +"nishio.aichi.jp", +"nisshin.aichi.jp", +"obu.aichi.jp", +"oguchi.aichi.jp", +"oharu.aichi.jp", +"okazaki.aichi.jp", +"owariasahi.aichi.jp", +"seto.aichi.jp", +"shikatsu.aichi.jp", +"shinshiro.aichi.jp", +"shitara.aichi.jp", +"tahara.aichi.jp", +"takahama.aichi.jp", +"tobishima.aichi.jp", +"toei.aichi.jp", +"togo.aichi.jp", +"tokai.aichi.jp", +"tokoname.aichi.jp", +"toyoake.aichi.jp", +"toyohashi.aichi.jp", +"toyokawa.aichi.jp", +"toyone.aichi.jp", +"toyota.aichi.jp", +"tsushima.aichi.jp", +"yatomi.aichi.jp", +"akita.akita.jp", +"daisen.akita.jp", +"fujisato.akita.jp", +"gojome.akita.jp", +"hachirogata.akita.jp", +"happou.akita.jp", +"higashinaruse.akita.jp", +"honjo.akita.jp", +"honjyo.akita.jp", +"ikawa.akita.jp", +"kamikoani.akita.jp", +"kamioka.akita.jp", +"katagami.akita.jp", +"kazuno.akita.jp", +"kitaakita.akita.jp", +"kosaka.akita.jp", +"kyowa.akita.jp", +"misato.akita.jp", +"mitane.akita.jp", +"moriyoshi.akita.jp", +"nikaho.akita.jp", +"noshiro.akita.jp", +"odate.akita.jp", +"oga.akita.jp", +"ogata.akita.jp", +"semboku.akita.jp", +"yokote.akita.jp", +"yurihonjo.akita.jp", +"aomori.aomori.jp", +"gonohe.aomori.jp", +"hachinohe.aomori.jp", +"hashikami.aomori.jp", +"hiranai.aomori.jp", +"hirosaki.aomori.jp", +"itayanagi.aomori.jp", +"kuroishi.aomori.jp", +"misawa.aomori.jp", +"mutsu.aomori.jp", +"nakadomari.aomori.jp", +"noheji.aomori.jp", +"oirase.aomori.jp", +"owani.aomori.jp", +"rokunohe.aomori.jp", +"sannohe.aomori.jp", +"shichinohe.aomori.jp", +"shingo.aomori.jp", +"takko.aomori.jp", +"towada.aomori.jp", +"tsugaru.aomori.jp", +"tsuruta.aomori.jp", +"abiko.chiba.jp", +"asahi.chiba.jp", +"chonan.chiba.jp", +"chosei.chiba.jp", +"choshi.chiba.jp", +"chuo.chiba.jp", +"funabashi.chiba.jp", +"futtsu.chiba.jp", +"hanamigawa.chiba.jp", +"ichihara.chiba.jp", +"ichikawa.chiba.jp", +"ichinomiya.chiba.jp", +"inzai.chiba.jp", +"isumi.chiba.jp", +"kamagaya.chiba.jp", 
+"kamogawa.chiba.jp", +"kashiwa.chiba.jp", +"katori.chiba.jp", +"katsuura.chiba.jp", +"kimitsu.chiba.jp", +"kisarazu.chiba.jp", +"kozaki.chiba.jp", +"kujukuri.chiba.jp", +"kyonan.chiba.jp", +"matsudo.chiba.jp", +"midori.chiba.jp", +"mihama.chiba.jp", +"minamiboso.chiba.jp", +"mobara.chiba.jp", +"mutsuzawa.chiba.jp", +"nagara.chiba.jp", +"nagareyama.chiba.jp", +"narashino.chiba.jp", +"narita.chiba.jp", +"noda.chiba.jp", +"oamishirasato.chiba.jp", +"omigawa.chiba.jp", +"onjuku.chiba.jp", +"otaki.chiba.jp", +"sakae.chiba.jp", +"sakura.chiba.jp", +"shimofusa.chiba.jp", +"shirako.chiba.jp", +"shiroi.chiba.jp", +"shisui.chiba.jp", +"sodegaura.chiba.jp", +"sosa.chiba.jp", +"tako.chiba.jp", +"tateyama.chiba.jp", +"togane.chiba.jp", +"tohnosho.chiba.jp", +"tomisato.chiba.jp", +"urayasu.chiba.jp", +"yachimata.chiba.jp", +"yachiyo.chiba.jp", +"yokaichiba.chiba.jp", +"yokoshibahikari.chiba.jp", +"yotsukaido.chiba.jp", +"ainan.ehime.jp", +"honai.ehime.jp", +"ikata.ehime.jp", +"imabari.ehime.jp", +"iyo.ehime.jp", +"kamijima.ehime.jp", +"kihoku.ehime.jp", +"kumakogen.ehime.jp", +"masaki.ehime.jp", +"matsuno.ehime.jp", +"matsuyama.ehime.jp", +"namikata.ehime.jp", +"niihama.ehime.jp", +"ozu.ehime.jp", +"saijo.ehime.jp", +"seiyo.ehime.jp", +"shikokuchuo.ehime.jp", +"tobe.ehime.jp", +"toon.ehime.jp", +"uchiko.ehime.jp", +"uwajima.ehime.jp", +"yawatahama.ehime.jp", +"echizen.fukui.jp", +"eiheiji.fukui.jp", +"fukui.fukui.jp", +"ikeda.fukui.jp", +"katsuyama.fukui.jp", +"mihama.fukui.jp", +"minamiechizen.fukui.jp", +"obama.fukui.jp", +"ohi.fukui.jp", +"ono.fukui.jp", +"sabae.fukui.jp", +"sakai.fukui.jp", +"takahama.fukui.jp", +"tsuruga.fukui.jp", +"wakasa.fukui.jp", +"ashiya.fukuoka.jp", +"buzen.fukuoka.jp", +"chikugo.fukuoka.jp", +"chikuho.fukuoka.jp", +"chikujo.fukuoka.jp", +"chikushino.fukuoka.jp", +"chikuzen.fukuoka.jp", +"chuo.fukuoka.jp", +"dazaifu.fukuoka.jp", +"fukuchi.fukuoka.jp", +"hakata.fukuoka.jp", +"higashi.fukuoka.jp", +"hirokawa.fukuoka.jp", +"hisayama.fukuoka.jp", +"iizuka.fukuoka.jp", +"inatsuki.fukuoka.jp", +"kaho.fukuoka.jp", +"kasuga.fukuoka.jp", +"kasuya.fukuoka.jp", +"kawara.fukuoka.jp", +"keisen.fukuoka.jp", +"koga.fukuoka.jp", +"kurate.fukuoka.jp", +"kurogi.fukuoka.jp", +"kurume.fukuoka.jp", +"minami.fukuoka.jp", +"miyako.fukuoka.jp", +"miyama.fukuoka.jp", +"miyawaka.fukuoka.jp", +"mizumaki.fukuoka.jp", +"munakata.fukuoka.jp", +"nakagawa.fukuoka.jp", +"nakama.fukuoka.jp", +"nishi.fukuoka.jp", +"nogata.fukuoka.jp", +"ogori.fukuoka.jp", +"okagaki.fukuoka.jp", +"okawa.fukuoka.jp", +"oki.fukuoka.jp", +"omuta.fukuoka.jp", +"onga.fukuoka.jp", +"onojo.fukuoka.jp", +"oto.fukuoka.jp", +"saigawa.fukuoka.jp", +"sasaguri.fukuoka.jp", +"shingu.fukuoka.jp", +"shinyoshitomi.fukuoka.jp", +"shonai.fukuoka.jp", +"soeda.fukuoka.jp", +"sue.fukuoka.jp", +"tachiarai.fukuoka.jp", +"tagawa.fukuoka.jp", +"takata.fukuoka.jp", +"toho.fukuoka.jp", +"toyotsu.fukuoka.jp", +"tsuiki.fukuoka.jp", +"ukiha.fukuoka.jp", +"umi.fukuoka.jp", +"usui.fukuoka.jp", +"yamada.fukuoka.jp", +"yame.fukuoka.jp", +"yanagawa.fukuoka.jp", +"yukuhashi.fukuoka.jp", +"aizubange.fukushima.jp", +"aizumisato.fukushima.jp", +"aizuwakamatsu.fukushima.jp", +"asakawa.fukushima.jp", +"bandai.fukushima.jp", +"date.fukushima.jp", +"fukushima.fukushima.jp", +"furudono.fukushima.jp", +"futaba.fukushima.jp", +"hanawa.fukushima.jp", +"higashi.fukushima.jp", +"hirata.fukushima.jp", +"hirono.fukushima.jp", +"iitate.fukushima.jp", +"inawashiro.fukushima.jp", +"ishikawa.fukushima.jp", +"iwaki.fukushima.jp", +"izumizaki.fukushima.jp", 
+"kagamiishi.fukushima.jp", +"kaneyama.fukushima.jp", +"kawamata.fukushima.jp", +"kitakata.fukushima.jp", +"kitashiobara.fukushima.jp", +"koori.fukushima.jp", +"koriyama.fukushima.jp", +"kunimi.fukushima.jp", +"miharu.fukushima.jp", +"mishima.fukushima.jp", +"namie.fukushima.jp", +"nango.fukushima.jp", +"nishiaizu.fukushima.jp", +"nishigo.fukushima.jp", +"okuma.fukushima.jp", +"omotego.fukushima.jp", +"ono.fukushima.jp", +"otama.fukushima.jp", +"samegawa.fukushima.jp", +"shimogo.fukushima.jp", +"shirakawa.fukushima.jp", +"showa.fukushima.jp", +"soma.fukushima.jp", +"sukagawa.fukushima.jp", +"taishin.fukushima.jp", +"tamakawa.fukushima.jp", +"tanagura.fukushima.jp", +"tenei.fukushima.jp", +"yabuki.fukushima.jp", +"yamato.fukushima.jp", +"yamatsuri.fukushima.jp", +"yanaizu.fukushima.jp", +"yugawa.fukushima.jp", +"anpachi.gifu.jp", +"ena.gifu.jp", +"gifu.gifu.jp", +"ginan.gifu.jp", +"godo.gifu.jp", +"gujo.gifu.jp", +"hashima.gifu.jp", +"hichiso.gifu.jp", +"hida.gifu.jp", +"higashishirakawa.gifu.jp", +"ibigawa.gifu.jp", +"ikeda.gifu.jp", +"kakamigahara.gifu.jp", +"kani.gifu.jp", +"kasahara.gifu.jp", +"kasamatsu.gifu.jp", +"kawaue.gifu.jp", +"kitagata.gifu.jp", +"mino.gifu.jp", +"minokamo.gifu.jp", +"mitake.gifu.jp", +"mizunami.gifu.jp", +"motosu.gifu.jp", +"nakatsugawa.gifu.jp", +"ogaki.gifu.jp", +"sakahogi.gifu.jp", +"seki.gifu.jp", +"sekigahara.gifu.jp", +"shirakawa.gifu.jp", +"tajimi.gifu.jp", +"takayama.gifu.jp", +"tarui.gifu.jp", +"toki.gifu.jp", +"tomika.gifu.jp", +"wanouchi.gifu.jp", +"yamagata.gifu.jp", +"yaotsu.gifu.jp", +"yoro.gifu.jp", +"annaka.gunma.jp", +"chiyoda.gunma.jp", +"fujioka.gunma.jp", +"higashiagatsuma.gunma.jp", +"isesaki.gunma.jp", +"itakura.gunma.jp", +"kanna.gunma.jp", +"kanra.gunma.jp", +"katashina.gunma.jp", +"kawaba.gunma.jp", +"kiryu.gunma.jp", +"kusatsu.gunma.jp", +"maebashi.gunma.jp", +"meiwa.gunma.jp", +"midori.gunma.jp", +"minakami.gunma.jp", +"naganohara.gunma.jp", +"nakanojo.gunma.jp", +"nanmoku.gunma.jp", +"numata.gunma.jp", +"oizumi.gunma.jp", +"ora.gunma.jp", +"ota.gunma.jp", +"shibukawa.gunma.jp", +"shimonita.gunma.jp", +"shinto.gunma.jp", +"showa.gunma.jp", +"takasaki.gunma.jp", +"takayama.gunma.jp", +"tamamura.gunma.jp", +"tatebayashi.gunma.jp", +"tomioka.gunma.jp", +"tsukiyono.gunma.jp", +"tsumagoi.gunma.jp", +"ueno.gunma.jp", +"yoshioka.gunma.jp", +"asaminami.hiroshima.jp", +"daiwa.hiroshima.jp", +"etajima.hiroshima.jp", +"fuchu.hiroshima.jp", +"fukuyama.hiroshima.jp", +"hatsukaichi.hiroshima.jp", +"higashihiroshima.hiroshima.jp", +"hongo.hiroshima.jp", +"jinsekikogen.hiroshima.jp", +"kaita.hiroshima.jp", +"kui.hiroshima.jp", +"kumano.hiroshima.jp", +"kure.hiroshima.jp", +"mihara.hiroshima.jp", +"miyoshi.hiroshima.jp", +"naka.hiroshima.jp", +"onomichi.hiroshima.jp", +"osakikamijima.hiroshima.jp", +"otake.hiroshima.jp", +"saka.hiroshima.jp", +"sera.hiroshima.jp", +"seranishi.hiroshima.jp", +"shinichi.hiroshima.jp", +"shobara.hiroshima.jp", +"takehara.hiroshima.jp", +"abashiri.hokkaido.jp", +"abira.hokkaido.jp", +"aibetsu.hokkaido.jp", +"akabira.hokkaido.jp", +"akkeshi.hokkaido.jp", +"asahikawa.hokkaido.jp", +"ashibetsu.hokkaido.jp", +"ashoro.hokkaido.jp", +"assabu.hokkaido.jp", +"atsuma.hokkaido.jp", +"bibai.hokkaido.jp", +"biei.hokkaido.jp", +"bifuka.hokkaido.jp", +"bihoro.hokkaido.jp", +"biratori.hokkaido.jp", +"chippubetsu.hokkaido.jp", +"chitose.hokkaido.jp", +"date.hokkaido.jp", +"ebetsu.hokkaido.jp", +"embetsu.hokkaido.jp", +"eniwa.hokkaido.jp", +"erimo.hokkaido.jp", +"esan.hokkaido.jp", +"esashi.hokkaido.jp", +"fukagawa.hokkaido.jp", 
+"fukushima.hokkaido.jp", +"furano.hokkaido.jp", +"furubira.hokkaido.jp", +"haboro.hokkaido.jp", +"hakodate.hokkaido.jp", +"hamatonbetsu.hokkaido.jp", +"hidaka.hokkaido.jp", +"higashikagura.hokkaido.jp", +"higashikawa.hokkaido.jp", +"hiroo.hokkaido.jp", +"hokuryu.hokkaido.jp", +"hokuto.hokkaido.jp", +"honbetsu.hokkaido.jp", +"horokanai.hokkaido.jp", +"horonobe.hokkaido.jp", +"ikeda.hokkaido.jp", +"imakane.hokkaido.jp", +"ishikari.hokkaido.jp", +"iwamizawa.hokkaido.jp", +"iwanai.hokkaido.jp", +"kamifurano.hokkaido.jp", +"kamikawa.hokkaido.jp", +"kamishihoro.hokkaido.jp", +"kamisunagawa.hokkaido.jp", +"kamoenai.hokkaido.jp", +"kayabe.hokkaido.jp", +"kembuchi.hokkaido.jp", +"kikonai.hokkaido.jp", +"kimobetsu.hokkaido.jp", +"kitahiroshima.hokkaido.jp", +"kitami.hokkaido.jp", +"kiyosato.hokkaido.jp", +"koshimizu.hokkaido.jp", +"kunneppu.hokkaido.jp", +"kuriyama.hokkaido.jp", +"kuromatsunai.hokkaido.jp", +"kushiro.hokkaido.jp", +"kutchan.hokkaido.jp", +"kyowa.hokkaido.jp", +"mashike.hokkaido.jp", +"matsumae.hokkaido.jp", +"mikasa.hokkaido.jp", +"minamifurano.hokkaido.jp", +"mombetsu.hokkaido.jp", +"moseushi.hokkaido.jp", +"mukawa.hokkaido.jp", +"muroran.hokkaido.jp", +"naie.hokkaido.jp", +"nakagawa.hokkaido.jp", +"nakasatsunai.hokkaido.jp", +"nakatombetsu.hokkaido.jp", +"nanae.hokkaido.jp", +"nanporo.hokkaido.jp", +"nayoro.hokkaido.jp", +"nemuro.hokkaido.jp", +"niikappu.hokkaido.jp", +"niki.hokkaido.jp", +"nishiokoppe.hokkaido.jp", +"noboribetsu.hokkaido.jp", +"numata.hokkaido.jp", +"obihiro.hokkaido.jp", +"obira.hokkaido.jp", +"oketo.hokkaido.jp", +"okoppe.hokkaido.jp", +"otaru.hokkaido.jp", +"otobe.hokkaido.jp", +"otofuke.hokkaido.jp", +"otoineppu.hokkaido.jp", +"oumu.hokkaido.jp", +"ozora.hokkaido.jp", +"pippu.hokkaido.jp", +"rankoshi.hokkaido.jp", +"rebun.hokkaido.jp", +"rikubetsu.hokkaido.jp", +"rishiri.hokkaido.jp", +"rishirifuji.hokkaido.jp", +"saroma.hokkaido.jp", +"sarufutsu.hokkaido.jp", +"shakotan.hokkaido.jp", +"shari.hokkaido.jp", +"shibecha.hokkaido.jp", +"shibetsu.hokkaido.jp", +"shikabe.hokkaido.jp", +"shikaoi.hokkaido.jp", +"shimamaki.hokkaido.jp", +"shimizu.hokkaido.jp", +"shimokawa.hokkaido.jp", +"shinshinotsu.hokkaido.jp", +"shintoku.hokkaido.jp", +"shiranuka.hokkaido.jp", +"shiraoi.hokkaido.jp", +"shiriuchi.hokkaido.jp", +"sobetsu.hokkaido.jp", +"sunagawa.hokkaido.jp", +"taiki.hokkaido.jp", +"takasu.hokkaido.jp", +"takikawa.hokkaido.jp", +"takinoue.hokkaido.jp", +"teshikaga.hokkaido.jp", +"tobetsu.hokkaido.jp", +"tohma.hokkaido.jp", +"tomakomai.hokkaido.jp", +"tomari.hokkaido.jp", +"toya.hokkaido.jp", +"toyako.hokkaido.jp", +"toyotomi.hokkaido.jp", +"toyoura.hokkaido.jp", +"tsubetsu.hokkaido.jp", +"tsukigata.hokkaido.jp", +"urakawa.hokkaido.jp", +"urausu.hokkaido.jp", +"uryu.hokkaido.jp", +"utashinai.hokkaido.jp", +"wakkanai.hokkaido.jp", +"wassamu.hokkaido.jp", +"yakumo.hokkaido.jp", +"yoichi.hokkaido.jp", +"aioi.hyogo.jp", +"akashi.hyogo.jp", +"ako.hyogo.jp", +"amagasaki.hyogo.jp", +"aogaki.hyogo.jp", +"asago.hyogo.jp", +"ashiya.hyogo.jp", +"awaji.hyogo.jp", +"fukusaki.hyogo.jp", +"goshiki.hyogo.jp", +"harima.hyogo.jp", +"himeji.hyogo.jp", +"ichikawa.hyogo.jp", +"inagawa.hyogo.jp", +"itami.hyogo.jp", +"kakogawa.hyogo.jp", +"kamigori.hyogo.jp", +"kamikawa.hyogo.jp", +"kasai.hyogo.jp", +"kasuga.hyogo.jp", +"kawanishi.hyogo.jp", +"miki.hyogo.jp", +"minamiawaji.hyogo.jp", +"nishinomiya.hyogo.jp", +"nishiwaki.hyogo.jp", +"ono.hyogo.jp", +"sanda.hyogo.jp", +"sannan.hyogo.jp", +"sasayama.hyogo.jp", +"sayo.hyogo.jp", +"shingu.hyogo.jp", +"shinonsen.hyogo.jp", +"shiso.hyogo.jp", 
+"sumoto.hyogo.jp", +"taishi.hyogo.jp", +"taka.hyogo.jp", +"takarazuka.hyogo.jp", +"takasago.hyogo.jp", +"takino.hyogo.jp", +"tamba.hyogo.jp", +"tatsuno.hyogo.jp", +"toyooka.hyogo.jp", +"yabu.hyogo.jp", +"yashiro.hyogo.jp", +"yoka.hyogo.jp", +"yokawa.hyogo.jp", +"ami.ibaraki.jp", +"asahi.ibaraki.jp", +"bando.ibaraki.jp", +"chikusei.ibaraki.jp", +"daigo.ibaraki.jp", +"fujishiro.ibaraki.jp", +"hitachi.ibaraki.jp", +"hitachinaka.ibaraki.jp", +"hitachiomiya.ibaraki.jp", +"hitachiota.ibaraki.jp", +"ibaraki.ibaraki.jp", +"ina.ibaraki.jp", +"inashiki.ibaraki.jp", +"itako.ibaraki.jp", +"iwama.ibaraki.jp", +"joso.ibaraki.jp", +"kamisu.ibaraki.jp", +"kasama.ibaraki.jp", +"kashima.ibaraki.jp", +"kasumigaura.ibaraki.jp", +"koga.ibaraki.jp", +"miho.ibaraki.jp", +"mito.ibaraki.jp", +"moriya.ibaraki.jp", +"naka.ibaraki.jp", +"namegata.ibaraki.jp", +"oarai.ibaraki.jp", +"ogawa.ibaraki.jp", +"omitama.ibaraki.jp", +"ryugasaki.ibaraki.jp", +"sakai.ibaraki.jp", +"sakuragawa.ibaraki.jp", +"shimodate.ibaraki.jp", +"shimotsuma.ibaraki.jp", +"shirosato.ibaraki.jp", +"sowa.ibaraki.jp", +"suifu.ibaraki.jp", +"takahagi.ibaraki.jp", +"tamatsukuri.ibaraki.jp", +"tokai.ibaraki.jp", +"tomobe.ibaraki.jp", +"tone.ibaraki.jp", +"toride.ibaraki.jp", +"tsuchiura.ibaraki.jp", +"tsukuba.ibaraki.jp", +"uchihara.ibaraki.jp", +"ushiku.ibaraki.jp", +"yachiyo.ibaraki.jp", +"yamagata.ibaraki.jp", +"yawara.ibaraki.jp", +"yuki.ibaraki.jp", +"anamizu.ishikawa.jp", +"hakui.ishikawa.jp", +"hakusan.ishikawa.jp", +"kaga.ishikawa.jp", +"kahoku.ishikawa.jp", +"kanazawa.ishikawa.jp", +"kawakita.ishikawa.jp", +"komatsu.ishikawa.jp", +"nakanoto.ishikawa.jp", +"nanao.ishikawa.jp", +"nomi.ishikawa.jp", +"nonoichi.ishikawa.jp", +"noto.ishikawa.jp", +"shika.ishikawa.jp", +"suzu.ishikawa.jp", +"tsubata.ishikawa.jp", +"tsurugi.ishikawa.jp", +"uchinada.ishikawa.jp", +"wajima.ishikawa.jp", +"fudai.iwate.jp", +"fujisawa.iwate.jp", +"hanamaki.iwate.jp", +"hiraizumi.iwate.jp", +"hirono.iwate.jp", +"ichinohe.iwate.jp", +"ichinoseki.iwate.jp", +"iwaizumi.iwate.jp", +"iwate.iwate.jp", +"joboji.iwate.jp", +"kamaishi.iwate.jp", +"kanegasaki.iwate.jp", +"karumai.iwate.jp", +"kawai.iwate.jp", +"kitakami.iwate.jp", +"kuji.iwate.jp", +"kunohe.iwate.jp", +"kuzumaki.iwate.jp", +"miyako.iwate.jp", +"mizusawa.iwate.jp", +"morioka.iwate.jp", +"ninohe.iwate.jp", +"noda.iwate.jp", +"ofunato.iwate.jp", +"oshu.iwate.jp", +"otsuchi.iwate.jp", +"rikuzentakata.iwate.jp", +"shiwa.iwate.jp", +"shizukuishi.iwate.jp", +"sumita.iwate.jp", +"tanohata.iwate.jp", +"tono.iwate.jp", +"yahaba.iwate.jp", +"yamada.iwate.jp", +"ayagawa.kagawa.jp", +"higashikagawa.kagawa.jp", +"kanonji.kagawa.jp", +"kotohira.kagawa.jp", +"manno.kagawa.jp", +"marugame.kagawa.jp", +"mitoyo.kagawa.jp", +"naoshima.kagawa.jp", +"sanuki.kagawa.jp", +"tadotsu.kagawa.jp", +"takamatsu.kagawa.jp", +"tonosho.kagawa.jp", +"uchinomi.kagawa.jp", +"utazu.kagawa.jp", +"zentsuji.kagawa.jp", +"akune.kagoshima.jp", +"amami.kagoshima.jp", +"hioki.kagoshima.jp", +"isa.kagoshima.jp", +"isen.kagoshima.jp", +"izumi.kagoshima.jp", +"kagoshima.kagoshima.jp", +"kanoya.kagoshima.jp", +"kawanabe.kagoshima.jp", +"kinko.kagoshima.jp", +"kouyama.kagoshima.jp", +"makurazaki.kagoshima.jp", +"matsumoto.kagoshima.jp", +"minamitane.kagoshima.jp", +"nakatane.kagoshima.jp", +"nishinoomote.kagoshima.jp", +"satsumasendai.kagoshima.jp", +"soo.kagoshima.jp", +"tarumizu.kagoshima.jp", +"yusui.kagoshima.jp", +"aikawa.kanagawa.jp", +"atsugi.kanagawa.jp", +"ayase.kanagawa.jp", +"chigasaki.kanagawa.jp", +"ebina.kanagawa.jp", +"fujisawa.kanagawa.jp", 
+"hadano.kanagawa.jp", +"hakone.kanagawa.jp", +"hiratsuka.kanagawa.jp", +"isehara.kanagawa.jp", +"kaisei.kanagawa.jp", +"kamakura.kanagawa.jp", +"kiyokawa.kanagawa.jp", +"matsuda.kanagawa.jp", +"minamiashigara.kanagawa.jp", +"miura.kanagawa.jp", +"nakai.kanagawa.jp", +"ninomiya.kanagawa.jp", +"odawara.kanagawa.jp", +"oi.kanagawa.jp", +"oiso.kanagawa.jp", +"sagamihara.kanagawa.jp", +"samukawa.kanagawa.jp", +"tsukui.kanagawa.jp", +"yamakita.kanagawa.jp", +"yamato.kanagawa.jp", +"yokosuka.kanagawa.jp", +"yugawara.kanagawa.jp", +"zama.kanagawa.jp", +"zushi.kanagawa.jp", +"aki.kochi.jp", +"geisei.kochi.jp", +"hidaka.kochi.jp", +"higashitsuno.kochi.jp", +"ino.kochi.jp", +"kagami.kochi.jp", +"kami.kochi.jp", +"kitagawa.kochi.jp", +"kochi.kochi.jp", +"mihara.kochi.jp", +"motoyama.kochi.jp", +"muroto.kochi.jp", +"nahari.kochi.jp", +"nakamura.kochi.jp", +"nankoku.kochi.jp", +"nishitosa.kochi.jp", +"niyodogawa.kochi.jp", +"ochi.kochi.jp", +"okawa.kochi.jp", +"otoyo.kochi.jp", +"otsuki.kochi.jp", +"sakawa.kochi.jp", +"sukumo.kochi.jp", +"susaki.kochi.jp", +"tosa.kochi.jp", +"tosashimizu.kochi.jp", +"toyo.kochi.jp", +"tsuno.kochi.jp", +"umaji.kochi.jp", +"yasuda.kochi.jp", +"yusuhara.kochi.jp", +"amakusa.kumamoto.jp", +"arao.kumamoto.jp", +"aso.kumamoto.jp", +"choyo.kumamoto.jp", +"gyokuto.kumamoto.jp", +"kamiamakusa.kumamoto.jp", +"kikuchi.kumamoto.jp", +"kumamoto.kumamoto.jp", +"mashiki.kumamoto.jp", +"mifune.kumamoto.jp", +"minamata.kumamoto.jp", +"minamioguni.kumamoto.jp", +"nagasu.kumamoto.jp", +"nishihara.kumamoto.jp", +"oguni.kumamoto.jp", +"ozu.kumamoto.jp", +"sumoto.kumamoto.jp", +"takamori.kumamoto.jp", +"uki.kumamoto.jp", +"uto.kumamoto.jp", +"yamaga.kumamoto.jp", +"yamato.kumamoto.jp", +"yatsushiro.kumamoto.jp", +"ayabe.kyoto.jp", +"fukuchiyama.kyoto.jp", +"higashiyama.kyoto.jp", +"ide.kyoto.jp", +"ine.kyoto.jp", +"joyo.kyoto.jp", +"kameoka.kyoto.jp", +"kamo.kyoto.jp", +"kita.kyoto.jp", +"kizu.kyoto.jp", +"kumiyama.kyoto.jp", +"kyotamba.kyoto.jp", +"kyotanabe.kyoto.jp", +"kyotango.kyoto.jp", +"maizuru.kyoto.jp", +"minami.kyoto.jp", +"minamiyamashiro.kyoto.jp", +"miyazu.kyoto.jp", +"muko.kyoto.jp", +"nagaokakyo.kyoto.jp", +"nakagyo.kyoto.jp", +"nantan.kyoto.jp", +"oyamazaki.kyoto.jp", +"sakyo.kyoto.jp", +"seika.kyoto.jp", +"tanabe.kyoto.jp", +"uji.kyoto.jp", +"ujitawara.kyoto.jp", +"wazuka.kyoto.jp", +"yamashina.kyoto.jp", +"yawata.kyoto.jp", +"asahi.mie.jp", +"inabe.mie.jp", +"ise.mie.jp", +"kameyama.mie.jp", +"kawagoe.mie.jp", +"kiho.mie.jp", +"kisosaki.mie.jp", +"kiwa.mie.jp", +"komono.mie.jp", +"kumano.mie.jp", +"kuwana.mie.jp", +"matsusaka.mie.jp", +"meiwa.mie.jp", +"mihama.mie.jp", +"minamiise.mie.jp", +"misugi.mie.jp", +"miyama.mie.jp", +"nabari.mie.jp", +"shima.mie.jp", +"suzuka.mie.jp", +"tado.mie.jp", +"taiki.mie.jp", +"taki.mie.jp", +"tamaki.mie.jp", +"toba.mie.jp", +"tsu.mie.jp", +"udono.mie.jp", +"ureshino.mie.jp", +"watarai.mie.jp", +"yokkaichi.mie.jp", +"furukawa.miyagi.jp", +"higashimatsushima.miyagi.jp", +"ishinomaki.miyagi.jp", +"iwanuma.miyagi.jp", +"kakuda.miyagi.jp", +"kami.miyagi.jp", +"kawasaki.miyagi.jp", +"marumori.miyagi.jp", +"matsushima.miyagi.jp", +"minamisanriku.miyagi.jp", +"misato.miyagi.jp", +"murata.miyagi.jp", +"natori.miyagi.jp", +"ogawara.miyagi.jp", +"ohira.miyagi.jp", +"onagawa.miyagi.jp", +"osaki.miyagi.jp", +"rifu.miyagi.jp", +"semine.miyagi.jp", +"shibata.miyagi.jp", +"shichikashuku.miyagi.jp", +"shikama.miyagi.jp", +"shiogama.miyagi.jp", +"shiroishi.miyagi.jp", +"tagajo.miyagi.jp", +"taiwa.miyagi.jp", +"tome.miyagi.jp", +"tomiya.miyagi.jp", 
+"wakuya.miyagi.jp", +"watari.miyagi.jp", +"yamamoto.miyagi.jp", +"zao.miyagi.jp", +"aya.miyazaki.jp", +"ebino.miyazaki.jp", +"gokase.miyazaki.jp", +"hyuga.miyazaki.jp", +"kadogawa.miyazaki.jp", +"kawaminami.miyazaki.jp", +"kijo.miyazaki.jp", +"kitagawa.miyazaki.jp", +"kitakata.miyazaki.jp", +"kitaura.miyazaki.jp", +"kobayashi.miyazaki.jp", +"kunitomi.miyazaki.jp", +"kushima.miyazaki.jp", +"mimata.miyazaki.jp", +"miyakonojo.miyazaki.jp", +"miyazaki.miyazaki.jp", +"morotsuka.miyazaki.jp", +"nichinan.miyazaki.jp", +"nishimera.miyazaki.jp", +"nobeoka.miyazaki.jp", +"saito.miyazaki.jp", +"shiiba.miyazaki.jp", +"shintomi.miyazaki.jp", +"takaharu.miyazaki.jp", +"takanabe.miyazaki.jp", +"takazaki.miyazaki.jp", +"tsuno.miyazaki.jp", +"achi.nagano.jp", +"agematsu.nagano.jp", +"anan.nagano.jp", +"aoki.nagano.jp", +"asahi.nagano.jp", +"azumino.nagano.jp", +"chikuhoku.nagano.jp", +"chikuma.nagano.jp", +"chino.nagano.jp", +"fujimi.nagano.jp", +"hakuba.nagano.jp", +"hara.nagano.jp", +"hiraya.nagano.jp", +"iida.nagano.jp", +"iijima.nagano.jp", +"iiyama.nagano.jp", +"iizuna.nagano.jp", +"ikeda.nagano.jp", +"ikusaka.nagano.jp", +"ina.nagano.jp", +"karuizawa.nagano.jp", +"kawakami.nagano.jp", +"kiso.nagano.jp", +"kisofukushima.nagano.jp", +"kitaaiki.nagano.jp", +"komagane.nagano.jp", +"komoro.nagano.jp", +"matsukawa.nagano.jp", +"matsumoto.nagano.jp", +"miasa.nagano.jp", +"minamiaiki.nagano.jp", +"minamimaki.nagano.jp", +"minamiminowa.nagano.jp", +"minowa.nagano.jp", +"miyada.nagano.jp", +"miyota.nagano.jp", +"mochizuki.nagano.jp", +"nagano.nagano.jp", +"nagawa.nagano.jp", +"nagiso.nagano.jp", +"nakagawa.nagano.jp", +"nakano.nagano.jp", +"nozawaonsen.nagano.jp", +"obuse.nagano.jp", +"ogawa.nagano.jp", +"okaya.nagano.jp", +"omachi.nagano.jp", +"omi.nagano.jp", +"ookuwa.nagano.jp", +"ooshika.nagano.jp", +"otaki.nagano.jp", +"otari.nagano.jp", +"sakae.nagano.jp", +"sakaki.nagano.jp", +"saku.nagano.jp", +"sakuho.nagano.jp", +"shimosuwa.nagano.jp", +"shinanomachi.nagano.jp", +"shiojiri.nagano.jp", +"suwa.nagano.jp", +"suzaka.nagano.jp", +"takagi.nagano.jp", +"takamori.nagano.jp", +"takayama.nagano.jp", +"tateshina.nagano.jp", +"tatsuno.nagano.jp", +"togakushi.nagano.jp", +"togura.nagano.jp", +"tomi.nagano.jp", +"ueda.nagano.jp", +"wada.nagano.jp", +"yamagata.nagano.jp", +"yamanouchi.nagano.jp", +"yasaka.nagano.jp", +"yasuoka.nagano.jp", +"chijiwa.nagasaki.jp", +"futsu.nagasaki.jp", +"goto.nagasaki.jp", +"hasami.nagasaki.jp", +"hirado.nagasaki.jp", +"iki.nagasaki.jp", +"isahaya.nagasaki.jp", +"kawatana.nagasaki.jp", +"kuchinotsu.nagasaki.jp", +"matsuura.nagasaki.jp", +"nagasaki.nagasaki.jp", +"obama.nagasaki.jp", +"omura.nagasaki.jp", +"oseto.nagasaki.jp", +"saikai.nagasaki.jp", +"sasebo.nagasaki.jp", +"seihi.nagasaki.jp", +"shimabara.nagasaki.jp", +"shinkamigoto.nagasaki.jp", +"togitsu.nagasaki.jp", +"tsushima.nagasaki.jp", +"unzen.nagasaki.jp", +"ando.nara.jp", +"gose.nara.jp", +"heguri.nara.jp", +"higashiyoshino.nara.jp", +"ikaruga.nara.jp", +"ikoma.nara.jp", +"kamikitayama.nara.jp", +"kanmaki.nara.jp", +"kashiba.nara.jp", +"kashihara.nara.jp", +"katsuragi.nara.jp", +"kawai.nara.jp", +"kawakami.nara.jp", +"kawanishi.nara.jp", +"koryo.nara.jp", +"kurotaki.nara.jp", +"mitsue.nara.jp", +"miyake.nara.jp", +"nara.nara.jp", +"nosegawa.nara.jp", +"oji.nara.jp", +"ouda.nara.jp", +"oyodo.nara.jp", +"sakurai.nara.jp", +"sango.nara.jp", +"shimoichi.nara.jp", +"shimokitayama.nara.jp", +"shinjo.nara.jp", +"soni.nara.jp", +"takatori.nara.jp", +"tawaramoto.nara.jp", +"tenkawa.nara.jp", +"tenri.nara.jp", +"uda.nara.jp", 
+"yamatokoriyama.nara.jp", +"yamatotakada.nara.jp", +"yamazoe.nara.jp", +"yoshino.nara.jp", +"aga.niigata.jp", +"agano.niigata.jp", +"gosen.niigata.jp", +"itoigawa.niigata.jp", +"izumozaki.niigata.jp", +"joetsu.niigata.jp", +"kamo.niigata.jp", +"kariwa.niigata.jp", +"kashiwazaki.niigata.jp", +"minamiuonuma.niigata.jp", +"mitsuke.niigata.jp", +"muika.niigata.jp", +"murakami.niigata.jp", +"myoko.niigata.jp", +"nagaoka.niigata.jp", +"niigata.niigata.jp", +"ojiya.niigata.jp", +"omi.niigata.jp", +"sado.niigata.jp", +"sanjo.niigata.jp", +"seiro.niigata.jp", +"seirou.niigata.jp", +"sekikawa.niigata.jp", +"shibata.niigata.jp", +"tagami.niigata.jp", +"tainai.niigata.jp", +"tochio.niigata.jp", +"tokamachi.niigata.jp", +"tsubame.niigata.jp", +"tsunan.niigata.jp", +"uonuma.niigata.jp", +"yahiko.niigata.jp", +"yoita.niigata.jp", +"yuzawa.niigata.jp", +"beppu.oita.jp", +"bungoono.oita.jp", +"bungotakada.oita.jp", +"hasama.oita.jp", +"hiji.oita.jp", +"himeshima.oita.jp", +"hita.oita.jp", +"kamitsue.oita.jp", +"kokonoe.oita.jp", +"kuju.oita.jp", +"kunisaki.oita.jp", +"kusu.oita.jp", +"oita.oita.jp", +"saiki.oita.jp", +"taketa.oita.jp", +"tsukumi.oita.jp", +"usa.oita.jp", +"usuki.oita.jp", +"yufu.oita.jp", +"akaiwa.okayama.jp", +"asakuchi.okayama.jp", +"bizen.okayama.jp", +"hayashima.okayama.jp", +"ibara.okayama.jp", +"kagamino.okayama.jp", +"kasaoka.okayama.jp", +"kibichuo.okayama.jp", +"kumenan.okayama.jp", +"kurashiki.okayama.jp", +"maniwa.okayama.jp", +"misaki.okayama.jp", +"nagi.okayama.jp", +"niimi.okayama.jp", +"nishiawakura.okayama.jp", +"okayama.okayama.jp", +"satosho.okayama.jp", +"setouchi.okayama.jp", +"shinjo.okayama.jp", +"shoo.okayama.jp", +"soja.okayama.jp", +"takahashi.okayama.jp", +"tamano.okayama.jp", +"tsuyama.okayama.jp", +"wake.okayama.jp", +"yakage.okayama.jp", +"aguni.okinawa.jp", +"ginowan.okinawa.jp", +"ginoza.okinawa.jp", +"gushikami.okinawa.jp", +"haebaru.okinawa.jp", +"higashi.okinawa.jp", +"hirara.okinawa.jp", +"iheya.okinawa.jp", +"ishigaki.okinawa.jp", +"ishikawa.okinawa.jp", +"itoman.okinawa.jp", +"izena.okinawa.jp", +"kadena.okinawa.jp", +"kin.okinawa.jp", +"kitadaito.okinawa.jp", +"kitanakagusuku.okinawa.jp", +"kumejima.okinawa.jp", +"kunigami.okinawa.jp", +"minamidaito.okinawa.jp", +"motobu.okinawa.jp", +"nago.okinawa.jp", +"naha.okinawa.jp", +"nakagusuku.okinawa.jp", +"nakijin.okinawa.jp", +"nanjo.okinawa.jp", +"nishihara.okinawa.jp", +"ogimi.okinawa.jp", +"okinawa.okinawa.jp", +"onna.okinawa.jp", +"shimoji.okinawa.jp", +"taketomi.okinawa.jp", +"tarama.okinawa.jp", +"tokashiki.okinawa.jp", +"tomigusuku.okinawa.jp", +"tonaki.okinawa.jp", +"urasoe.okinawa.jp", +"uruma.okinawa.jp", +"yaese.okinawa.jp", +"yomitan.okinawa.jp", +"yonabaru.okinawa.jp", +"yonaguni.okinawa.jp", +"zamami.okinawa.jp", +"abeno.osaka.jp", +"chihayaakasaka.osaka.jp", +"chuo.osaka.jp", +"daito.osaka.jp", +"fujiidera.osaka.jp", +"habikino.osaka.jp", +"hannan.osaka.jp", +"higashiosaka.osaka.jp", +"higashisumiyoshi.osaka.jp", +"higashiyodogawa.osaka.jp", +"hirakata.osaka.jp", +"ibaraki.osaka.jp", +"ikeda.osaka.jp", +"izumi.osaka.jp", +"izumiotsu.osaka.jp", +"izumisano.osaka.jp", +"kadoma.osaka.jp", +"kaizuka.osaka.jp", +"kanan.osaka.jp", +"kashiwara.osaka.jp", +"katano.osaka.jp", +"kawachinagano.osaka.jp", +"kishiwada.osaka.jp", +"kita.osaka.jp", +"kumatori.osaka.jp", +"matsubara.osaka.jp", +"minato.osaka.jp", +"minoh.osaka.jp", +"misaki.osaka.jp", +"moriguchi.osaka.jp", +"neyagawa.osaka.jp", +"nishi.osaka.jp", +"nose.osaka.jp", +"osakasayama.osaka.jp", +"sakai.osaka.jp", +"sayama.osaka.jp", 
+"sennan.osaka.jp", +"settsu.osaka.jp", +"shijonawate.osaka.jp", +"shimamoto.osaka.jp", +"suita.osaka.jp", +"tadaoka.osaka.jp", +"taishi.osaka.jp", +"tajiri.osaka.jp", +"takaishi.osaka.jp", +"takatsuki.osaka.jp", +"tondabayashi.osaka.jp", +"toyonaka.osaka.jp", +"toyono.osaka.jp", +"yao.osaka.jp", +"ariake.saga.jp", +"arita.saga.jp", +"fukudomi.saga.jp", +"genkai.saga.jp", +"hamatama.saga.jp", +"hizen.saga.jp", +"imari.saga.jp", +"kamimine.saga.jp", +"kanzaki.saga.jp", +"karatsu.saga.jp", +"kashima.saga.jp", +"kitagata.saga.jp", +"kitahata.saga.jp", +"kiyama.saga.jp", +"kouhoku.saga.jp", +"kyuragi.saga.jp", +"nishiarita.saga.jp", +"ogi.saga.jp", +"omachi.saga.jp", +"ouchi.saga.jp", +"saga.saga.jp", +"shiroishi.saga.jp", +"taku.saga.jp", +"tara.saga.jp", +"tosu.saga.jp", +"yoshinogari.saga.jp", +"arakawa.saitama.jp", +"asaka.saitama.jp", +"chichibu.saitama.jp", +"fujimi.saitama.jp", +"fujimino.saitama.jp", +"fukaya.saitama.jp", +"hanno.saitama.jp", +"hanyu.saitama.jp", +"hasuda.saitama.jp", +"hatogaya.saitama.jp", +"hatoyama.saitama.jp", +"hidaka.saitama.jp", +"higashichichibu.saitama.jp", +"higashimatsuyama.saitama.jp", +"honjo.saitama.jp", +"ina.saitama.jp", +"iruma.saitama.jp", +"iwatsuki.saitama.jp", +"kamiizumi.saitama.jp", +"kamikawa.saitama.jp", +"kamisato.saitama.jp", +"kasukabe.saitama.jp", +"kawagoe.saitama.jp", +"kawaguchi.saitama.jp", +"kawajima.saitama.jp", +"kazo.saitama.jp", +"kitamoto.saitama.jp", +"koshigaya.saitama.jp", +"kounosu.saitama.jp", +"kuki.saitama.jp", +"kumagaya.saitama.jp", +"matsubushi.saitama.jp", +"minano.saitama.jp", +"misato.saitama.jp", +"miyashiro.saitama.jp", +"miyoshi.saitama.jp", +"moroyama.saitama.jp", +"nagatoro.saitama.jp", +"namegawa.saitama.jp", +"niiza.saitama.jp", +"ogano.saitama.jp", +"ogawa.saitama.jp", +"ogose.saitama.jp", +"okegawa.saitama.jp", +"omiya.saitama.jp", +"otaki.saitama.jp", +"ranzan.saitama.jp", +"ryokami.saitama.jp", +"saitama.saitama.jp", +"sakado.saitama.jp", +"satte.saitama.jp", +"sayama.saitama.jp", +"shiki.saitama.jp", +"shiraoka.saitama.jp", +"soka.saitama.jp", +"sugito.saitama.jp", +"toda.saitama.jp", +"tokigawa.saitama.jp", +"tokorozawa.saitama.jp", +"tsurugashima.saitama.jp", +"urawa.saitama.jp", +"warabi.saitama.jp", +"yashio.saitama.jp", +"yokoze.saitama.jp", +"yono.saitama.jp", +"yorii.saitama.jp", +"yoshida.saitama.jp", +"yoshikawa.saitama.jp", +"yoshimi.saitama.jp", +"aisho.shiga.jp", +"gamo.shiga.jp", +"higashiomi.shiga.jp", +"hikone.shiga.jp", +"koka.shiga.jp", +"konan.shiga.jp", +"kosei.shiga.jp", +"koto.shiga.jp", +"kusatsu.shiga.jp", +"maibara.shiga.jp", +"moriyama.shiga.jp", +"nagahama.shiga.jp", +"nishiazai.shiga.jp", +"notogawa.shiga.jp", +"omihachiman.shiga.jp", +"otsu.shiga.jp", +"ritto.shiga.jp", +"ryuoh.shiga.jp", +"takashima.shiga.jp", +"takatsuki.shiga.jp", +"torahime.shiga.jp", +"toyosato.shiga.jp", +"yasu.shiga.jp", +"akagi.shimane.jp", +"ama.shimane.jp", +"gotsu.shimane.jp", +"hamada.shimane.jp", +"higashiizumo.shimane.jp", +"hikawa.shimane.jp", +"hikimi.shimane.jp", +"izumo.shimane.jp", +"kakinoki.shimane.jp", +"masuda.shimane.jp", +"matsue.shimane.jp", +"misato.shimane.jp", +"nishinoshima.shimane.jp", +"ohda.shimane.jp", +"okinoshima.shimane.jp", +"okuizumo.shimane.jp", +"shimane.shimane.jp", +"tamayu.shimane.jp", +"tsuwano.shimane.jp", +"unnan.shimane.jp", +"yakumo.shimane.jp", +"yasugi.shimane.jp", +"yatsuka.shimane.jp", +"arai.shizuoka.jp", +"atami.shizuoka.jp", +"fuji.shizuoka.jp", +"fujieda.shizuoka.jp", +"fujikawa.shizuoka.jp", +"fujinomiya.shizuoka.jp", +"fukuroi.shizuoka.jp", 
+"gotemba.shizuoka.jp", +"haibara.shizuoka.jp", +"hamamatsu.shizuoka.jp", +"higashiizu.shizuoka.jp", +"ito.shizuoka.jp", +"iwata.shizuoka.jp", +"izu.shizuoka.jp", +"izunokuni.shizuoka.jp", +"kakegawa.shizuoka.jp", +"kannami.shizuoka.jp", +"kawanehon.shizuoka.jp", +"kawazu.shizuoka.jp", +"kikugawa.shizuoka.jp", +"kosai.shizuoka.jp", +"makinohara.shizuoka.jp", +"matsuzaki.shizuoka.jp", +"minamiizu.shizuoka.jp", +"mishima.shizuoka.jp", +"morimachi.shizuoka.jp", +"nishiizu.shizuoka.jp", +"numazu.shizuoka.jp", +"omaezaki.shizuoka.jp", +"shimada.shizuoka.jp", +"shimizu.shizuoka.jp", +"shimoda.shizuoka.jp", +"shizuoka.shizuoka.jp", +"susono.shizuoka.jp", +"yaizu.shizuoka.jp", +"yoshida.shizuoka.jp", +"ashikaga.tochigi.jp", +"bato.tochigi.jp", +"haga.tochigi.jp", +"ichikai.tochigi.jp", +"iwafune.tochigi.jp", +"kaminokawa.tochigi.jp", +"kanuma.tochigi.jp", +"karasuyama.tochigi.jp", +"kuroiso.tochigi.jp", +"mashiko.tochigi.jp", +"mibu.tochigi.jp", +"moka.tochigi.jp", +"motegi.tochigi.jp", +"nasu.tochigi.jp", +"nasushiobara.tochigi.jp", +"nikko.tochigi.jp", +"nishikata.tochigi.jp", +"nogi.tochigi.jp", +"ohira.tochigi.jp", +"ohtawara.tochigi.jp", +"oyama.tochigi.jp", +"sakura.tochigi.jp", +"sano.tochigi.jp", +"shimotsuke.tochigi.jp", +"shioya.tochigi.jp", +"takanezawa.tochigi.jp", +"tochigi.tochigi.jp", +"tsuga.tochigi.jp", +"ujiie.tochigi.jp", +"utsunomiya.tochigi.jp", +"yaita.tochigi.jp", +"aizumi.tokushima.jp", +"anan.tokushima.jp", +"ichiba.tokushima.jp", +"itano.tokushima.jp", +"kainan.tokushima.jp", +"komatsushima.tokushima.jp", +"matsushige.tokushima.jp", +"mima.tokushima.jp", +"minami.tokushima.jp", +"miyoshi.tokushima.jp", +"mugi.tokushima.jp", +"nakagawa.tokushima.jp", +"naruto.tokushima.jp", +"sanagochi.tokushima.jp", +"shishikui.tokushima.jp", +"tokushima.tokushima.jp", +"wajiki.tokushima.jp", +"adachi.tokyo.jp", +"akiruno.tokyo.jp", +"akishima.tokyo.jp", +"aogashima.tokyo.jp", +"arakawa.tokyo.jp", +"bunkyo.tokyo.jp", +"chiyoda.tokyo.jp", +"chofu.tokyo.jp", +"chuo.tokyo.jp", +"edogawa.tokyo.jp", +"fuchu.tokyo.jp", +"fussa.tokyo.jp", +"hachijo.tokyo.jp", +"hachioji.tokyo.jp", +"hamura.tokyo.jp", +"higashikurume.tokyo.jp", +"higashimurayama.tokyo.jp", +"higashiyamato.tokyo.jp", +"hino.tokyo.jp", +"hinode.tokyo.jp", +"hinohara.tokyo.jp", +"inagi.tokyo.jp", +"itabashi.tokyo.jp", +"katsushika.tokyo.jp", +"kita.tokyo.jp", +"kiyose.tokyo.jp", +"kodaira.tokyo.jp", +"koganei.tokyo.jp", +"kokubunji.tokyo.jp", +"komae.tokyo.jp", +"koto.tokyo.jp", +"kouzushima.tokyo.jp", +"kunitachi.tokyo.jp", +"machida.tokyo.jp", +"meguro.tokyo.jp", +"minato.tokyo.jp", +"mitaka.tokyo.jp", +"mizuho.tokyo.jp", +"musashimurayama.tokyo.jp", +"musashino.tokyo.jp", +"nakano.tokyo.jp", +"nerima.tokyo.jp", +"ogasawara.tokyo.jp", +"okutama.tokyo.jp", +"ome.tokyo.jp", +"oshima.tokyo.jp", +"ota.tokyo.jp", +"setagaya.tokyo.jp", +"shibuya.tokyo.jp", +"shinagawa.tokyo.jp", +"shinjuku.tokyo.jp", +"suginami.tokyo.jp", +"sumida.tokyo.jp", +"tachikawa.tokyo.jp", +"taito.tokyo.jp", +"tama.tokyo.jp", +"toshima.tokyo.jp", +"chizu.tottori.jp", +"hino.tottori.jp", +"kawahara.tottori.jp", +"koge.tottori.jp", +"kotoura.tottori.jp", +"misasa.tottori.jp", +"nanbu.tottori.jp", +"nichinan.tottori.jp", +"sakaiminato.tottori.jp", +"tottori.tottori.jp", +"wakasa.tottori.jp", +"yazu.tottori.jp", +"yonago.tottori.jp", +"asahi.toyama.jp", +"fuchu.toyama.jp", +"fukumitsu.toyama.jp", +"funahashi.toyama.jp", +"himi.toyama.jp", +"imizu.toyama.jp", +"inami.toyama.jp", +"johana.toyama.jp", +"kamiichi.toyama.jp", +"kurobe.toyama.jp", 
+"nakaniikawa.toyama.jp", +"namerikawa.toyama.jp", +"nanto.toyama.jp", +"nyuzen.toyama.jp", +"oyabe.toyama.jp", +"taira.toyama.jp", +"takaoka.toyama.jp", +"tateyama.toyama.jp", +"toga.toyama.jp", +"tonami.toyama.jp", +"toyama.toyama.jp", +"unazuki.toyama.jp", +"uozu.toyama.jp", +"yamada.toyama.jp", +"arida.wakayama.jp", +"aridagawa.wakayama.jp", +"gobo.wakayama.jp", +"hashimoto.wakayama.jp", +"hidaka.wakayama.jp", +"hirogawa.wakayama.jp", +"inami.wakayama.jp", +"iwade.wakayama.jp", +"kainan.wakayama.jp", +"kamitonda.wakayama.jp", +"katsuragi.wakayama.jp", +"kimino.wakayama.jp", +"kinokawa.wakayama.jp", +"kitayama.wakayama.jp", +"koya.wakayama.jp", +"koza.wakayama.jp", +"kozagawa.wakayama.jp", +"kudoyama.wakayama.jp", +"kushimoto.wakayama.jp", +"mihama.wakayama.jp", +"misato.wakayama.jp", +"nachikatsuura.wakayama.jp", +"shingu.wakayama.jp", +"shirahama.wakayama.jp", +"taiji.wakayama.jp", +"tanabe.wakayama.jp", +"wakayama.wakayama.jp", +"yuasa.wakayama.jp", +"yura.wakayama.jp", +"asahi.yamagata.jp", +"funagata.yamagata.jp", +"higashine.yamagata.jp", +"iide.yamagata.jp", +"kahoku.yamagata.jp", +"kaminoyama.yamagata.jp", +"kaneyama.yamagata.jp", +"kawanishi.yamagata.jp", +"mamurogawa.yamagata.jp", +"mikawa.yamagata.jp", +"murayama.yamagata.jp", +"nagai.yamagata.jp", +"nakayama.yamagata.jp", +"nanyo.yamagata.jp", +"nishikawa.yamagata.jp", +"obanazawa.yamagata.jp", +"oe.yamagata.jp", +"oguni.yamagata.jp", +"ohkura.yamagata.jp", +"oishida.yamagata.jp", +"sagae.yamagata.jp", +"sakata.yamagata.jp", +"sakegawa.yamagata.jp", +"shinjo.yamagata.jp", +"shirataka.yamagata.jp", +"shonai.yamagata.jp", +"takahata.yamagata.jp", +"tendo.yamagata.jp", +"tozawa.yamagata.jp", +"tsuruoka.yamagata.jp", +"yamagata.yamagata.jp", +"yamanobe.yamagata.jp", +"yonezawa.yamagata.jp", +"yuza.yamagata.jp", +"abu.yamaguchi.jp", +"hagi.yamaguchi.jp", +"hikari.yamaguchi.jp", +"hofu.yamaguchi.jp", +"iwakuni.yamaguchi.jp", +"kudamatsu.yamaguchi.jp", +"mitou.yamaguchi.jp", +"nagato.yamaguchi.jp", +"oshima.yamaguchi.jp", +"shimonoseki.yamaguchi.jp", +"shunan.yamaguchi.jp", +"tabuse.yamaguchi.jp", +"tokuyama.yamaguchi.jp", +"toyota.yamaguchi.jp", +"ube.yamaguchi.jp", +"yuu.yamaguchi.jp", +"chuo.yamanashi.jp", +"doshi.yamanashi.jp", +"fuefuki.yamanashi.jp", +"fujikawa.yamanashi.jp", +"fujikawaguchiko.yamanashi.jp", +"fujiyoshida.yamanashi.jp", +"hayakawa.yamanashi.jp", +"hokuto.yamanashi.jp", +"ichikawamisato.yamanashi.jp", +"kai.yamanashi.jp", +"kofu.yamanashi.jp", +"koshu.yamanashi.jp", +"kosuge.yamanashi.jp", +"minami-alps.yamanashi.jp", +"minobu.yamanashi.jp", +"nakamichi.yamanashi.jp", +"nanbu.yamanashi.jp", +"narusawa.yamanashi.jp", +"nirasaki.yamanashi.jp", +"nishikatsura.yamanashi.jp", +"oshino.yamanashi.jp", +"otsuki.yamanashi.jp", +"showa.yamanashi.jp", +"tabayama.yamanashi.jp", +"tsuru.yamanashi.jp", +"uenohara.yamanashi.jp", +"yamanakako.yamanashi.jp", +"yamanashi.yamanashi.jp", +"ke", +"ac.ke", +"co.ke", +"go.ke", +"info.ke", +"me.ke", +"mobi.ke", +"ne.ke", +"or.ke", +"sc.ke", +"kg", +"org.kg", +"net.kg", +"com.kg", +"edu.kg", +"gov.kg", +"mil.kg", +"*.kh", +"ki", +"edu.ki", +"biz.ki", +"net.ki", +"org.ki", +"gov.ki", +"info.ki", +"com.ki", +"km", +"org.km", +"nom.km", +"gov.km", +"prd.km", +"tm.km", +"edu.km", +"mil.km", +"ass.km", +"com.km", +"coop.km", +"asso.km", +"presse.km", +"medecin.km", +"notaires.km", +"pharmaciens.km", +"veterinaire.km", +"gouv.km", +"kn", +"net.kn", +"org.kn", +"edu.kn", +"gov.kn", +"kp", +"com.kp", +"edu.kp", +"gov.kp", +"org.kp", +"rep.kp", +"tra.kp", +"kr", +"ac.kr", +"co.kr", +"es.kr", 
+"go.kr", +"hs.kr", +"kg.kr", +"mil.kr", +"ms.kr", +"ne.kr", +"or.kr", +"pe.kr", +"re.kr", +"sc.kr", +"busan.kr", +"chungbuk.kr", +"chungnam.kr", +"daegu.kr", +"daejeon.kr", +"gangwon.kr", +"gwangju.kr", +"gyeongbuk.kr", +"gyeonggi.kr", +"gyeongnam.kr", +"incheon.kr", +"jeju.kr", +"jeonbuk.kr", +"jeonnam.kr", +"seoul.kr", +"ulsan.kr", +"kw", +"com.kw", +"edu.kw", +"emb.kw", +"gov.kw", +"ind.kw", +"net.kw", +"org.kw", +"ky", +"com.ky", +"edu.ky", +"net.ky", +"org.ky", +"kz", +"org.kz", +"edu.kz", +"net.kz", +"gov.kz", +"mil.kz", +"com.kz", +"la", +"int.la", +"net.la", +"info.la", +"edu.la", +"gov.la", +"per.la", +"com.la", +"org.la", +"lb", +"com.lb", +"edu.lb", +"gov.lb", +"net.lb", +"org.lb", +"lc", +"com.lc", +"net.lc", +"co.lc", +"org.lc", +"edu.lc", +"gov.lc", +"li", +"lk", +"gov.lk", +"sch.lk", +"net.lk", +"int.lk", +"com.lk", +"org.lk", +"edu.lk", +"ngo.lk", +"soc.lk", +"web.lk", +"ltd.lk", +"assn.lk", +"grp.lk", +"hotel.lk", +"ac.lk", +"lr", +"com.lr", +"edu.lr", +"gov.lr", +"org.lr", +"net.lr", +"ls", +"ac.ls", +"biz.ls", +"co.ls", +"edu.ls", +"gov.ls", +"info.ls", +"net.ls", +"org.ls", +"sc.ls", +"lt", +"gov.lt", +"lu", +"lv", +"com.lv", +"edu.lv", +"gov.lv", +"org.lv", +"mil.lv", +"id.lv", +"net.lv", +"asn.lv", +"conf.lv", +"ly", +"com.ly", +"net.ly", +"gov.ly", +"plc.ly", +"edu.ly", +"sch.ly", +"med.ly", +"org.ly", +"id.ly", +"ma", +"co.ma", +"net.ma", +"gov.ma", +"org.ma", +"ac.ma", +"press.ma", +"mc", +"tm.mc", +"asso.mc", +"md", +"me", +"co.me", +"net.me", +"org.me", +"edu.me", +"ac.me", +"gov.me", +"its.me", +"priv.me", +"mg", +"org.mg", +"nom.mg", +"gov.mg", +"prd.mg", +"tm.mg", +"edu.mg", +"mil.mg", +"com.mg", +"co.mg", +"mh", +"mil", +"mk", +"com.mk", +"org.mk", +"net.mk", +"edu.mk", +"gov.mk", +"inf.mk", +"name.mk", +"ml", +"com.ml", +"edu.ml", +"gouv.ml", +"gov.ml", +"net.ml", +"org.ml", +"presse.ml", +"*.mm", +"mn", +"gov.mn", +"edu.mn", +"org.mn", +"mo", +"com.mo", +"net.mo", +"org.mo", +"edu.mo", +"gov.mo", +"mobi", +"mp", +"mq", +"mr", +"gov.mr", +"ms", +"com.ms", +"edu.ms", +"gov.ms", +"net.ms", +"org.ms", +"mt", +"com.mt", +"edu.mt", +"net.mt", +"org.mt", +"mu", +"com.mu", +"net.mu", +"org.mu", +"gov.mu", +"ac.mu", +"co.mu", +"or.mu", +"museum", +"academy.museum", +"agriculture.museum", +"air.museum", +"airguard.museum", +"alabama.museum", +"alaska.museum", +"amber.museum", +"ambulance.museum", +"american.museum", +"americana.museum", +"americanantiques.museum", +"americanart.museum", +"amsterdam.museum", +"and.museum", +"annefrank.museum", +"anthro.museum", +"anthropology.museum", +"antiques.museum", +"aquarium.museum", +"arboretum.museum", +"archaeological.museum", +"archaeology.museum", +"architecture.museum", +"art.museum", +"artanddesign.museum", +"artcenter.museum", +"artdeco.museum", +"arteducation.museum", +"artgallery.museum", +"arts.museum", +"artsandcrafts.museum", +"asmatart.museum", +"assassination.museum", +"assisi.museum", +"association.museum", +"astronomy.museum", +"atlanta.museum", +"austin.museum", +"australia.museum", +"automotive.museum", +"aviation.museum", +"axis.museum", +"badajoz.museum", +"baghdad.museum", +"bahn.museum", +"bale.museum", +"baltimore.museum", +"barcelona.museum", +"baseball.museum", +"basel.museum", +"baths.museum", +"bauern.museum", +"beauxarts.museum", +"beeldengeluid.museum", +"bellevue.museum", +"bergbau.museum", +"berkeley.museum", +"berlin.museum", +"bern.museum", +"bible.museum", +"bilbao.museum", +"bill.museum", +"birdart.museum", +"birthplace.museum", +"bonn.museum", +"boston.museum", +"botanical.museum", 
+"botanicalgarden.museum", +"botanicgarden.museum", +"botany.museum", +"brandywinevalley.museum", +"brasil.museum", +"bristol.museum", +"british.museum", +"britishcolumbia.museum", +"broadcast.museum", +"brunel.museum", +"brussel.museum", +"brussels.museum", +"bruxelles.museum", +"building.museum", +"burghof.museum", +"bus.museum", +"bushey.museum", +"cadaques.museum", +"california.museum", +"cambridge.museum", +"can.museum", +"canada.museum", +"capebreton.museum", +"carrier.museum", +"cartoonart.museum", +"casadelamoneda.museum", +"castle.museum", +"castres.museum", +"celtic.museum", +"center.museum", +"chattanooga.museum", +"cheltenham.museum", +"chesapeakebay.museum", +"chicago.museum", +"children.museum", +"childrens.museum", +"childrensgarden.museum", +"chiropractic.museum", +"chocolate.museum", +"christiansburg.museum", +"cincinnati.museum", +"cinema.museum", +"circus.museum", +"civilisation.museum", +"civilization.museum", +"civilwar.museum", +"clinton.museum", +"clock.museum", +"coal.museum", +"coastaldefence.museum", +"cody.museum", +"coldwar.museum", +"collection.museum", +"colonialwilliamsburg.museum", +"coloradoplateau.museum", +"columbia.museum", +"columbus.museum", +"communication.museum", +"communications.museum", +"community.museum", +"computer.museum", +"computerhistory.museum", +"comunicações.museum", +"contemporary.museum", +"contemporaryart.museum", +"convent.museum", +"copenhagen.museum", +"corporation.museum", +"correios-e-telecomunicações.museum", +"corvette.museum", +"costume.museum", +"countryestate.museum", +"county.museum", +"crafts.museum", +"cranbrook.museum", +"creation.museum", +"cultural.museum", +"culturalcenter.museum", +"culture.museum", +"cyber.museum", +"cymru.museum", +"dali.museum", +"dallas.museum", +"database.museum", +"ddr.museum", +"decorativearts.museum", +"delaware.museum", +"delmenhorst.museum", +"denmark.museum", +"depot.museum", +"design.museum", +"detroit.museum", +"dinosaur.museum", +"discovery.museum", +"dolls.museum", +"donostia.museum", +"durham.museum", +"eastafrica.museum", +"eastcoast.museum", +"education.museum", +"educational.museum", +"egyptian.museum", +"eisenbahn.museum", +"elburg.museum", +"elvendrell.museum", +"embroidery.museum", +"encyclopedic.museum", +"england.museum", +"entomology.museum", +"environment.museum", +"environmentalconservation.museum", +"epilepsy.museum", +"essex.museum", +"estate.museum", +"ethnology.museum", +"exeter.museum", +"exhibition.museum", +"family.museum", +"farm.museum", +"farmequipment.museum", +"farmers.museum", +"farmstead.museum", +"field.museum", +"figueres.museum", +"filatelia.museum", +"film.museum", +"fineart.museum", +"finearts.museum", +"finland.museum", +"flanders.museum", +"florida.museum", +"force.museum", +"fortmissoula.museum", +"fortworth.museum", +"foundation.museum", +"francaise.museum", +"frankfurt.museum", +"franziskaner.museum", +"freemasonry.museum", +"freiburg.museum", +"fribourg.museum", +"frog.museum", +"fundacio.museum", +"furniture.museum", +"gallery.museum", +"garden.museum", +"gateway.museum", +"geelvinck.museum", +"gemological.museum", +"geology.museum", +"georgia.museum", +"giessen.museum", +"glas.museum", +"glass.museum", +"gorge.museum", +"grandrapids.museum", +"graz.museum", +"guernsey.museum", +"halloffame.museum", +"hamburg.museum", +"handson.museum", +"harvestcelebration.museum", +"hawaii.museum", +"health.museum", +"heimatunduhren.museum", +"hellas.museum", +"helsinki.museum", +"hembygdsforbund.museum", +"heritage.museum", +"histoire.museum", 
+"historical.museum", +"historicalsociety.museum", +"historichouses.museum", +"historisch.museum", +"historisches.museum", +"history.museum", +"historyofscience.museum", +"horology.museum", +"house.museum", +"humanities.museum", +"illustration.museum", +"imageandsound.museum", +"indian.museum", +"indiana.museum", +"indianapolis.museum", +"indianmarket.museum", +"intelligence.museum", +"interactive.museum", +"iraq.museum", +"iron.museum", +"isleofman.museum", +"jamison.museum", +"jefferson.museum", +"jerusalem.museum", +"jewelry.museum", +"jewish.museum", +"jewishart.museum", +"jfk.museum", +"journalism.museum", +"judaica.museum", +"judygarland.museum", +"juedisches.museum", +"juif.museum", +"karate.museum", +"karikatur.museum", +"kids.museum", +"koebenhavn.museum", +"koeln.museum", +"kunst.museum", +"kunstsammlung.museum", +"kunstunddesign.museum", +"labor.museum", +"labour.museum", +"lajolla.museum", +"lancashire.museum", +"landes.museum", +"lans.museum", +"läns.museum", +"larsson.museum", +"lewismiller.museum", +"lincoln.museum", +"linz.museum", +"living.museum", +"livinghistory.museum", +"localhistory.museum", +"london.museum", +"losangeles.museum", +"louvre.museum", +"loyalist.museum", +"lucerne.museum", +"luxembourg.museum", +"luzern.museum", +"mad.museum", +"madrid.museum", +"mallorca.museum", +"manchester.museum", +"mansion.museum", +"mansions.museum", +"manx.museum", +"marburg.museum", +"maritime.museum", +"maritimo.museum", +"maryland.museum", +"marylhurst.museum", +"media.museum", +"medical.museum", +"medizinhistorisches.museum", +"meeres.museum", +"memorial.museum", +"mesaverde.museum", +"michigan.museum", +"midatlantic.museum", +"military.museum", +"mill.museum", +"miners.museum", +"mining.museum", +"minnesota.museum", +"missile.museum", +"missoula.museum", +"modern.museum", +"moma.museum", +"money.museum", +"monmouth.museum", +"monticello.museum", +"montreal.museum", +"moscow.museum", +"motorcycle.museum", +"muenchen.museum", +"muenster.museum", +"mulhouse.museum", +"muncie.museum", +"museet.museum", +"museumcenter.museum", +"museumvereniging.museum", +"music.museum", +"national.museum", +"nationalfirearms.museum", +"nationalheritage.museum", +"nativeamerican.museum", +"naturalhistory.museum", +"naturalhistorymuseum.museum", +"naturalsciences.museum", +"nature.museum", +"naturhistorisches.museum", +"natuurwetenschappen.museum", +"naumburg.museum", +"naval.museum", +"nebraska.museum", +"neues.museum", +"newhampshire.museum", +"newjersey.museum", +"newmexico.museum", +"newport.museum", +"newspaper.museum", +"newyork.museum", +"niepce.museum", +"norfolk.museum", +"north.museum", +"nrw.museum", +"nyc.museum", +"nyny.museum", +"oceanographic.museum", +"oceanographique.museum", +"omaha.museum", +"online.museum", +"ontario.museum", +"openair.museum", +"oregon.museum", +"oregontrail.museum", +"otago.museum", +"oxford.museum", +"pacific.museum", +"paderborn.museum", +"palace.museum", +"paleo.museum", +"palmsprings.museum", +"panama.museum", +"paris.museum", +"pasadena.museum", +"pharmacy.museum", +"philadelphia.museum", +"philadelphiaarea.museum", +"philately.museum", +"phoenix.museum", +"photography.museum", +"pilots.museum", +"pittsburgh.museum", +"planetarium.museum", +"plantation.museum", +"plants.museum", +"plaza.museum", +"portal.museum", +"portland.museum", +"portlligat.museum", +"posts-and-telecommunications.museum", +"preservation.museum", +"presidio.museum", +"press.museum", +"project.museum", +"public.museum", +"pubol.museum", +"quebec.museum", +"railroad.museum", 
+"railway.museum", +"research.museum", +"resistance.museum", +"riodejaneiro.museum", +"rochester.museum", +"rockart.museum", +"roma.museum", +"russia.museum", +"saintlouis.museum", +"salem.museum", +"salvadordali.museum", +"salzburg.museum", +"sandiego.museum", +"sanfrancisco.museum", +"santabarbara.museum", +"santacruz.museum", +"santafe.museum", +"saskatchewan.museum", +"satx.museum", +"savannahga.museum", +"schlesisches.museum", +"schoenbrunn.museum", +"schokoladen.museum", +"school.museum", +"schweiz.museum", +"science.museum", +"scienceandhistory.museum", +"scienceandindustry.museum", +"sciencecenter.museum", +"sciencecenters.museum", +"science-fiction.museum", +"sciencehistory.museum", +"sciences.museum", +"sciencesnaturelles.museum", +"scotland.museum", +"seaport.museum", +"settlement.museum", +"settlers.museum", +"shell.museum", +"sherbrooke.museum", +"sibenik.museum", +"silk.museum", +"ski.museum", +"skole.museum", +"society.museum", +"sologne.museum", +"soundandvision.museum", +"southcarolina.museum", +"southwest.museum", +"space.museum", +"spy.museum", +"square.museum", +"stadt.museum", +"stalbans.museum", +"starnberg.museum", +"state.museum", +"stateofdelaware.museum", +"station.museum", +"steam.museum", +"steiermark.museum", +"stjohn.museum", +"stockholm.museum", +"stpetersburg.museum", +"stuttgart.museum", +"suisse.museum", +"surgeonshall.museum", +"surrey.museum", +"svizzera.museum", +"sweden.museum", +"sydney.museum", +"tank.museum", +"tcm.museum", +"technology.museum", +"telekommunikation.museum", +"television.museum", +"texas.museum", +"textile.museum", +"theater.museum", +"time.museum", +"timekeeping.museum", +"topology.museum", +"torino.museum", +"touch.museum", +"town.museum", +"transport.museum", +"tree.museum", +"trolley.museum", +"trust.museum", +"trustee.museum", +"uhren.museum", +"ulm.museum", +"undersea.museum", +"university.museum", +"usa.museum", +"usantiques.museum", +"usarts.museum", +"uscountryestate.museum", +"usculture.museum", +"usdecorativearts.museum", +"usgarden.museum", +"ushistory.museum", +"ushuaia.museum", +"uslivinghistory.museum", +"utah.museum", +"uvic.museum", +"valley.museum", +"vantaa.museum", +"versailles.museum", +"viking.museum", +"village.museum", +"virginia.museum", +"virtual.museum", +"virtuel.museum", +"vlaanderen.museum", +"volkenkunde.museum", +"wales.museum", +"wallonie.museum", +"war.museum", +"washingtondc.museum", +"watchandclock.museum", +"watch-and-clock.museum", +"western.museum", +"westfalen.museum", +"whaling.museum", +"wildlife.museum", +"williamsburg.museum", +"windmill.museum", +"workshop.museum", +"york.museum", +"yorkshire.museum", +"yosemite.museum", +"youth.museum", +"zoological.museum", +"zoology.museum", +"ירושלים.museum", +"иком.museum", +"mv", +"aero.mv", +"biz.mv", +"com.mv", +"coop.mv", +"edu.mv", +"gov.mv", +"info.mv", +"int.mv", +"mil.mv", +"museum.mv", +"name.mv", +"net.mv", +"org.mv", +"pro.mv", +"mw", +"ac.mw", +"biz.mw", +"co.mw", +"com.mw", +"coop.mw", +"edu.mw", +"gov.mw", +"int.mw", +"museum.mw", +"net.mw", +"org.mw", +"mx", +"com.mx", +"org.mx", +"gob.mx", +"edu.mx", +"net.mx", +"my", +"biz.my", +"com.my", +"edu.my", +"gov.my", +"mil.my", +"name.my", +"net.my", +"org.my", +"mz", +"ac.mz", +"adv.mz", +"co.mz", +"edu.mz", +"gov.mz", +"mil.mz", +"net.mz", +"org.mz", +"na", +"info.na", +"pro.na", +"name.na", +"school.na", +"or.na", +"dr.na", +"us.na", +"mx.na", +"ca.na", +"in.na", +"cc.na", +"tv.na", +"ws.na", +"mobi.na", +"co.na", +"com.na", +"org.na", +"name", +"nc", +"asso.nc", +"nom.nc", +"ne", +"net", 
+"nf", +"com.nf", +"net.nf", +"per.nf", +"rec.nf", +"web.nf", +"arts.nf", +"firm.nf", +"info.nf", +"other.nf", +"store.nf", +"ng", +"com.ng", +"edu.ng", +"gov.ng", +"i.ng", +"mil.ng", +"mobi.ng", +"name.ng", +"net.ng", +"org.ng", +"sch.ng", +"ni", +"ac.ni", +"biz.ni", +"co.ni", +"com.ni", +"edu.ni", +"gob.ni", +"in.ni", +"info.ni", +"int.ni", +"mil.ni", +"net.ni", +"nom.ni", +"org.ni", +"web.ni", +"nl", +"no", +"fhs.no", +"vgs.no", +"fylkesbibl.no", +"folkebibl.no", +"museum.no", +"idrett.no", +"priv.no", +"mil.no", +"stat.no", +"dep.no", +"kommune.no", +"herad.no", +"aa.no", +"ah.no", +"bu.no", +"fm.no", +"hl.no", +"hm.no", +"jan-mayen.no", +"mr.no", +"nl.no", +"nt.no", +"of.no", +"ol.no", +"oslo.no", +"rl.no", +"sf.no", +"st.no", +"svalbard.no", +"tm.no", +"tr.no", +"va.no", +"vf.no", +"gs.aa.no", +"gs.ah.no", +"gs.bu.no", +"gs.fm.no", +"gs.hl.no", +"gs.hm.no", +"gs.jan-mayen.no", +"gs.mr.no", +"gs.nl.no", +"gs.nt.no", +"gs.of.no", +"gs.ol.no", +"gs.oslo.no", +"gs.rl.no", +"gs.sf.no", +"gs.st.no", +"gs.svalbard.no", +"gs.tm.no", +"gs.tr.no", +"gs.va.no", +"gs.vf.no", +"akrehamn.no", +"åkrehamn.no", +"algard.no", +"ålgård.no", +"arna.no", +"brumunddal.no", +"bryne.no", +"bronnoysund.no", +"brønnøysund.no", +"drobak.no", +"drøbak.no", +"egersund.no", +"fetsund.no", +"floro.no", +"florø.no", +"fredrikstad.no", +"hokksund.no", +"honefoss.no", +"hønefoss.no", +"jessheim.no", +"jorpeland.no", +"jørpeland.no", +"kirkenes.no", +"kopervik.no", +"krokstadelva.no", +"langevag.no", +"langevåg.no", +"leirvik.no", +"mjondalen.no", +"mjøndalen.no", +"mo-i-rana.no", +"mosjoen.no", +"mosjøen.no", +"nesoddtangen.no", +"orkanger.no", +"osoyro.no", +"osøyro.no", +"raholt.no", +"råholt.no", +"sandnessjoen.no", +"sandnessjøen.no", +"skedsmokorset.no", +"slattum.no", +"spjelkavik.no", +"stathelle.no", +"stavern.no", +"stjordalshalsen.no", +"stjørdalshalsen.no", +"tananger.no", +"tranby.no", +"vossevangen.no", +"afjord.no", +"åfjord.no", +"agdenes.no", +"al.no", +"ål.no", +"alesund.no", +"ålesund.no", +"alstahaug.no", +"alta.no", +"áltá.no", +"alaheadju.no", +"álaheadju.no", +"alvdal.no", +"amli.no", +"åmli.no", +"amot.no", +"åmot.no", +"andebu.no", +"andoy.no", +"andøy.no", +"andasuolo.no", +"ardal.no", +"årdal.no", +"aremark.no", +"arendal.no", +"ås.no", +"aseral.no", +"åseral.no", +"asker.no", +"askim.no", +"askvoll.no", +"askoy.no", +"askøy.no", +"asnes.no", +"åsnes.no", +"audnedaln.no", +"aukra.no", +"aure.no", +"aurland.no", +"aurskog-holand.no", +"aurskog-høland.no", +"austevoll.no", +"austrheim.no", +"averoy.no", +"averøy.no", +"balestrand.no", +"ballangen.no", +"balat.no", +"bálát.no", +"balsfjord.no", +"bahccavuotna.no", +"báhccavuotna.no", +"bamble.no", +"bardu.no", +"beardu.no", +"beiarn.no", +"bajddar.no", +"bájddar.no", +"baidar.no", +"báidár.no", +"berg.no", +"bergen.no", +"berlevag.no", +"berlevåg.no", +"bearalvahki.no", +"bearalváhki.no", +"bindal.no", +"birkenes.no", +"bjarkoy.no", +"bjarkøy.no", +"bjerkreim.no", +"bjugn.no", +"bodo.no", +"bodø.no", +"badaddja.no", +"bådåddjå.no", +"budejju.no", +"bokn.no", +"bremanger.no", +"bronnoy.no", +"brønnøy.no", +"bygland.no", +"bykle.no", +"barum.no", +"bærum.no", +"bo.telemark.no", +"bø.telemark.no", +"bo.nordland.no", +"bø.nordland.no", +"bievat.no", +"bievát.no", +"bomlo.no", +"bømlo.no", +"batsfjord.no", +"båtsfjord.no", +"bahcavuotna.no", +"báhcavuotna.no", +"dovre.no", +"drammen.no", +"drangedal.no", +"dyroy.no", +"dyrøy.no", +"donna.no", +"dønna.no", +"eid.no", +"eidfjord.no", +"eidsberg.no", +"eidskog.no", +"eidsvoll.no", +"eigersund.no", 
+"elverum.no", +"enebakk.no", +"engerdal.no", +"etne.no", +"etnedal.no", +"evenes.no", +"evenassi.no", +"evenášši.no", +"evje-og-hornnes.no", +"farsund.no", +"fauske.no", +"fuossko.no", +"fuoisku.no", +"fedje.no", +"fet.no", +"finnoy.no", +"finnøy.no", +"fitjar.no", +"fjaler.no", +"fjell.no", +"flakstad.no", +"flatanger.no", +"flekkefjord.no", +"flesberg.no", +"flora.no", +"fla.no", +"flå.no", +"folldal.no", +"forsand.no", +"fosnes.no", +"frei.no", +"frogn.no", +"froland.no", +"frosta.no", +"frana.no", +"fræna.no", +"froya.no", +"frøya.no", +"fusa.no", +"fyresdal.no", +"forde.no", +"førde.no", +"gamvik.no", +"gangaviika.no", +"gáŋgaviika.no", +"gaular.no", +"gausdal.no", +"gildeskal.no", +"gildeskål.no", +"giske.no", +"gjemnes.no", +"gjerdrum.no", +"gjerstad.no", +"gjesdal.no", +"gjovik.no", +"gjøvik.no", +"gloppen.no", +"gol.no", +"gran.no", +"grane.no", +"granvin.no", +"gratangen.no", +"grimstad.no", +"grong.no", +"kraanghke.no", +"kråanghke.no", +"grue.no", +"gulen.no", +"hadsel.no", +"halden.no", +"halsa.no", +"hamar.no", +"hamaroy.no", +"habmer.no", +"hábmer.no", +"hapmir.no", +"hápmir.no", +"hammerfest.no", +"hammarfeasta.no", +"hámmárfeasta.no", +"haram.no", +"hareid.no", +"harstad.no", +"hasvik.no", +"aknoluokta.no", +"ákŋoluokta.no", +"hattfjelldal.no", +"aarborte.no", +"haugesund.no", +"hemne.no", +"hemnes.no", +"hemsedal.no", +"heroy.more-og-romsdal.no", +"herøy.møre-og-romsdal.no", +"heroy.nordland.no", +"herøy.nordland.no", +"hitra.no", +"hjartdal.no", +"hjelmeland.no", +"hobol.no", +"hobøl.no", +"hof.no", +"hol.no", +"hole.no", +"holmestrand.no", +"holtalen.no", +"holtålen.no", +"hornindal.no", +"horten.no", +"hurdal.no", +"hurum.no", +"hvaler.no", +"hyllestad.no", +"hagebostad.no", +"hægebostad.no", +"hoyanger.no", +"høyanger.no", +"hoylandet.no", +"høylandet.no", +"ha.no", +"hå.no", +"ibestad.no", +"inderoy.no", +"inderøy.no", +"iveland.no", +"jevnaker.no", +"jondal.no", +"jolster.no", +"jølster.no", +"karasjok.no", +"karasjohka.no", +"kárášjohka.no", +"karlsoy.no", +"galsa.no", +"gálsá.no", +"karmoy.no", +"karmøy.no", +"kautokeino.no", +"guovdageaidnu.no", +"klepp.no", +"klabu.no", +"klæbu.no", +"kongsberg.no", +"kongsvinger.no", +"kragero.no", +"kragerø.no", +"kristiansand.no", +"kristiansund.no", +"krodsherad.no", +"krødsherad.no", +"kvalsund.no", +"rahkkeravju.no", +"ráhkkerávju.no", +"kvam.no", +"kvinesdal.no", +"kvinnherad.no", +"kviteseid.no", +"kvitsoy.no", +"kvitsøy.no", +"kvafjord.no", +"kvæfjord.no", +"giehtavuoatna.no", +"kvanangen.no", +"kvænangen.no", +"navuotna.no", +"návuotna.no", +"kafjord.no", +"kåfjord.no", +"gaivuotna.no", +"gáivuotna.no", +"larvik.no", +"lavangen.no", +"lavagis.no", +"loabat.no", +"loabát.no", +"lebesby.no", +"davvesiida.no", +"leikanger.no", +"leirfjord.no", +"leka.no", +"leksvik.no", +"lenvik.no", +"leangaviika.no", +"leaŋgaviika.no", +"lesja.no", +"levanger.no", +"lier.no", +"lierne.no", +"lillehammer.no", +"lillesand.no", +"lindesnes.no", +"lindas.no", +"lindås.no", +"lom.no", +"loppa.no", +"lahppi.no", +"láhppi.no", +"lund.no", +"lunner.no", +"luroy.no", +"lurøy.no", +"luster.no", +"lyngdal.no", +"lyngen.no", +"ivgu.no", +"lardal.no", +"lerdal.no", +"lærdal.no", +"lodingen.no", +"lødingen.no", +"lorenskog.no", +"lørenskog.no", +"loten.no", +"løten.no", +"malvik.no", +"masoy.no", +"måsøy.no", +"muosat.no", +"muosát.no", +"mandal.no", +"marker.no", +"marnardal.no", +"masfjorden.no", +"meland.no", +"meldal.no", +"melhus.no", +"meloy.no", +"meløy.no", +"meraker.no", +"meråker.no", +"moareke.no", +"moåreke.no", +"midsund.no", 
+"midtre-gauldal.no", +"modalen.no", +"modum.no", +"molde.no", +"moskenes.no", +"moss.no", +"mosvik.no", +"malselv.no", +"målselv.no", +"malatvuopmi.no", +"málatvuopmi.no", +"namdalseid.no", +"aejrie.no", +"namsos.no", +"namsskogan.no", +"naamesjevuemie.no", +"nååmesjevuemie.no", +"laakesvuemie.no", +"nannestad.no", +"narvik.no", +"narviika.no", +"naustdal.no", +"nedre-eiker.no", +"nes.akershus.no", +"nes.buskerud.no", +"nesna.no", +"nesodden.no", +"nesseby.no", +"unjarga.no", +"unjárga.no", +"nesset.no", +"nissedal.no", +"nittedal.no", +"nord-aurdal.no", +"nord-fron.no", +"nord-odal.no", +"norddal.no", +"nordkapp.no", +"davvenjarga.no", +"davvenjárga.no", +"nordre-land.no", +"nordreisa.no", +"raisa.no", +"ráisa.no", +"nore-og-uvdal.no", +"notodden.no", +"naroy.no", +"nærøy.no", +"notteroy.no", +"nøtterøy.no", +"odda.no", +"oksnes.no", +"øksnes.no", +"oppdal.no", +"oppegard.no", +"oppegård.no", +"orkdal.no", +"orland.no", +"ørland.no", +"orskog.no", +"ørskog.no", +"orsta.no", +"ørsta.no", +"os.hedmark.no", +"os.hordaland.no", +"osen.no", +"osteroy.no", +"osterøy.no", +"ostre-toten.no", +"østre-toten.no", +"overhalla.no", +"ovre-eiker.no", +"øvre-eiker.no", +"oyer.no", +"øyer.no", +"oygarden.no", +"øygarden.no", +"oystre-slidre.no", +"øystre-slidre.no", +"porsanger.no", +"porsangu.no", +"porsáŋgu.no", +"porsgrunn.no", +"radoy.no", +"radøy.no", +"rakkestad.no", +"rana.no", +"ruovat.no", +"randaberg.no", +"rauma.no", +"rendalen.no", +"rennebu.no", +"rennesoy.no", +"rennesøy.no", +"rindal.no", +"ringebu.no", +"ringerike.no", +"ringsaker.no", +"rissa.no", +"risor.no", +"risør.no", +"roan.no", +"rollag.no", +"rygge.no", +"ralingen.no", +"rælingen.no", +"rodoy.no", +"rødøy.no", +"romskog.no", +"rømskog.no", +"roros.no", +"røros.no", +"rost.no", +"røst.no", +"royken.no", +"røyken.no", +"royrvik.no", +"røyrvik.no", +"rade.no", +"råde.no", +"salangen.no", +"siellak.no", +"saltdal.no", +"salat.no", +"sálát.no", +"sálat.no", +"samnanger.no", +"sande.more-og-romsdal.no", +"sande.møre-og-romsdal.no", +"sande.vestfold.no", +"sandefjord.no", +"sandnes.no", +"sandoy.no", +"sandøy.no", +"sarpsborg.no", +"sauda.no", +"sauherad.no", +"sel.no", +"selbu.no", +"selje.no", +"seljord.no", +"sigdal.no", +"siljan.no", +"sirdal.no", +"skaun.no", +"skedsmo.no", +"ski.no", +"skien.no", +"skiptvet.no", +"skjervoy.no", +"skjervøy.no", +"skierva.no", +"skiervá.no", +"skjak.no", +"skjåk.no", +"skodje.no", +"skanland.no", +"skånland.no", +"skanit.no", +"skánit.no", +"smola.no", +"smøla.no", +"snillfjord.no", +"snasa.no", +"snåsa.no", +"snoasa.no", +"snaase.no", +"snåase.no", +"sogndal.no", +"sokndal.no", +"sola.no", +"solund.no", +"songdalen.no", +"sortland.no", +"spydeberg.no", +"stange.no", +"stavanger.no", +"steigen.no", +"steinkjer.no", +"stjordal.no", +"stjørdal.no", +"stokke.no", +"stor-elvdal.no", +"stord.no", +"stordal.no", +"storfjord.no", +"omasvuotna.no", +"strand.no", +"stranda.no", +"stryn.no", +"sula.no", +"suldal.no", +"sund.no", +"sunndal.no", +"surnadal.no", +"sveio.no", +"svelvik.no", +"sykkylven.no", +"sogne.no", +"søgne.no", +"somna.no", +"sømna.no", +"sondre-land.no", +"søndre-land.no", +"sor-aurdal.no", +"sør-aurdal.no", +"sor-fron.no", +"sør-fron.no", +"sor-odal.no", +"sør-odal.no", +"sor-varanger.no", +"sør-varanger.no", +"matta-varjjat.no", +"mátta-várjjat.no", +"sorfold.no", +"sørfold.no", +"sorreisa.no", +"sørreisa.no", +"sorum.no", +"sørum.no", +"tana.no", +"deatnu.no", +"time.no", +"tingvoll.no", +"tinn.no", +"tjeldsund.no", +"dielddanuorri.no", +"tjome.no", +"tjøme.no", +"tokke.no", 
+"tolga.no", +"torsken.no", +"tranoy.no", +"tranøy.no", +"tromso.no", +"tromsø.no", +"tromsa.no", +"romsa.no", +"trondheim.no", +"troandin.no", +"trysil.no", +"trana.no", +"træna.no", +"trogstad.no", +"trøgstad.no", +"tvedestrand.no", +"tydal.no", +"tynset.no", +"tysfjord.no", +"divtasvuodna.no", +"divttasvuotna.no", +"tysnes.no", +"tysvar.no", +"tysvær.no", +"tonsberg.no", +"tønsberg.no", +"ullensaker.no", +"ullensvang.no", +"ulvik.no", +"utsira.no", +"vadso.no", +"vadsø.no", +"cahcesuolo.no", +"čáhcesuolo.no", +"vaksdal.no", +"valle.no", +"vang.no", +"vanylven.no", +"vardo.no", +"vardø.no", +"varggat.no", +"várggát.no", +"vefsn.no", +"vaapste.no", +"vega.no", +"vegarshei.no", +"vegårshei.no", +"vennesla.no", +"verdal.no", +"verran.no", +"vestby.no", +"vestnes.no", +"vestre-slidre.no", +"vestre-toten.no", +"vestvagoy.no", +"vestvågøy.no", +"vevelstad.no", +"vik.no", +"vikna.no", +"vindafjord.no", +"volda.no", +"voss.no", +"varoy.no", +"værøy.no", +"vagan.no", +"vågan.no", +"voagat.no", +"vagsoy.no", +"vågsøy.no", +"vaga.no", +"vågå.no", +"valer.ostfold.no", +"våler.østfold.no", +"valer.hedmark.no", +"våler.hedmark.no", +"*.np", +"nr", +"biz.nr", +"info.nr", +"gov.nr", +"edu.nr", +"org.nr", +"net.nr", +"com.nr", +"nu", +"nz", +"ac.nz", +"co.nz", +"cri.nz", +"geek.nz", +"gen.nz", +"govt.nz", +"health.nz", +"iwi.nz", +"kiwi.nz", +"maori.nz", +"mil.nz", +"māori.nz", +"net.nz", +"org.nz", +"parliament.nz", +"school.nz", +"om", +"co.om", +"com.om", +"edu.om", +"gov.om", +"med.om", +"museum.om", +"net.om", +"org.om", +"pro.om", +"onion", +"org", +"pa", +"ac.pa", +"gob.pa", +"com.pa", +"org.pa", +"sld.pa", +"edu.pa", +"net.pa", +"ing.pa", +"abo.pa", +"med.pa", +"nom.pa", +"pe", +"edu.pe", +"gob.pe", +"nom.pe", +"mil.pe", +"org.pe", +"com.pe", +"net.pe", +"pf", +"com.pf", +"org.pf", +"edu.pf", +"*.pg", +"ph", +"com.ph", +"net.ph", +"org.ph", +"gov.ph", +"edu.ph", +"ngo.ph", +"mil.ph", +"i.ph", +"pk", +"com.pk", +"net.pk", +"edu.pk", +"org.pk", +"fam.pk", +"biz.pk", +"web.pk", +"gov.pk", +"gob.pk", +"gok.pk", +"gon.pk", +"gop.pk", +"gos.pk", +"info.pk", +"pl", +"com.pl", +"net.pl", +"org.pl", +"aid.pl", +"agro.pl", +"atm.pl", +"auto.pl", +"biz.pl", +"edu.pl", +"gmina.pl", +"gsm.pl", +"info.pl", +"mail.pl", +"miasta.pl", +"media.pl", +"mil.pl", +"nieruchomosci.pl", +"nom.pl", +"pc.pl", +"powiat.pl", +"priv.pl", +"realestate.pl", +"rel.pl", +"sex.pl", +"shop.pl", +"sklep.pl", +"sos.pl", +"szkola.pl", +"targi.pl", +"tm.pl", +"tourism.pl", +"travel.pl", +"turystyka.pl", +"gov.pl", +"ap.gov.pl", +"ic.gov.pl", +"is.gov.pl", +"us.gov.pl", +"kmpsp.gov.pl", +"kppsp.gov.pl", +"kwpsp.gov.pl", +"psp.gov.pl", +"wskr.gov.pl", +"kwp.gov.pl", +"mw.gov.pl", +"ug.gov.pl", +"um.gov.pl", +"umig.gov.pl", +"ugim.gov.pl", +"upow.gov.pl", +"uw.gov.pl", +"starostwo.gov.pl", +"pa.gov.pl", +"po.gov.pl", +"psse.gov.pl", +"pup.gov.pl", +"rzgw.gov.pl", +"sa.gov.pl", +"so.gov.pl", +"sr.gov.pl", +"wsa.gov.pl", +"sko.gov.pl", +"uzs.gov.pl", +"wiih.gov.pl", +"winb.gov.pl", +"pinb.gov.pl", +"wios.gov.pl", +"witd.gov.pl", +"wzmiuw.gov.pl", +"piw.gov.pl", +"wiw.gov.pl", +"griw.gov.pl", +"wif.gov.pl", +"oum.gov.pl", +"sdn.gov.pl", +"zp.gov.pl", +"uppo.gov.pl", +"mup.gov.pl", +"wuoz.gov.pl", +"konsulat.gov.pl", +"oirm.gov.pl", +"augustow.pl", +"babia-gora.pl", +"bedzin.pl", +"beskidy.pl", +"bialowieza.pl", +"bialystok.pl", +"bielawa.pl", +"bieszczady.pl", +"boleslawiec.pl", +"bydgoszcz.pl", +"bytom.pl", +"cieszyn.pl", +"czeladz.pl", +"czest.pl", +"dlugoleka.pl", +"elblag.pl", +"elk.pl", +"glogow.pl", +"gniezno.pl", +"gorlice.pl", 
+"grajewo.pl", +"ilawa.pl", +"jaworzno.pl", +"jelenia-gora.pl", +"jgora.pl", +"kalisz.pl", +"kazimierz-dolny.pl", +"karpacz.pl", +"kartuzy.pl", +"kaszuby.pl", +"katowice.pl", +"kepno.pl", +"ketrzyn.pl", +"klodzko.pl", +"kobierzyce.pl", +"kolobrzeg.pl", +"konin.pl", +"konskowola.pl", +"kutno.pl", +"lapy.pl", +"lebork.pl", +"legnica.pl", +"lezajsk.pl", +"limanowa.pl", +"lomza.pl", +"lowicz.pl", +"lubin.pl", +"lukow.pl", +"malbork.pl", +"malopolska.pl", +"mazowsze.pl", +"mazury.pl", +"mielec.pl", +"mielno.pl", +"mragowo.pl", +"naklo.pl", +"nowaruda.pl", +"nysa.pl", +"olawa.pl", +"olecko.pl", +"olkusz.pl", +"olsztyn.pl", +"opoczno.pl", +"opole.pl", +"ostroda.pl", +"ostroleka.pl", +"ostrowiec.pl", +"ostrowwlkp.pl", +"pila.pl", +"pisz.pl", +"podhale.pl", +"podlasie.pl", +"polkowice.pl", +"pomorze.pl", +"pomorskie.pl", +"prochowice.pl", +"pruszkow.pl", +"przeworsk.pl", +"pulawy.pl", +"radom.pl", +"rawa-maz.pl", +"rybnik.pl", +"rzeszow.pl", +"sanok.pl", +"sejny.pl", +"slask.pl", +"slupsk.pl", +"sosnowiec.pl", +"stalowa-wola.pl", +"skoczow.pl", +"starachowice.pl", +"stargard.pl", +"suwalki.pl", +"swidnica.pl", +"swiebodzin.pl", +"swinoujscie.pl", +"szczecin.pl", +"szczytno.pl", +"tarnobrzeg.pl", +"tgory.pl", +"turek.pl", +"tychy.pl", +"ustka.pl", +"walbrzych.pl", +"warmia.pl", +"warszawa.pl", +"waw.pl", +"wegrow.pl", +"wielun.pl", +"wlocl.pl", +"wloclawek.pl", +"wodzislaw.pl", +"wolomin.pl", +"wroclaw.pl", +"zachpomor.pl", +"zagan.pl", +"zarow.pl", +"zgora.pl", +"zgorzelec.pl", +"pm", +"pn", +"gov.pn", +"co.pn", +"org.pn", +"edu.pn", +"net.pn", +"post", +"pr", +"com.pr", +"net.pr", +"org.pr", +"gov.pr", +"edu.pr", +"isla.pr", +"pro.pr", +"biz.pr", +"info.pr", +"name.pr", +"est.pr", +"prof.pr", +"ac.pr", +"pro", +"aaa.pro", +"aca.pro", +"acct.pro", +"avocat.pro", +"bar.pro", +"cpa.pro", +"eng.pro", +"jur.pro", +"law.pro", +"med.pro", +"recht.pro", +"ps", +"edu.ps", +"gov.ps", +"sec.ps", +"plo.ps", +"com.ps", +"org.ps", +"net.ps", +"pt", +"net.pt", +"gov.pt", +"org.pt", +"edu.pt", +"int.pt", +"publ.pt", +"com.pt", +"nome.pt", +"pw", +"co.pw", +"ne.pw", +"or.pw", +"ed.pw", +"go.pw", +"belau.pw", +"py", +"com.py", +"coop.py", +"edu.py", +"gov.py", +"mil.py", +"net.py", +"org.py", +"qa", +"com.qa", +"edu.qa", +"gov.qa", +"mil.qa", +"name.qa", +"net.qa", +"org.qa", +"sch.qa", +"re", +"asso.re", +"com.re", +"nom.re", +"ro", +"arts.ro", +"com.ro", +"firm.ro", +"info.ro", +"nom.ro", +"nt.ro", +"org.ro", +"rec.ro", +"store.ro", +"tm.ro", +"www.ro", +"rs", +"ac.rs", +"co.rs", +"edu.rs", +"gov.rs", +"in.rs", +"org.rs", +"ru", +"rw", +"ac.rw", +"co.rw", +"coop.rw", +"gov.rw", +"mil.rw", +"net.rw", +"org.rw", +"sa", +"com.sa", +"net.sa", +"org.sa", +"gov.sa", +"med.sa", +"pub.sa", +"edu.sa", +"sch.sa", +"sb", +"com.sb", +"edu.sb", +"gov.sb", +"net.sb", +"org.sb", +"sc", +"com.sc", +"gov.sc", +"net.sc", +"org.sc", +"edu.sc", +"sd", +"com.sd", +"net.sd", +"org.sd", +"edu.sd", +"med.sd", +"tv.sd", +"gov.sd", +"info.sd", +"se", +"a.se", +"ac.se", +"b.se", +"bd.se", +"brand.se", +"c.se", +"d.se", +"e.se", +"f.se", +"fh.se", +"fhsk.se", +"fhv.se", +"g.se", +"h.se", +"i.se", +"k.se", +"komforb.se", +"kommunalforbund.se", +"komvux.se", +"l.se", +"lanbib.se", +"m.se", +"n.se", +"naturbruksgymn.se", +"o.se", +"org.se", +"p.se", +"parti.se", +"pp.se", +"press.se", +"r.se", +"s.se", +"t.se", +"tm.se", +"u.se", +"w.se", +"x.se", +"y.se", +"z.se", +"sg", +"com.sg", +"net.sg", +"org.sg", +"gov.sg", +"edu.sg", +"per.sg", +"sh", +"com.sh", +"net.sh", +"gov.sh", +"org.sh", +"mil.sh", +"si", +"sj", +"sk", +"sl", +"com.sl", 
+"net.sl", +"edu.sl", +"gov.sl", +"org.sl", +"sm", +"sn", +"art.sn", +"com.sn", +"edu.sn", +"gouv.sn", +"org.sn", +"perso.sn", +"univ.sn", +"so", +"com.so", +"edu.so", +"gov.so", +"me.so", +"net.so", +"org.so", +"sr", +"ss", +"biz.ss", +"com.ss", +"edu.ss", +"gov.ss", +"me.ss", +"net.ss", +"org.ss", +"sch.ss", +"st", +"co.st", +"com.st", +"consulado.st", +"edu.st", +"embaixada.st", +"mil.st", +"net.st", +"org.st", +"principe.st", +"saotome.st", +"store.st", +"su", +"sv", +"com.sv", +"edu.sv", +"gob.sv", +"org.sv", +"red.sv", +"sx", +"gov.sx", +"sy", +"edu.sy", +"gov.sy", +"net.sy", +"mil.sy", +"com.sy", +"org.sy", +"sz", +"co.sz", +"ac.sz", +"org.sz", +"tc", +"td", +"tel", +"tf", +"tg", +"th", +"ac.th", +"co.th", +"go.th", +"in.th", +"mi.th", +"net.th", +"or.th", +"tj", +"ac.tj", +"biz.tj", +"co.tj", +"com.tj", +"edu.tj", +"go.tj", +"gov.tj", +"int.tj", +"mil.tj", +"name.tj", +"net.tj", +"nic.tj", +"org.tj", +"test.tj", +"web.tj", +"tk", +"tl", +"gov.tl", +"tm", +"com.tm", +"co.tm", +"org.tm", +"net.tm", +"nom.tm", +"gov.tm", +"mil.tm", +"edu.tm", +"tn", +"com.tn", +"ens.tn", +"fin.tn", +"gov.tn", +"ind.tn", +"info.tn", +"intl.tn", +"mincom.tn", +"nat.tn", +"net.tn", +"org.tn", +"perso.tn", +"tourism.tn", +"to", +"com.to", +"gov.to", +"net.to", +"org.to", +"edu.to", +"mil.to", +"tr", +"av.tr", +"bbs.tr", +"bel.tr", +"biz.tr", +"com.tr", +"dr.tr", +"edu.tr", +"gen.tr", +"gov.tr", +"info.tr", +"mil.tr", +"k12.tr", +"kep.tr", +"name.tr", +"net.tr", +"org.tr", +"pol.tr", +"tel.tr", +"tsk.tr", +"tv.tr", +"web.tr", +"nc.tr", +"gov.nc.tr", +"tt", +"co.tt", +"com.tt", +"org.tt", +"net.tt", +"biz.tt", +"info.tt", +"pro.tt", +"int.tt", +"coop.tt", +"jobs.tt", +"mobi.tt", +"travel.tt", +"museum.tt", +"aero.tt", +"name.tt", +"gov.tt", +"edu.tt", +"tv", +"tw", +"edu.tw", +"gov.tw", +"mil.tw", +"com.tw", +"net.tw", +"org.tw", +"idv.tw", +"game.tw", +"ebiz.tw", +"club.tw", +"網路.tw", +"組織.tw", +"商業.tw", +"tz", +"ac.tz", +"co.tz", +"go.tz", +"hotel.tz", +"info.tz", +"me.tz", +"mil.tz", +"mobi.tz", +"ne.tz", +"or.tz", +"sc.tz", +"tv.tz", +"ua", +"com.ua", +"edu.ua", +"gov.ua", +"in.ua", +"net.ua", +"org.ua", +"cherkassy.ua", +"cherkasy.ua", +"chernigov.ua", +"chernihiv.ua", +"chernivtsi.ua", +"chernovtsy.ua", +"ck.ua", +"cn.ua", +"cr.ua", +"crimea.ua", +"cv.ua", +"dn.ua", +"dnepropetrovsk.ua", +"dnipropetrovsk.ua", +"donetsk.ua", +"dp.ua", +"if.ua", +"ivano-frankivsk.ua", +"kh.ua", +"kharkiv.ua", +"kharkov.ua", +"kherson.ua", +"khmelnitskiy.ua", +"khmelnytskyi.ua", +"kiev.ua", +"kirovograd.ua", +"km.ua", +"kr.ua", +"krym.ua", +"ks.ua", +"kv.ua", +"kyiv.ua", +"lg.ua", +"lt.ua", +"lugansk.ua", +"lutsk.ua", +"lv.ua", +"lviv.ua", +"mk.ua", +"mykolaiv.ua", +"nikolaev.ua", +"od.ua", +"odesa.ua", +"odessa.ua", +"pl.ua", +"poltava.ua", +"rivne.ua", +"rovno.ua", +"rv.ua", +"sb.ua", +"sebastopol.ua", +"sevastopol.ua", +"sm.ua", +"sumy.ua", +"te.ua", +"ternopil.ua", +"uz.ua", +"uzhgorod.ua", +"vinnica.ua", +"vinnytsia.ua", +"vn.ua", +"volyn.ua", +"yalta.ua", +"zaporizhzhe.ua", +"zaporizhzhia.ua", +"zhitomir.ua", +"zhytomyr.ua", +"zp.ua", +"zt.ua", +"ug", +"co.ug", +"or.ug", +"ac.ug", +"sc.ug", +"go.ug", +"ne.ug", +"com.ug", +"org.ug", +"uk", +"ac.uk", +"co.uk", +"gov.uk", +"ltd.uk", +"me.uk", +"net.uk", +"nhs.uk", +"org.uk", +"plc.uk", +"police.uk", +"*.sch.uk", +"us", +"dni.us", +"fed.us", +"isa.us", +"kids.us", +"nsn.us", +"ak.us", +"al.us", +"ar.us", +"as.us", +"az.us", +"ca.us", +"co.us", +"ct.us", +"dc.us", +"de.us", +"fl.us", +"ga.us", +"gu.us", +"hi.us", +"ia.us", +"id.us", +"il.us", +"in.us", +"ks.us", 
+"ky.us", +"la.us", +"ma.us", +"md.us", +"me.us", +"mi.us", +"mn.us", +"mo.us", +"ms.us", +"mt.us", +"nc.us", +"nd.us", +"ne.us", +"nh.us", +"nj.us", +"nm.us", +"nv.us", +"ny.us", +"oh.us", +"ok.us", +"or.us", +"pa.us", +"pr.us", +"ri.us", +"sc.us", +"sd.us", +"tn.us", +"tx.us", +"ut.us", +"vi.us", +"vt.us", +"va.us", +"wa.us", +"wi.us", +"wv.us", +"wy.us", +"k12.ak.us", +"k12.al.us", +"k12.ar.us", +"k12.as.us", +"k12.az.us", +"k12.ca.us", +"k12.co.us", +"k12.ct.us", +"k12.dc.us", +"k12.de.us", +"k12.fl.us", +"k12.ga.us", +"k12.gu.us", +"k12.ia.us", +"k12.id.us", +"k12.il.us", +"k12.in.us", +"k12.ks.us", +"k12.ky.us", +"k12.la.us", +"k12.ma.us", +"k12.md.us", +"k12.me.us", +"k12.mi.us", +"k12.mn.us", +"k12.mo.us", +"k12.ms.us", +"k12.mt.us", +"k12.nc.us", +"k12.ne.us", +"k12.nh.us", +"k12.nj.us", +"k12.nm.us", +"k12.nv.us", +"k12.ny.us", +"k12.oh.us", +"k12.ok.us", +"k12.or.us", +"k12.pa.us", +"k12.pr.us", +"k12.sc.us", +"k12.tn.us", +"k12.tx.us", +"k12.ut.us", +"k12.vi.us", +"k12.vt.us", +"k12.va.us", +"k12.wa.us", +"k12.wi.us", +"k12.wy.us", +"cc.ak.us", +"cc.al.us", +"cc.ar.us", +"cc.as.us", +"cc.az.us", +"cc.ca.us", +"cc.co.us", +"cc.ct.us", +"cc.dc.us", +"cc.de.us", +"cc.fl.us", +"cc.ga.us", +"cc.gu.us", +"cc.hi.us", +"cc.ia.us", +"cc.id.us", +"cc.il.us", +"cc.in.us", +"cc.ks.us", +"cc.ky.us", +"cc.la.us", +"cc.ma.us", +"cc.md.us", +"cc.me.us", +"cc.mi.us", +"cc.mn.us", +"cc.mo.us", +"cc.ms.us", +"cc.mt.us", +"cc.nc.us", +"cc.nd.us", +"cc.ne.us", +"cc.nh.us", +"cc.nj.us", +"cc.nm.us", +"cc.nv.us", +"cc.ny.us", +"cc.oh.us", +"cc.ok.us", +"cc.or.us", +"cc.pa.us", +"cc.pr.us", +"cc.ri.us", +"cc.sc.us", +"cc.sd.us", +"cc.tn.us", +"cc.tx.us", +"cc.ut.us", +"cc.vi.us", +"cc.vt.us", +"cc.va.us", +"cc.wa.us", +"cc.wi.us", +"cc.wv.us", +"cc.wy.us", +"lib.ak.us", +"lib.al.us", +"lib.ar.us", +"lib.as.us", +"lib.az.us", +"lib.ca.us", +"lib.co.us", +"lib.ct.us", +"lib.dc.us", +"lib.fl.us", +"lib.ga.us", +"lib.gu.us", +"lib.hi.us", +"lib.ia.us", +"lib.id.us", +"lib.il.us", +"lib.in.us", +"lib.ks.us", +"lib.ky.us", +"lib.la.us", +"lib.ma.us", +"lib.md.us", +"lib.me.us", +"lib.mi.us", +"lib.mn.us", +"lib.mo.us", +"lib.ms.us", +"lib.mt.us", +"lib.nc.us", +"lib.nd.us", +"lib.ne.us", +"lib.nh.us", +"lib.nj.us", +"lib.nm.us", +"lib.nv.us", +"lib.ny.us", +"lib.oh.us", +"lib.ok.us", +"lib.or.us", +"lib.pa.us", +"lib.pr.us", +"lib.ri.us", +"lib.sc.us", +"lib.sd.us", +"lib.tn.us", +"lib.tx.us", +"lib.ut.us", +"lib.vi.us", +"lib.vt.us", +"lib.va.us", +"lib.wa.us", +"lib.wi.us", +"lib.wy.us", +"pvt.k12.ma.us", +"chtr.k12.ma.us", +"paroch.k12.ma.us", +"ann-arbor.mi.us", +"cog.mi.us", +"dst.mi.us", +"eaton.mi.us", +"gen.mi.us", +"mus.mi.us", +"tec.mi.us", +"washtenaw.mi.us", +"uy", +"com.uy", +"edu.uy", +"gub.uy", +"mil.uy", +"net.uy", +"org.uy", +"uz", +"co.uz", +"com.uz", +"net.uz", +"org.uz", +"va", +"vc", +"com.vc", +"net.vc", +"org.vc", +"gov.vc", +"mil.vc", +"edu.vc", +"ve", +"arts.ve", +"bib.ve", +"co.ve", +"com.ve", +"e12.ve", +"edu.ve", +"firm.ve", +"gob.ve", +"gov.ve", +"info.ve", +"int.ve", +"mil.ve", +"net.ve", +"nom.ve", +"org.ve", +"rar.ve", +"rec.ve", +"store.ve", +"tec.ve", +"web.ve", +"vg", +"vi", +"co.vi", +"com.vi", +"k12.vi", +"net.vi", +"org.vi", +"vn", +"com.vn", +"net.vn", +"org.vn", +"edu.vn", +"gov.vn", +"int.vn", +"ac.vn", +"biz.vn", +"info.vn", +"name.vn", +"pro.vn", +"health.vn", +"vu", +"com.vu", +"edu.vu", +"net.vu", +"org.vu", +"wf", +"ws", +"com.ws", +"net.ws", +"org.ws", +"gov.ws", +"edu.ws", +"yt", +"امارات", +"հայ", +"বাংলা", +"бг", +"البحرين", +"бел", +"中国", +"中國", +"الجزائر", 
+"مصر", +"ею", +"ευ", +"موريتانيا", +"გე", +"ελ", +"香港", +"公司.香港", +"教育.香港", +"政府.香港", +"個人.香港", +"網絡.香港", +"組織.香港", +"ಭಾರತ", +"ଭାରତ", +"ভাৰত", +"भारतम्", +"भारोत", +"ڀارت", +"ഭാരതം", +"भारत", +"بارت", +"بھارت", +"భారత్", +"ભારત", +"ਭਾਰਤ", +"ভারত", +"இந்தியா", +"ایران", +"ايران", +"عراق", +"الاردن", +"한국", +"қаз", +"ລາວ", +"ලංකා", +"இலங்கை", +"المغرب", +"мкд", +"мон", +"澳門", +"澳门", +"مليسيا", +"عمان", +"پاکستان", +"پاكستان", +"فلسطين", +"срб", +"пр.срб", +"орг.срб", +"обр.срб", +"од.срб", +"упр.срб", +"ак.срб", +"рф", +"قطر", +"السعودية", +"السعودیة", +"السعودیۃ", +"السعوديه", +"سودان", +"新加坡", +"சிங்கப்பூர்", +"سورية", +"سوريا", +"ไทย", +"ศึกษา.ไทย", +"ธุรกิจ.ไทย", +"รัฐบาล.ไทย", +"ทหาร.ไทย", +"เน็ต.ไทย", +"องค์กร.ไทย", +"تونس", +"台灣", +"台湾", +"臺灣", +"укр", +"اليمن", +"xxx", +"ye", +"com.ye", +"edu.ye", +"gov.ye", +"net.ye", +"mil.ye", +"org.ye", +"ac.za", +"agric.za", +"alt.za", +"co.za", +"edu.za", +"gov.za", +"grondar.za", +"law.za", +"mil.za", +"net.za", +"ngo.za", +"nic.za", +"nis.za", +"nom.za", +"org.za", +"school.za", +"tm.za", +"web.za", +"zm", +"ac.zm", +"biz.zm", +"co.zm", +"com.zm", +"edu.zm", +"gov.zm", +"info.zm", +"mil.zm", +"net.zm", +"org.zm", +"sch.zm", +"zw", +"ac.zw", +"co.zw", +"gov.zw", +"mil.zw", +"org.zw", +"aaa", +"aarp", +"abarth", +"abb", +"abbott", +"abbvie", +"abc", +"able", +"abogado", +"abudhabi", +"academy", +"accenture", +"accountant", +"accountants", +"aco", +"actor", +"adac", +"ads", +"adult", +"aeg", +"aetna", +"afl", +"africa", +"agakhan", +"agency", +"aig", +"airbus", +"airforce", +"airtel", +"akdn", +"alfaromeo", +"alibaba", +"alipay", +"allfinanz", +"allstate", +"ally", +"alsace", +"alstom", +"amazon", +"americanexpress", +"americanfamily", +"amex", +"amfam", +"amica", +"amsterdam", +"analytics", +"android", +"anquan", +"anz", +"aol", +"apartments", +"app", +"apple", +"aquarelle", +"arab", +"aramco", +"archi", +"army", +"art", +"arte", +"asda", +"associates", +"athleta", +"attorney", +"auction", +"audi", +"audible", +"audio", +"auspost", +"author", +"auto", +"autos", +"avianca", +"aws", +"axa", +"azure", +"baby", +"baidu", +"banamex", +"bananarepublic", +"band", +"bank", +"bar", +"barcelona", +"barclaycard", +"barclays", +"barefoot", +"bargains", +"baseball", +"basketball", +"bauhaus", +"bayern", +"bbc", +"bbt", +"bbva", +"bcg", +"bcn", +"beats", +"beauty", +"beer", +"bentley", +"berlin", +"best", +"bestbuy", +"bet", +"bharti", +"bible", +"bid", +"bike", +"bing", +"bingo", +"bio", +"black", +"blackfriday", +"blockbuster", +"blog", +"bloomberg", +"blue", +"bms", +"bmw", +"bnpparibas", +"boats", +"boehringer", +"bofa", +"bom", +"bond", +"boo", +"book", +"booking", +"bosch", +"bostik", +"boston", +"bot", +"boutique", +"box", +"bradesco", +"bridgestone", +"broadway", +"broker", +"brother", +"brussels", +"bugatti", +"build", +"builders", +"business", +"buy", +"buzz", +"bzh", +"cab", +"cafe", +"cal", +"call", +"calvinklein", +"cam", +"camera", +"camp", +"cancerresearch", +"canon", +"capetown", +"capital", +"capitalone", +"car", +"caravan", +"cards", +"care", +"career", +"careers", +"cars", +"casa", +"case", +"cash", +"casino", +"catering", +"catholic", +"cba", +"cbn", +"cbre", +"cbs", +"center", +"ceo", +"cern", +"cfa", +"cfd", +"chanel", +"channel", +"charity", +"chase", +"chat", +"cheap", +"chintai", +"christmas", +"chrome", +"church", +"cipriani", +"circle", +"cisco", +"citadel", +"citi", +"citic", +"city", +"cityeats", +"claims", +"cleaning", +"click", +"clinic", +"clinique", +"clothing", +"cloud", +"club", +"clubmed", +"coach", +"codes", +"coffee", 
+"college", +"cologne", +"comcast", +"commbank", +"community", +"company", +"compare", +"computer", +"comsec", +"condos", +"construction", +"consulting", +"contact", +"contractors", +"cooking", +"cookingchannel", +"cool", +"corsica", +"country", +"coupon", +"coupons", +"courses", +"cpa", +"credit", +"creditcard", +"creditunion", +"cricket", +"crown", +"crs", +"cruise", +"cruises", +"cuisinella", +"cymru", +"cyou", +"dabur", +"dad", +"dance", +"data", +"date", +"dating", +"datsun", +"day", +"dclk", +"dds", +"deal", +"dealer", +"deals", +"degree", +"delivery", +"dell", +"deloitte", +"delta", +"democrat", +"dental", +"dentist", +"desi", +"design", +"dev", +"dhl", +"diamonds", +"diet", +"digital", +"direct", +"directory", +"discount", +"discover", +"dish", +"diy", +"dnp", +"docs", +"doctor", +"dog", +"domains", +"dot", +"download", +"drive", +"dtv", +"dubai", +"dunlop", +"dupont", +"durban", +"dvag", +"dvr", +"earth", +"eat", +"eco", +"edeka", +"education", +"email", +"emerck", +"energy", +"engineer", +"engineering", +"enterprises", +"epson", +"equipment", +"ericsson", +"erni", +"esq", +"estate", +"etisalat", +"eurovision", +"eus", +"events", +"exchange", +"expert", +"exposed", +"express", +"extraspace", +"fage", +"fail", +"fairwinds", +"faith", +"family", +"fan", +"fans", +"farm", +"farmers", +"fashion", +"fast", +"fedex", +"feedback", +"ferrari", +"ferrero", +"fiat", +"fidelity", +"fido", +"film", +"final", +"finance", +"financial", +"fire", +"firestone", +"firmdale", +"fish", +"fishing", +"fit", +"fitness", +"flickr", +"flights", +"flir", +"florist", +"flowers", +"fly", +"foo", +"food", +"foodnetwork", +"football", +"ford", +"forex", +"forsale", +"forum", +"foundation", +"fox", +"free", +"fresenius", +"frl", +"frogans", +"frontdoor", +"frontier", +"ftr", +"fujitsu", +"fun", +"fund", +"furniture", +"futbol", +"fyi", +"gal", +"gallery", +"gallo", +"gallup", +"game", +"games", +"gap", +"garden", +"gay", +"gbiz", +"gdn", +"gea", +"gent", +"genting", +"george", +"ggee", +"gift", +"gifts", +"gives", +"giving", +"glass", +"gle", +"global", +"globo", +"gmail", +"gmbh", +"gmo", +"gmx", +"godaddy", +"gold", +"goldpoint", +"golf", +"goo", +"goodyear", +"goog", +"google", +"gop", +"got", +"grainger", +"graphics", +"gratis", +"green", +"gripe", +"grocery", +"group", +"guardian", +"gucci", +"guge", +"guide", +"guitars", +"guru", +"hair", +"hamburg", +"hangout", +"haus", +"hbo", +"hdfc", +"hdfcbank", +"health", +"healthcare", +"help", +"helsinki", +"here", +"hermes", +"hgtv", +"hiphop", +"hisamitsu", +"hitachi", +"hiv", +"hkt", +"hockey", +"holdings", +"holiday", +"homedepot", +"homegoods", +"homes", +"homesense", +"honda", +"horse", +"hospital", +"host", +"hosting", +"hot", +"hoteles", +"hotels", +"hotmail", +"house", +"how", +"hsbc", +"hughes", +"hyatt", +"hyundai", +"ibm", +"icbc", +"ice", +"icu", +"ieee", +"ifm", +"ikano", +"imamat", +"imdb", +"immo", +"immobilien", +"inc", +"industries", +"infiniti", +"ing", +"ink", +"institute", +"insurance", +"insure", +"international", +"intuit", +"investments", +"ipiranga", +"irish", +"ismaili", +"ist", +"istanbul", +"itau", +"itv", +"jaguar", +"java", +"jcb", +"jeep", +"jetzt", +"jewelry", +"jio", +"jll", +"jmp", +"jnj", +"joburg", +"jot", +"joy", +"jpmorgan", +"jprs", +"juegos", +"juniper", +"kaufen", +"kddi", +"kerryhotels", +"kerrylogistics", +"kerryproperties", +"kfh", +"kia", +"kids", +"kim", +"kinder", +"kindle", +"kitchen", +"kiwi", +"koeln", +"komatsu", +"kosher", +"kpmg", +"kpn", +"krd", +"kred", +"kuokgroup", +"kyoto", +"lacaixa", +"lamborghini", 
+"lamer", +"lancaster", +"lancia", +"land", +"landrover", +"lanxess", +"lasalle", +"lat", +"latino", +"latrobe", +"law", +"lawyer", +"lds", +"lease", +"leclerc", +"lefrak", +"legal", +"lego", +"lexus", +"lgbt", +"lidl", +"life", +"lifeinsurance", +"lifestyle", +"lighting", +"like", +"lilly", +"limited", +"limo", +"lincoln", +"linde", +"link", +"lipsy", +"live", +"living", +"llc", +"llp", +"loan", +"loans", +"locker", +"locus", +"loft", +"lol", +"london", +"lotte", +"lotto", +"love", +"lpl", +"lplfinancial", +"ltd", +"ltda", +"lundbeck", +"luxe", +"luxury", +"macys", +"madrid", +"maif", +"maison", +"makeup", +"man", +"management", +"mango", +"map", +"market", +"marketing", +"markets", +"marriott", +"marshalls", +"maserati", +"mattel", +"mba", +"mckinsey", +"med", +"media", +"meet", +"melbourne", +"meme", +"memorial", +"men", +"menu", +"merckmsd", +"miami", +"microsoft", +"mini", +"mint", +"mit", +"mitsubishi", +"mlb", +"mls", +"mma", +"mobile", +"moda", +"moe", +"moi", +"mom", +"monash", +"money", +"monster", +"mormon", +"mortgage", +"moscow", +"moto", +"motorcycles", +"mov", +"movie", +"msd", +"mtn", +"mtr", +"music", +"mutual", +"nab", +"nagoya", +"natura", +"navy", +"nba", +"nec", +"netbank", +"netflix", +"network", +"neustar", +"new", +"news", +"next", +"nextdirect", +"nexus", +"nfl", +"ngo", +"nhk", +"nico", +"nike", +"nikon", +"ninja", +"nissan", +"nissay", +"nokia", +"northwesternmutual", +"norton", +"now", +"nowruz", +"nowtv", +"nra", +"nrw", +"ntt", +"nyc", +"obi", +"observer", +"office", +"okinawa", +"olayan", +"olayangroup", +"oldnavy", +"ollo", +"omega", +"one", +"ong", +"onl", +"online", +"ooo", +"open", +"oracle", +"orange", +"organic", +"origins", +"osaka", +"otsuka", +"ott", +"ovh", +"page", +"panasonic", +"paris", +"pars", +"partners", +"parts", +"party", +"passagens", +"pay", +"pccw", +"pet", +"pfizer", +"pharmacy", +"phd", +"philips", +"phone", +"photo", +"photography", +"photos", +"physio", +"pics", +"pictet", +"pictures", +"pid", +"pin", +"ping", +"pink", +"pioneer", +"pizza", +"place", +"play", +"playstation", +"plumbing", +"plus", +"pnc", +"pohl", +"poker", +"politie", +"porn", +"pramerica", +"praxi", +"press", +"prime", +"prod", +"productions", +"prof", +"progressive", +"promo", +"properties", +"property", +"protection", +"pru", +"prudential", +"pub", +"pwc", +"qpon", +"quebec", +"quest", +"racing", +"radio", +"read", +"realestate", +"realtor", +"realty", +"recipes", +"red", +"redstone", +"redumbrella", +"rehab", +"reise", +"reisen", +"reit", +"reliance", +"ren", +"rent", +"rentals", +"repair", +"report", +"republican", +"rest", +"restaurant", +"review", +"reviews", +"rexroth", +"rich", +"richardli", +"ricoh", +"ril", +"rio", +"rip", +"rocher", +"rocks", +"rodeo", +"rogers", +"room", +"rsvp", +"rugby", +"ruhr", +"run", +"rwe", +"ryukyu", +"saarland", +"safe", +"safety", +"sakura", +"sale", +"salon", +"samsclub", +"samsung", +"sandvik", +"sandvikcoromant", +"sanofi", +"sap", +"sarl", +"sas", +"save", +"saxo", +"sbi", +"sbs", +"sca", +"scb", +"schaeffler", +"schmidt", +"scholarships", +"school", +"schule", +"schwarz", +"science", +"scot", +"search", +"seat", +"secure", +"security", +"seek", +"select", +"sener", +"services", +"ses", +"seven", +"sew", +"sex", +"sexy", +"sfr", +"shangrila", +"sharp", +"shaw", +"shell", +"shia", +"shiksha", +"shoes", +"shop", +"shopping", +"shouji", +"show", +"showtime", +"silk", +"sina", +"singles", +"site", +"ski", +"skin", +"sky", +"skype", +"sling", +"smart", +"smile", +"sncf", +"soccer", +"social", +"softbank", +"software", +"sohu", 
+"solar", +"solutions", +"song", +"sony", +"soy", +"spa", +"space", +"sport", +"spot", +"srl", +"stada", +"staples", +"star", +"statebank", +"statefarm", +"stc", +"stcgroup", +"stockholm", +"storage", +"store", +"stream", +"studio", +"study", +"style", +"sucks", +"supplies", +"supply", +"support", +"surf", +"surgery", +"suzuki", +"swatch", +"swiss", +"sydney", +"systems", +"tab", +"taipei", +"talk", +"taobao", +"target", +"tatamotors", +"tatar", +"tattoo", +"tax", +"taxi", +"tci", +"tdk", +"team", +"tech", +"technology", +"temasek", +"tennis", +"teva", +"thd", +"theater", +"theatre", +"tiaa", +"tickets", +"tienda", +"tiffany", +"tips", +"tires", +"tirol", +"tjmaxx", +"tjx", +"tkmaxx", +"tmall", +"today", +"tokyo", +"tools", +"top", +"toray", +"toshiba", +"total", +"tours", +"town", +"toyota", +"toys", +"trade", +"trading", +"training", +"travel", +"travelchannel", +"travelers", +"travelersinsurance", +"trust", +"trv", +"tube", +"tui", +"tunes", +"tushu", +"tvs", +"ubank", +"ubs", +"unicom", +"university", +"uno", +"uol", +"ups", +"vacations", +"vana", +"vanguard", +"vegas", +"ventures", +"verisign", +"versicherung", +"vet", +"viajes", +"video", +"vig", +"viking", +"villas", +"vin", +"vip", +"virgin", +"visa", +"vision", +"viva", +"vivo", +"vlaanderen", +"vodka", +"volkswagen", +"volvo", +"vote", +"voting", +"voto", +"voyage", +"vuelos", +"wales", +"walmart", +"walter", +"wang", +"wanggou", +"watch", +"watches", +"weather", +"weatherchannel", +"webcam", +"weber", +"website", +"wedding", +"weibo", +"weir", +"whoswho", +"wien", +"wiki", +"williamhill", +"win", +"windows", +"wine", +"winners", +"wme", +"wolterskluwer", +"woodside", +"work", +"works", +"world", +"wow", +"wtc", +"wtf", +"xbox", +"xerox", +"xfinity", +"xihuan", +"xin", +"कॉम", +"セール", +"佛山", +"慈善", +"集团", +"在线", +"点看", +"คอม", +"八卦", +"موقع", +"公益", +"公司", +"香格里拉", +"网站", +"移动", +"我爱你", +"москва", +"католик", +"онлайн", +"сайт", +"联通", +"קום", +"时尚", +"微博", +"淡马锡", +"ファッション", +"орг", +"नेट", +"ストア", +"アマゾン", +"삼성", +"商标", +"商店", +"商城", +"дети", +"ポイント", +"新闻", +"家電", +"كوم", +"中文网", +"中信", +"娱乐", +"谷歌", +"電訊盈科", +"购物", +"クラウド", +"通販", +"网店", +"संगठन", +"餐厅", +"网络", +"ком", +"亚马逊", +"诺基亚", +"食品", +"飞利浦", +"手机", +"ارامكو", +"العليان", +"اتصالات", +"بازار", +"ابوظبي", +"كاثوليك", +"همراه", +"닷컴", +"政府", +"شبكة", +"بيتك", +"عرب", +"机构", +"组织机构", +"健康", +"招聘", +"рус", +"大拿", +"みんな", +"グーグル", +"世界", +"書籍", +"网址", +"닷넷", +"コム", +"天主教", +"游戏", +"vermögensberater", +"vermögensberatung", +"企业", +"信息", +"嘉里大酒店", +"嘉里", +"广东", +"政务", +"xyz", +"yachts", +"yahoo", +"yamaxun", +"yandex", +"yodobashi", +"yoga", +"yokohama", +"you", +"youtube", +"yun", +"zappos", +"zara", +"zero", +"zip", +"zone", +"zuerich", +"cc.ua", +"inf.ua", +"ltd.ua", +"611.to", +"graphox.us", +"*.devcdnaccesso.com", +"adobeaemcloud.com", +"*.dev.adobeaemcloud.com", +"hlx.live", +"adobeaemcloud.net", +"hlx.page", +"hlx3.page", +"beep.pl", +"airkitapps.com", +"airkitapps-au.com", +"airkitapps.eu", +"aivencloud.com", +"barsy.ca", +"*.compute.estate", +"*.alces.network", +"kasserver.com", +"altervista.org", +"alwaysdata.net", +"cloudfront.net", +"*.compute.amazonaws.com", +"*.compute-1.amazonaws.com", +"*.compute.amazonaws.com.cn", +"us-east-1.amazonaws.com", +"cn-north-1.eb.amazonaws.com.cn", +"cn-northwest-1.eb.amazonaws.com.cn", +"elasticbeanstalk.com", +"ap-northeast-1.elasticbeanstalk.com", +"ap-northeast-2.elasticbeanstalk.com", +"ap-northeast-3.elasticbeanstalk.com", +"ap-south-1.elasticbeanstalk.com", +"ap-southeast-1.elasticbeanstalk.com", 
+"ap-southeast-2.elasticbeanstalk.com", +"ca-central-1.elasticbeanstalk.com", +"eu-central-1.elasticbeanstalk.com", +"eu-west-1.elasticbeanstalk.com", +"eu-west-2.elasticbeanstalk.com", +"eu-west-3.elasticbeanstalk.com", +"sa-east-1.elasticbeanstalk.com", +"us-east-1.elasticbeanstalk.com", +"us-east-2.elasticbeanstalk.com", +"us-gov-west-1.elasticbeanstalk.com", +"us-west-1.elasticbeanstalk.com", +"us-west-2.elasticbeanstalk.com", +"*.elb.amazonaws.com", +"*.elb.amazonaws.com.cn", +"awsglobalaccelerator.com", +"s3.amazonaws.com", +"s3-ap-northeast-1.amazonaws.com", +"s3-ap-northeast-2.amazonaws.com", +"s3-ap-south-1.amazonaws.com", +"s3-ap-southeast-1.amazonaws.com", +"s3-ap-southeast-2.amazonaws.com", +"s3-ca-central-1.amazonaws.com", +"s3-eu-central-1.amazonaws.com", +"s3-eu-west-1.amazonaws.com", +"s3-eu-west-2.amazonaws.com", +"s3-eu-west-3.amazonaws.com", +"s3-external-1.amazonaws.com", +"s3-fips-us-gov-west-1.amazonaws.com", +"s3-sa-east-1.amazonaws.com", +"s3-us-gov-west-1.amazonaws.com", +"s3-us-east-2.amazonaws.com", +"s3-us-west-1.amazonaws.com", +"s3-us-west-2.amazonaws.com", +"s3.ap-northeast-2.amazonaws.com", +"s3.ap-south-1.amazonaws.com", +"s3.cn-north-1.amazonaws.com.cn", +"s3.ca-central-1.amazonaws.com", +"s3.eu-central-1.amazonaws.com", +"s3.eu-west-2.amazonaws.com", +"s3.eu-west-3.amazonaws.com", +"s3.us-east-2.amazonaws.com", +"s3.dualstack.ap-northeast-1.amazonaws.com", +"s3.dualstack.ap-northeast-2.amazonaws.com", +"s3.dualstack.ap-south-1.amazonaws.com", +"s3.dualstack.ap-southeast-1.amazonaws.com", +"s3.dualstack.ap-southeast-2.amazonaws.com", +"s3.dualstack.ca-central-1.amazonaws.com", +"s3.dualstack.eu-central-1.amazonaws.com", +"s3.dualstack.eu-west-1.amazonaws.com", +"s3.dualstack.eu-west-2.amazonaws.com", +"s3.dualstack.eu-west-3.amazonaws.com", +"s3.dualstack.sa-east-1.amazonaws.com", +"s3.dualstack.us-east-1.amazonaws.com", +"s3.dualstack.us-east-2.amazonaws.com", +"s3-website-us-east-1.amazonaws.com", +"s3-website-us-west-1.amazonaws.com", +"s3-website-us-west-2.amazonaws.com", +"s3-website-ap-northeast-1.amazonaws.com", +"s3-website-ap-southeast-1.amazonaws.com", +"s3-website-ap-southeast-2.amazonaws.com", +"s3-website-eu-west-1.amazonaws.com", +"s3-website-sa-east-1.amazonaws.com", +"s3-website.ap-northeast-2.amazonaws.com", +"s3-website.ap-south-1.amazonaws.com", +"s3-website.ca-central-1.amazonaws.com", +"s3-website.eu-central-1.amazonaws.com", +"s3-website.eu-west-2.amazonaws.com", +"s3-website.eu-west-3.amazonaws.com", +"s3-website.us-east-2.amazonaws.com", +"t3l3p0rt.net", +"tele.amune.org", +"apigee.io", +"siiites.com", +"appspacehosted.com", +"appspaceusercontent.com", +"appudo.net", +"on-aptible.com", +"user.aseinet.ne.jp", +"gv.vc", +"d.gv.vc", +"user.party.eus", +"pimienta.org", +"poivron.org", +"potager.org", +"sweetpepper.org", +"myasustor.com", +"cdn.prod.atlassian-dev.net", +"translated.page", +"myfritz.net", +"onavstack.net", +"*.awdev.ca", +"*.advisor.ws", +"ecommerce-shop.pl", +"b-data.io", +"backplaneapp.io", +"balena-devices.com", +"rs.ba", +"*.banzai.cloud", +"app.banzaicloud.io", +"*.backyards.banzaicloud.io", +"base.ec", +"official.ec", +"buyshop.jp", +"fashionstore.jp", +"handcrafted.jp", +"kawaiishop.jp", +"supersale.jp", +"theshop.jp", +"shopselect.net", +"base.shop", +"*.beget.app", +"betainabox.com", +"bnr.la", +"bitbucket.io", +"blackbaudcdn.net", +"of.je", +"bluebite.io", +"boomla.net", +"boutir.com", +"boxfuse.io", +"square7.ch", +"bplaced.com", +"bplaced.de", +"square7.de", +"bplaced.net", +"square7.net", +"shop.brendly.rs", 
+"browsersafetymark.io", +"uk0.bigv.io", +"dh.bytemark.co.uk", +"vm.bytemark.co.uk", +"cafjs.com", +"mycd.eu", +"drr.ac", +"uwu.ai", +"carrd.co", +"crd.co", +"ju.mp", +"ae.org", +"br.com", +"cn.com", +"com.de", +"com.se", +"de.com", +"eu.com", +"gb.net", +"hu.net", +"jp.net", +"jpn.com", +"mex.com", +"ru.com", +"sa.com", +"se.net", +"uk.com", +"uk.net", +"us.com", +"za.bz", +"za.com", +"ar.com", +"hu.com", +"kr.com", +"no.com", +"qc.com", +"uy.com", +"africa.com", +"gr.com", +"in.net", +"web.in", +"us.org", +"co.com", +"aus.basketball", +"nz.basketball", +"radio.am", +"radio.fm", +"c.la", +"certmgr.org", +"cx.ua", +"discourse.group", +"discourse.team", +"cleverapps.io", +"clerk.app", +"clerkstage.app", +"*.lcl.dev", +"*.lclstage.dev", +"*.stg.dev", +"*.stgstage.dev", +"clickrising.net", +"c66.me", +"cloud66.ws", +"cloud66.zone", +"jdevcloud.com", +"wpdevcloud.com", +"cloudaccess.host", +"freesite.host", +"cloudaccess.net", +"cloudcontrolled.com", +"cloudcontrolapp.com", +"*.cloudera.site", +"pages.dev", +"trycloudflare.com", +"workers.dev", +"wnext.app", +"co.ca", +"*.otap.co", +"co.cz", +"c.cdn77.org", +"cdn77-ssl.net", +"r.cdn77.net", +"rsc.cdn77.org", +"ssl.origin.cdn77-secure.org", +"cloudns.asia", +"cloudns.biz", +"cloudns.club", +"cloudns.cc", +"cloudns.eu", +"cloudns.in", +"cloudns.info", +"cloudns.org", +"cloudns.pro", +"cloudns.pw", +"cloudns.us", +"cnpy.gdn", +"codeberg.page", +"co.nl", +"co.no", +"webhosting.be", +"hosting-cluster.nl", +"ac.ru", +"edu.ru", +"gov.ru", +"int.ru", +"mil.ru", +"test.ru", +"dyn.cosidns.de", +"dynamisches-dns.de", +"dnsupdater.de", +"internet-dns.de", +"l-o-g-i-n.de", +"dynamic-dns.info", +"feste-ip.net", +"knx-server.net", +"static-access.net", +"realm.cz", +"*.cryptonomic.net", +"cupcake.is", +"curv.dev", +"*.customer-oci.com", +"*.oci.customer-oci.com", +"*.ocp.customer-oci.com", +"*.ocs.customer-oci.com", +"cyon.link", +"cyon.site", +"fnwk.site", +"folionetwork.site", +"platform0.app", +"daplie.me", +"localhost.daplie.me", +"dattolocal.com", +"dattorelay.com", +"dattoweb.com", +"mydatto.com", +"dattolocal.net", +"mydatto.net", +"biz.dk", +"co.dk", +"firm.dk", +"reg.dk", +"store.dk", +"dyndns.dappnode.io", +"*.dapps.earth", +"*.bzz.dapps.earth", +"builtwithdark.com", +"demo.datadetect.com", +"instance.datadetect.com", +"edgestack.me", +"ddns5.com", +"debian.net", +"deno.dev", +"deno-staging.dev", +"dedyn.io", +"deta.app", +"deta.dev", +"*.rss.my.id", +"*.diher.solutions", +"discordsays.com", +"discordsez.com", +"jozi.biz", +"dnshome.de", +"online.th", +"shop.th", +"drayddns.com", +"shoparena.pl", +"dreamhosters.com", +"mydrobo.com", +"drud.io", +"drud.us", +"duckdns.org", +"bip.sh", +"bitbridge.net", +"dy.fi", +"tunk.org", +"dyndns-at-home.com", +"dyndns-at-work.com", +"dyndns-blog.com", +"dyndns-free.com", +"dyndns-home.com", +"dyndns-ip.com", +"dyndns-mail.com", +"dyndns-office.com", +"dyndns-pics.com", +"dyndns-remote.com", +"dyndns-server.com", +"dyndns-web.com", +"dyndns-wiki.com", +"dyndns-work.com", +"dyndns.biz", +"dyndns.info", +"dyndns.org", +"dyndns.tv", +"at-band-camp.net", +"ath.cx", +"barrel-of-knowledge.info", +"barrell-of-knowledge.info", +"better-than.tv", +"blogdns.com", +"blogdns.net", +"blogdns.org", +"blogsite.org", +"boldlygoingnowhere.org", +"broke-it.net", +"buyshouses.net", +"cechire.com", +"dnsalias.com", +"dnsalias.net", +"dnsalias.org", +"dnsdojo.com", +"dnsdojo.net", +"dnsdojo.org", +"does-it.net", +"doesntexist.com", +"doesntexist.org", +"dontexist.com", +"dontexist.net", +"dontexist.org", +"doomdns.com", 
+"doomdns.org", +"dvrdns.org", +"dyn-o-saur.com", +"dynalias.com", +"dynalias.net", +"dynalias.org", +"dynathome.net", +"dyndns.ws", +"endofinternet.net", +"endofinternet.org", +"endoftheinternet.org", +"est-a-la-maison.com", +"est-a-la-masion.com", +"est-le-patron.com", +"est-mon-blogueur.com", +"for-better.biz", +"for-more.biz", +"for-our.info", +"for-some.biz", +"for-the.biz", +"forgot.her.name", +"forgot.his.name", +"from-ak.com", +"from-al.com", +"from-ar.com", +"from-az.net", +"from-ca.com", +"from-co.net", +"from-ct.com", +"from-dc.com", +"from-de.com", +"from-fl.com", +"from-ga.com", +"from-hi.com", +"from-ia.com", +"from-id.com", +"from-il.com", +"from-in.com", +"from-ks.com", +"from-ky.com", +"from-la.net", +"from-ma.com", +"from-md.com", +"from-me.org", +"from-mi.com", +"from-mn.com", +"from-mo.com", +"from-ms.com", +"from-mt.com", +"from-nc.com", +"from-nd.com", +"from-ne.com", +"from-nh.com", +"from-nj.com", +"from-nm.com", +"from-nv.com", +"from-ny.net", +"from-oh.com", +"from-ok.com", +"from-or.com", +"from-pa.com", +"from-pr.com", +"from-ri.com", +"from-sc.com", +"from-sd.com", +"from-tn.com", +"from-tx.com", +"from-ut.com", +"from-va.com", +"from-vt.com", +"from-wa.com", +"from-wi.com", +"from-wv.com", +"from-wy.com", +"ftpaccess.cc", +"fuettertdasnetz.de", +"game-host.org", +"game-server.cc", +"getmyip.com", +"gets-it.net", +"go.dyndns.org", +"gotdns.com", +"gotdns.org", +"groks-the.info", +"groks-this.info", +"ham-radio-op.net", +"here-for-more.info", +"hobby-site.com", +"hobby-site.org", +"home.dyndns.org", +"homedns.org", +"homeftp.net", +"homeftp.org", +"homeip.net", +"homelinux.com", +"homelinux.net", +"homelinux.org", +"homeunix.com", +"homeunix.net", +"homeunix.org", +"iamallama.com", +"in-the-band.net", +"is-a-anarchist.com", +"is-a-blogger.com", +"is-a-bookkeeper.com", +"is-a-bruinsfan.org", +"is-a-bulls-fan.com", +"is-a-candidate.org", +"is-a-caterer.com", +"is-a-celticsfan.org", +"is-a-chef.com", +"is-a-chef.net", +"is-a-chef.org", +"is-a-conservative.com", +"is-a-cpa.com", +"is-a-cubicle-slave.com", +"is-a-democrat.com", +"is-a-designer.com", +"is-a-doctor.com", +"is-a-financialadvisor.com", +"is-a-geek.com", +"is-a-geek.net", +"is-a-geek.org", +"is-a-green.com", +"is-a-guru.com", +"is-a-hard-worker.com", +"is-a-hunter.com", +"is-a-knight.org", +"is-a-landscaper.com", +"is-a-lawyer.com", +"is-a-liberal.com", +"is-a-libertarian.com", +"is-a-linux-user.org", +"is-a-llama.com", +"is-a-musician.com", +"is-a-nascarfan.com", +"is-a-nurse.com", +"is-a-painter.com", +"is-a-patsfan.org", +"is-a-personaltrainer.com", +"is-a-photographer.com", +"is-a-player.com", +"is-a-republican.com", +"is-a-rockstar.com", +"is-a-socialist.com", +"is-a-soxfan.org", +"is-a-student.com", +"is-a-teacher.com", +"is-a-techie.com", +"is-a-therapist.com", +"is-an-accountant.com", +"is-an-actor.com", +"is-an-actress.com", +"is-an-anarchist.com", +"is-an-artist.com", +"is-an-engineer.com", +"is-an-entertainer.com", +"is-by.us", +"is-certified.com", +"is-found.org", +"is-gone.com", +"is-into-anime.com", +"is-into-cars.com", +"is-into-cartoons.com", +"is-into-games.com", +"is-leet.com", +"is-lost.org", +"is-not-certified.com", +"is-saved.org", +"is-slick.com", +"is-uberleet.com", +"is-very-bad.org", +"is-very-evil.org", +"is-very-good.org", +"is-very-nice.org", +"is-very-sweet.org", +"is-with-theband.com", +"isa-geek.com", +"isa-geek.net", +"isa-geek.org", +"isa-hockeynut.com", +"issmarterthanyou.com", +"isteingeek.de", +"istmein.de", +"kicks-ass.net", +"kicks-ass.org", +"knowsitall.info", 
+"land-4-sale.us", +"lebtimnetz.de", +"leitungsen.de", +"likes-pie.com", +"likescandy.com", +"merseine.nu", +"mine.nu", +"misconfused.org", +"mypets.ws", +"myphotos.cc", +"neat-url.com", +"office-on-the.net", +"on-the-web.tv", +"podzone.net", +"podzone.org", +"readmyblog.org", +"saves-the-whales.com", +"scrapper-site.net", +"scrapping.cc", +"selfip.biz", +"selfip.com", +"selfip.info", +"selfip.net", +"selfip.org", +"sells-for-less.com", +"sells-for-u.com", +"sells-it.net", +"sellsyourhome.org", +"servebbs.com", +"servebbs.net", +"servebbs.org", +"serveftp.net", +"serveftp.org", +"servegame.org", +"shacknet.nu", +"simple-url.com", +"space-to-rent.com", +"stuff-4-sale.org", +"stuff-4-sale.us", +"teaches-yoga.com", +"thruhere.net", +"traeumtgerade.de", +"webhop.biz", +"webhop.info", +"webhop.net", +"webhop.org", +"worse-than.tv", +"writesthisblog.com", +"ddnss.de", +"dyn.ddnss.de", +"dyndns.ddnss.de", +"dyndns1.de", +"dyn-ip24.de", +"home-webserver.de", +"dyn.home-webserver.de", +"myhome-server.de", +"ddnss.org", +"definima.net", +"definima.io", +"ondigitalocean.app", +"*.digitaloceanspaces.com", +"bci.dnstrace.pro", +"ddnsfree.com", +"ddnsgeek.com", +"giize.com", +"gleeze.com", +"kozow.com", +"loseyourip.com", +"ooguy.com", +"theworkpc.com", +"casacam.net", +"dynu.net", +"accesscam.org", +"camdvr.org", +"freeddns.org", +"mywire.org", +"webredirect.org", +"myddns.rocks", +"blogsite.xyz", +"dynv6.net", +"e4.cz", +"eero.online", +"eero-stage.online", +"elementor.cloud", +"elementor.cool", +"en-root.fr", +"mytuleap.com", +"tuleap-partners.com", +"encr.app", +"encoreapi.com", +"onred.one", +"staging.onred.one", +"eu.encoway.cloud", +"eu.org", +"al.eu.org", +"asso.eu.org", +"at.eu.org", +"au.eu.org", +"be.eu.org", +"bg.eu.org", +"ca.eu.org", +"cd.eu.org", +"ch.eu.org", +"cn.eu.org", +"cy.eu.org", +"cz.eu.org", +"de.eu.org", +"dk.eu.org", +"edu.eu.org", +"ee.eu.org", +"es.eu.org", +"fi.eu.org", +"fr.eu.org", +"gr.eu.org", +"hr.eu.org", +"hu.eu.org", +"ie.eu.org", +"il.eu.org", +"in.eu.org", +"int.eu.org", +"is.eu.org", +"it.eu.org", +"jp.eu.org", +"kr.eu.org", +"lt.eu.org", +"lu.eu.org", +"lv.eu.org", +"mc.eu.org", +"me.eu.org", +"mk.eu.org", +"mt.eu.org", +"my.eu.org", +"net.eu.org", +"ng.eu.org", +"nl.eu.org", +"no.eu.org", +"nz.eu.org", +"paris.eu.org", +"pl.eu.org", +"pt.eu.org", +"q-a.eu.org", +"ro.eu.org", +"ru.eu.org", +"se.eu.org", +"si.eu.org", +"sk.eu.org", +"tr.eu.org", +"uk.eu.org", +"us.eu.org", +"eurodir.ru", +"eu-1.evennode.com", +"eu-2.evennode.com", +"eu-3.evennode.com", +"eu-4.evennode.com", +"us-1.evennode.com", +"us-2.evennode.com", +"us-3.evennode.com", +"us-4.evennode.com", +"twmail.cc", +"twmail.net", +"twmail.org", +"mymailer.com.tw", +"url.tw", +"onfabrica.com", +"apps.fbsbx.com", +"ru.net", +"adygeya.ru", +"bashkiria.ru", +"bir.ru", +"cbg.ru", +"com.ru", +"dagestan.ru", +"grozny.ru", +"kalmykia.ru", +"kustanai.ru", +"marine.ru", +"mordovia.ru", +"msk.ru", +"mytis.ru", +"nalchik.ru", +"nov.ru", +"pyatigorsk.ru", +"spb.ru", +"vladikavkaz.ru", +"vladimir.ru", +"abkhazia.su", +"adygeya.su", +"aktyubinsk.su", +"arkhangelsk.su", +"armenia.su", +"ashgabad.su", +"azerbaijan.su", +"balashov.su", +"bashkiria.su", +"bryansk.su", +"bukhara.su", +"chimkent.su", +"dagestan.su", +"east-kazakhstan.su", +"exnet.su", +"georgia.su", +"grozny.su", +"ivanovo.su", +"jambyl.su", +"kalmykia.su", +"kaluga.su", +"karacol.su", +"karaganda.su", +"karelia.su", +"khakassia.su", +"krasnodar.su", +"kurgan.su", +"kustanai.su", +"lenug.su", +"mangyshlak.su", +"mordovia.su", +"msk.su", +"murmansk.su", 
+"nalchik.su", +"navoi.su", +"north-kazakhstan.su", +"nov.su", +"obninsk.su", +"penza.su", +"pokrovsk.su", +"sochi.su", +"spb.su", +"tashkent.su", +"termez.su", +"togliatti.su", +"troitsk.su", +"tselinograd.su", +"tula.su", +"tuva.su", +"vladikavkaz.su", +"vladimir.su", +"vologda.su", +"channelsdvr.net", +"u.channelsdvr.net", +"edgecompute.app", +"fastly-terrarium.com", +"fastlylb.net", +"map.fastlylb.net", +"freetls.fastly.net", +"map.fastly.net", +"a.prod.fastly.net", +"global.prod.fastly.net", +"a.ssl.fastly.net", +"b.ssl.fastly.net", +"global.ssl.fastly.net", +"fastvps-server.com", +"fastvps.host", +"myfast.host", +"fastvps.site", +"myfast.space", +"fedorainfracloud.org", +"fedorapeople.org", +"cloud.fedoraproject.org", +"app.os.fedoraproject.org", +"app.os.stg.fedoraproject.org", +"conn.uk", +"copro.uk", +"hosp.uk", +"mydobiss.com", +"fh-muenster.io", +"filegear.me", +"filegear-au.me", +"filegear-de.me", +"filegear-gb.me", +"filegear-ie.me", +"filegear-jp.me", +"filegear-sg.me", +"firebaseapp.com", +"fireweb.app", +"flap.id", +"onflashdrive.app", +"fldrv.com", +"fly.dev", +"edgeapp.net", +"shw.io", +"flynnhosting.net", +"forgeblocks.com", +"id.forgerock.io", +"framer.app", +"framercanvas.com", +"*.frusky.de", +"ravpage.co.il", +"0e.vc", +"freebox-os.com", +"freeboxos.com", +"fbx-os.fr", +"fbxos.fr", +"freebox-os.fr", +"freeboxos.fr", +"freedesktop.org", +"freemyip.com", +"wien.funkfeuer.at", +"*.futurecms.at", +"*.ex.futurecms.at", +"*.in.futurecms.at", +"futurehosting.at", +"futuremailing.at", +"*.ex.ortsinfo.at", +"*.kunden.ortsinfo.at", +"*.statics.cloud", +"independent-commission.uk", +"independent-inquest.uk", +"independent-inquiry.uk", +"independent-panel.uk", +"independent-review.uk", +"public-inquiry.uk", +"royal-commission.uk", +"campaign.gov.uk", +"service.gov.uk", +"api.gov.uk", +"gehirn.ne.jp", +"usercontent.jp", +"gentapps.com", +"gentlentapis.com", +"lab.ms", +"cdn-edges.net", +"ghost.io", +"gsj.bz", +"githubusercontent.com", +"githubpreview.dev", +"github.io", +"gitlab.io", +"gitapp.si", +"gitpage.si", +"glitch.me", +"nog.community", +"co.ro", +"shop.ro", +"lolipop.io", +"angry.jp", +"babyblue.jp", +"babymilk.jp", +"backdrop.jp", +"bambina.jp", +"bitter.jp", +"blush.jp", +"boo.jp", +"boy.jp", +"boyfriend.jp", +"but.jp", +"candypop.jp", +"capoo.jp", +"catfood.jp", +"cheap.jp", +"chicappa.jp", +"chillout.jp", +"chips.jp", +"chowder.jp", +"chu.jp", +"ciao.jp", +"cocotte.jp", +"coolblog.jp", +"cranky.jp", +"cutegirl.jp", +"daa.jp", +"deca.jp", +"deci.jp", +"digick.jp", +"egoism.jp", +"fakefur.jp", +"fem.jp", +"flier.jp", +"floppy.jp", +"fool.jp", +"frenchkiss.jp", +"girlfriend.jp", +"girly.jp", +"gloomy.jp", +"gonna.jp", +"greater.jp", +"hacca.jp", +"heavy.jp", +"her.jp", +"hiho.jp", +"hippy.jp", +"holy.jp", +"hungry.jp", +"icurus.jp", +"itigo.jp", +"jellybean.jp", +"kikirara.jp", +"kill.jp", +"kilo.jp", +"kuron.jp", +"littlestar.jp", +"lolipopmc.jp", +"lolitapunk.jp", +"lomo.jp", +"lovepop.jp", +"lovesick.jp", +"main.jp", +"mods.jp", +"mond.jp", +"mongolian.jp", +"moo.jp", +"namaste.jp", +"nikita.jp", +"nobushi.jp", +"noor.jp", +"oops.jp", +"parallel.jp", +"parasite.jp", +"pecori.jp", +"peewee.jp", +"penne.jp", +"pepper.jp", +"perma.jp", +"pigboat.jp", +"pinoko.jp", +"punyu.jp", +"pupu.jp", +"pussycat.jp", +"pya.jp", +"raindrop.jp", +"readymade.jp", +"sadist.jp", +"schoolbus.jp", +"secret.jp", +"staba.jp", +"stripper.jp", +"sub.jp", +"sunnyday.jp", +"thick.jp", +"tonkotsu.jp", +"under.jp", +"upper.jp", +"velvet.jp", +"verse.jp", +"versus.jp", +"vivian.jp", +"watson.jp", 
+"weblike.jp", +"whitesnow.jp", +"zombie.jp", +"heteml.net", +"cloudapps.digital", +"london.cloudapps.digital", +"pymnt.uk", +"homeoffice.gov.uk", +"ro.im", +"goip.de", +"run.app", +"a.run.app", +"web.app", +"*.0emm.com", +"appspot.com", +"*.r.appspot.com", +"codespot.com", +"googleapis.com", +"googlecode.com", +"pagespeedmobilizer.com", +"publishproxy.com", +"withgoogle.com", +"withyoutube.com", +"*.gateway.dev", +"cloud.goog", +"translate.goog", +"*.usercontent.goog", +"cloudfunctions.net", +"blogspot.ae", +"blogspot.al", +"blogspot.am", +"blogspot.ba", +"blogspot.be", +"blogspot.bg", +"blogspot.bj", +"blogspot.ca", +"blogspot.cf", +"blogspot.ch", +"blogspot.cl", +"blogspot.co.at", +"blogspot.co.id", +"blogspot.co.il", +"blogspot.co.ke", +"blogspot.co.nz", +"blogspot.co.uk", +"blogspot.co.za", +"blogspot.com", +"blogspot.com.ar", +"blogspot.com.au", +"blogspot.com.br", +"blogspot.com.by", +"blogspot.com.co", +"blogspot.com.cy", +"blogspot.com.ee", +"blogspot.com.eg", +"blogspot.com.es", +"blogspot.com.mt", +"blogspot.com.ng", +"blogspot.com.tr", +"blogspot.com.uy", +"blogspot.cv", +"blogspot.cz", +"blogspot.de", +"blogspot.dk", +"blogspot.fi", +"blogspot.fr", +"blogspot.gr", +"blogspot.hk", +"blogspot.hr", +"blogspot.hu", +"blogspot.ie", +"blogspot.in", +"blogspot.is", +"blogspot.it", +"blogspot.jp", +"blogspot.kr", +"blogspot.li", +"blogspot.lt", +"blogspot.lu", +"blogspot.md", +"blogspot.mk", +"blogspot.mr", +"blogspot.mx", +"blogspot.my", +"blogspot.nl", +"blogspot.no", +"blogspot.pe", +"blogspot.pt", +"blogspot.qa", +"blogspot.re", +"blogspot.ro", +"blogspot.rs", +"blogspot.ru", +"blogspot.se", +"blogspot.sg", +"blogspot.si", +"blogspot.sk", +"blogspot.sn", +"blogspot.td", +"blogspot.tw", +"blogspot.ug", +"blogspot.vn", +"goupile.fr", +"gov.nl", +"awsmppl.com", +"günstigbestellen.de", +"günstigliefern.de", +"fin.ci", +"free.hr", +"caa.li", +"ua.rs", +"conf.se", +"hs.zone", +"hs.run", +"hashbang.sh", +"hasura.app", +"hasura-app.io", +"pages.it.hs-heilbronn.de", +"hepforge.org", +"herokuapp.com", +"herokussl.com", +"ravendb.cloud", +"myravendb.com", +"ravendb.community", +"ravendb.me", +"development.run", +"ravendb.run", +"homesklep.pl", +"secaas.hk", +"hoplix.shop", +"orx.biz", +"biz.gl", +"col.ng", +"firm.ng", +"gen.ng", +"ltd.ng", +"ngo.ng", +"edu.scot", +"sch.so", +"hostyhosting.io", +"häkkinen.fi", +"*.moonscale.io", +"moonscale.net", +"iki.fi", +"ibxos.it", +"iliadboxos.it", +"impertrixcdn.com", +"impertrix.com", +"smushcdn.com", +"wphostedmail.com", +"wpmucdn.com", +"tempurl.host", +"wpmudev.host", +"dyn-berlin.de", +"in-berlin.de", +"in-brb.de", +"in-butter.de", +"in-dsl.de", +"in-dsl.net", +"in-dsl.org", +"in-vpn.de", +"in-vpn.net", +"in-vpn.org", +"biz.at", +"info.at", +"info.cx", +"ac.leg.br", +"al.leg.br", +"am.leg.br", +"ap.leg.br", +"ba.leg.br", +"ce.leg.br", +"df.leg.br", +"es.leg.br", +"go.leg.br", +"ma.leg.br", +"mg.leg.br", +"ms.leg.br", +"mt.leg.br", +"pa.leg.br", +"pb.leg.br", +"pe.leg.br", +"pi.leg.br", +"pr.leg.br", +"rj.leg.br", +"rn.leg.br", +"ro.leg.br", +"rr.leg.br", +"rs.leg.br", +"sc.leg.br", +"se.leg.br", +"sp.leg.br", +"to.leg.br", +"pixolino.com", +"na4u.ru", +"iopsys.se", +"ipifony.net", +"iservschule.de", +"mein-iserv.de", +"schulplattform.de", +"schulserver.de", +"test-iserv.de", +"iserv.dev", +"iobb.net", +"mel.cloudlets.com.au", +"cloud.interhostsolutions.be", +"users.scale.virtualcloud.com.br", +"mycloud.by", +"alp1.ae.flow.ch", +"appengine.flow.ch", +"es-1.axarnet.cloud", +"diadem.cloud", +"vip.jelastic.cloud", +"jele.cloud", 
+"it1.eur.aruba.jenv-aruba.cloud", +"it1.jenv-aruba.cloud", +"keliweb.cloud", +"cs.keliweb.cloud", +"oxa.cloud", +"tn.oxa.cloud", +"uk.oxa.cloud", +"primetel.cloud", +"uk.primetel.cloud", +"ca.reclaim.cloud", +"uk.reclaim.cloud", +"us.reclaim.cloud", +"ch.trendhosting.cloud", +"de.trendhosting.cloud", +"jele.club", +"amscompute.com", +"clicketcloud.com", +"dopaas.com", +"hidora.com", +"paas.hosted-by-previder.com", +"rag-cloud.hosteur.com", +"rag-cloud-ch.hosteur.com", +"jcloud.ik-server.com", +"jcloud-ver-jpc.ik-server.com", +"demo.jelastic.com", +"kilatiron.com", +"paas.massivegrid.com", +"jed.wafaicloud.com", +"lon.wafaicloud.com", +"ryd.wafaicloud.com", +"j.scaleforce.com.cy", +"jelastic.dogado.eu", +"fi.cloudplatform.fi", +"demo.datacenter.fi", +"paas.datacenter.fi", +"jele.host", +"mircloud.host", +"paas.beebyte.io", +"sekd1.beebyteapp.io", +"jele.io", +"cloud-fr1.unispace.io", +"jc.neen.it", +"cloud.jelastic.open.tim.it", +"jcloud.kz", +"upaas.kazteleport.kz", +"cloudjiffy.net", +"fra1-de.cloudjiffy.net", +"west1-us.cloudjiffy.net", +"jls-sto1.elastx.net", +"jls-sto2.elastx.net", +"jls-sto3.elastx.net", +"faststacks.net", +"fr-1.paas.massivegrid.net", +"lon-1.paas.massivegrid.net", +"lon-2.paas.massivegrid.net", +"ny-1.paas.massivegrid.net", +"ny-2.paas.massivegrid.net", +"sg-1.paas.massivegrid.net", +"jelastic.saveincloud.net", +"nordeste-idc.saveincloud.net", +"j.scaleforce.net", +"jelastic.tsukaeru.net", +"sdscloud.pl", +"unicloud.pl", +"mircloud.ru", +"jelastic.regruhosting.ru", +"enscaled.sg", +"jele.site", +"jelastic.team", +"orangecloud.tn", +"j.layershift.co.uk", +"phx.enscaled.us", +"mircloud.us", +"myjino.ru", +"*.hosting.myjino.ru", +"*.landing.myjino.ru", +"*.spectrum.myjino.ru", +"*.vps.myjino.ru", +"jotelulu.cloud", +"*.triton.zone", +"*.cns.joyent.com", +"js.org", +"kaas.gg", +"khplay.nl", +"ktistory.com", +"kapsi.fi", +"keymachine.de", +"kinghost.net", +"uni5.net", +"knightpoint.systems", +"koobin.events", +"oya.to", +"kuleuven.cloud", +"ezproxy.kuleuven.be", +"co.krd", +"edu.krd", +"krellian.net", +"webthings.io", +"git-repos.de", +"lcube-server.de", +"svn-repos.de", +"leadpages.co", +"lpages.co", +"lpusercontent.com", +"lelux.site", +"co.business", +"co.education", +"co.events", +"co.financial", +"co.network", +"co.place", +"co.technology", +"app.lmpm.com", +"linkyard.cloud", +"linkyard-cloud.ch", +"members.linode.com", +"*.nodebalancer.linode.com", +"*.linodeobjects.com", +"ip.linodeusercontent.com", +"we.bs", +"*.user.localcert.dev", +"localzone.xyz", +"loginline.app", +"loginline.dev", +"loginline.io", +"loginline.services", +"loginline.site", +"servers.run", +"lohmus.me", +"krasnik.pl", +"leczna.pl", +"lubartow.pl", +"lublin.pl", +"poniatowa.pl", +"swidnik.pl", +"glug.org.uk", +"lug.org.uk", +"lugs.org.uk", +"barsy.bg", +"barsy.co.uk", +"barsyonline.co.uk", +"barsycenter.com", +"barsyonline.com", +"barsy.club", +"barsy.de", +"barsy.eu", +"barsy.in", +"barsy.info", +"barsy.io", +"barsy.me", +"barsy.menu", +"barsy.mobi", +"barsy.net", +"barsy.online", +"barsy.org", +"barsy.pro", +"barsy.pub", +"barsy.ro", +"barsy.shop", +"barsy.site", +"barsy.support", +"barsy.uk", +"*.magentosite.cloud", +"mayfirst.info", +"mayfirst.org", +"hb.cldmail.ru", +"cn.vu", +"mazeplay.com", +"mcpe.me", +"mcdir.me", +"mcdir.ru", +"mcpre.ru", +"vps.mcdir.ru", +"mediatech.by", +"mediatech.dev", +"hra.health", +"miniserver.com", +"memset.net", +"messerli.app", +"*.cloud.metacentrum.cz", +"custom.metacentrum.cz", +"flt.cloud.muni.cz", +"usr.cloud.muni.cz", +"meteorapp.com", 
+"eu.meteorapp.com", +"co.pl", +"*.azurecontainer.io", +"azurewebsites.net", +"azure-mobile.net", +"cloudapp.net", +"azurestaticapps.net", +"1.azurestaticapps.net", +"centralus.azurestaticapps.net", +"eastasia.azurestaticapps.net", +"eastus2.azurestaticapps.net", +"westeurope.azurestaticapps.net", +"westus2.azurestaticapps.net", +"csx.cc", +"mintere.site", +"forte.id", +"mozilla-iot.org", +"bmoattachments.org", +"net.ru", +"org.ru", +"pp.ru", +"hostedpi.com", +"customer.mythic-beasts.com", +"caracal.mythic-beasts.com", +"fentiger.mythic-beasts.com", +"lynx.mythic-beasts.com", +"ocelot.mythic-beasts.com", +"oncilla.mythic-beasts.com", +"onza.mythic-beasts.com", +"sphinx.mythic-beasts.com", +"vs.mythic-beasts.com", +"x.mythic-beasts.com", +"yali.mythic-beasts.com", +"cust.retrosnub.co.uk", +"ui.nabu.casa", +"pony.club", +"of.fashion", +"in.london", +"of.london", +"from.marketing", +"with.marketing", +"for.men", +"repair.men", +"and.mom", +"for.mom", +"for.one", +"under.one", +"for.sale", +"that.win", +"from.work", +"to.work", +"cloud.nospamproxy.com", +"netlify.app", +"4u.com", +"ngrok.io", +"nh-serv.co.uk", +"nfshost.com", +"*.developer.app", +"noop.app", +"*.northflank.app", +"*.build.run", +"*.code.run", +"*.database.run", +"*.migration.run", +"noticeable.news", +"dnsking.ch", +"mypi.co", +"n4t.co", +"001www.com", +"ddnslive.com", +"myiphost.com", +"forumz.info", +"16-b.it", +"32-b.it", +"64-b.it", +"soundcast.me", +"tcp4.me", +"dnsup.net", +"hicam.net", +"now-dns.net", +"ownip.net", +"vpndns.net", +"dynserv.org", +"now-dns.org", +"x443.pw", +"now-dns.top", +"ntdll.top", +"freeddns.us", +"crafting.xyz", +"zapto.xyz", +"nsupdate.info", +"nerdpol.ovh", +"blogsyte.com", +"brasilia.me", +"cable-modem.org", +"ciscofreak.com", +"collegefan.org", +"couchpotatofries.org", +"damnserver.com", +"ddns.me", +"ditchyourip.com", +"dnsfor.me", +"dnsiskinky.com", +"dvrcam.info", +"dynns.com", +"eating-organic.net", +"fantasyleague.cc", +"geekgalaxy.com", +"golffan.us", +"health-carereform.com", +"homesecuritymac.com", +"homesecuritypc.com", +"hopto.me", +"ilovecollege.info", +"loginto.me", +"mlbfan.org", +"mmafan.biz", +"myactivedirectory.com", +"mydissent.net", +"myeffect.net", +"mymediapc.net", +"mypsx.net", +"mysecuritycamera.com", +"mysecuritycamera.net", +"mysecuritycamera.org", +"net-freaks.com", +"nflfan.org", +"nhlfan.net", +"no-ip.ca", +"no-ip.co.uk", +"no-ip.net", +"noip.us", +"onthewifi.com", +"pgafan.net", +"point2this.com", +"pointto.us", +"privatizehealthinsurance.net", +"quicksytes.com", +"read-books.org", +"securitytactics.com", +"serveexchange.com", +"servehumour.com", +"servep2p.com", +"servesarcasm.com", +"stufftoread.com", +"ufcfan.org", +"unusualperson.com", +"workisboring.com", +"3utilities.com", +"bounceme.net", +"ddns.net", +"ddnsking.com", +"gotdns.ch", +"hopto.org", +"myftp.biz", +"myftp.org", +"myvnc.com", +"no-ip.biz", +"no-ip.info", +"no-ip.org", +"noip.me", +"redirectme.net", +"servebeer.com", +"serveblog.net", +"servecounterstrike.com", +"serveftp.com", +"servegame.com", +"servehalflife.com", +"servehttp.com", +"serveirc.com", +"serveminecraft.net", +"servemp3.com", +"servepics.com", +"servequake.com", +"sytes.net", +"webhop.me", +"zapto.org", +"stage.nodeart.io", +"pcloud.host", +"nyc.mn", +"static.observableusercontent.com", +"cya.gg", +"omg.lol", +"cloudycluster.net", +"omniwe.site", +"service.one", +"nid.io", +"opensocial.site", +"opencraft.hosting", +"orsites.com", +"operaunite.com", +"tech.orange", +"authgear-staging.com", +"authgearapps.com", +"skygearapp.com", 
+"outsystemscloud.com", +"*.webpaas.ovh.net", +"*.hosting.ovh.net", +"ownprovider.com", +"own.pm", +"*.owo.codes", +"ox.rs", +"oy.lc", +"pgfog.com", +"pagefrontapp.com", +"pagexl.com", +"*.paywhirl.com", +"bar0.net", +"bar1.net", +"bar2.net", +"rdv.to", +"art.pl", +"gliwice.pl", +"krakow.pl", +"poznan.pl", +"wroc.pl", +"zakopane.pl", +"pantheonsite.io", +"gotpantheon.com", +"mypep.link", +"perspecta.cloud", +"lk3.ru", +"on-web.fr", +"bc.platform.sh", +"ent.platform.sh", +"eu.platform.sh", +"us.platform.sh", +"*.platformsh.site", +"*.tst.site", +"platter-app.com", +"platter-app.dev", +"platterp.us", +"pdns.page", +"plesk.page", +"pleskns.com", +"dyn53.io", +"onporter.run", +"co.bn", +"postman-echo.com", +"pstmn.io", +"mock.pstmn.io", +"httpbin.org", +"prequalifyme.today", +"xen.prgmr.com", +"priv.at", +"prvcy.page", +"*.dweb.link", +"protonet.io", +"chirurgiens-dentistes-en-france.fr", +"byen.site", +"pubtls.org", +"pythonanywhere.com", +"eu.pythonanywhere.com", +"qoto.io", +"qualifioapp.com", +"qbuser.com", +"cloudsite.builders", +"instances.spawn.cc", +"instantcloud.cn", +"ras.ru", +"qa2.com", +"qcx.io", +"*.sys.qcx.io", +"dev-myqnapcloud.com", +"alpha-myqnapcloud.com", +"myqnapcloud.com", +"*.quipelements.com", +"vapor.cloud", +"vaporcloud.io", +"rackmaze.com", +"rackmaze.net", +"g.vbrplsbx.io", +"*.on-k3s.io", +"*.on-rancher.cloud", +"*.on-rio.io", +"readthedocs.io", +"rhcloud.com", +"app.render.com", +"onrender.com", +"repl.co", +"id.repl.co", +"repl.run", +"resindevice.io", +"devices.resinstaging.io", +"hzc.io", +"wellbeingzone.eu", +"wellbeingzone.co.uk", +"adimo.co.uk", +"itcouldbewor.se", +"git-pages.rit.edu", +"rocky.page", +"биз.рус", +"ком.рус", +"крым.рус", +"мир.рус", +"мск.рус", +"орг.рус", +"самара.рус", +"сочи.рус", +"спб.рус", +"я.рус", +"*.builder.code.com", +"*.dev-builder.code.com", +"*.stg-builder.code.com", +"sandcats.io", +"logoip.de", +"logoip.com", +"fr-par-1.baremetal.scw.cloud", +"fr-par-2.baremetal.scw.cloud", +"nl-ams-1.baremetal.scw.cloud", +"fnc.fr-par.scw.cloud", +"functions.fnc.fr-par.scw.cloud", +"k8s.fr-par.scw.cloud", +"nodes.k8s.fr-par.scw.cloud", +"s3.fr-par.scw.cloud", +"s3-website.fr-par.scw.cloud", +"whm.fr-par.scw.cloud", +"priv.instances.scw.cloud", +"pub.instances.scw.cloud", +"k8s.scw.cloud", +"k8s.nl-ams.scw.cloud", +"nodes.k8s.nl-ams.scw.cloud", +"s3.nl-ams.scw.cloud", +"s3-website.nl-ams.scw.cloud", +"whm.nl-ams.scw.cloud", +"k8s.pl-waw.scw.cloud", +"nodes.k8s.pl-waw.scw.cloud", +"s3.pl-waw.scw.cloud", +"s3-website.pl-waw.scw.cloud", +"scalebook.scw.cloud", +"smartlabeling.scw.cloud", +"dedibox.fr", +"schokokeks.net", +"gov.scot", +"service.gov.scot", +"scrysec.com", +"firewall-gateway.com", +"firewall-gateway.de", +"my-gateway.de", +"my-router.de", +"spdns.de", +"spdns.eu", +"firewall-gateway.net", +"my-firewall.org", +"myfirewall.org", +"spdns.org", +"seidat.net", +"sellfy.store", +"senseering.net", +"minisite.ms", +"magnet.page", +"biz.ua", +"co.ua", +"pp.ua", +"shiftcrypto.dev", +"shiftcrypto.io", +"shiftedit.io", +"myshopblocks.com", +"myshopify.com", +"shopitsite.com", +"shopware.store", +"mo-siemens.io", +"1kapp.com", +"appchizi.com", +"applinzi.com", +"sinaapp.com", +"vipsinaapp.com", +"siteleaf.net", +"bounty-full.com", +"alpha.bounty-full.com", +"beta.bounty-full.com", +"small-web.org", +"vp4.me", +"try-snowplow.com", +"srht.site", +"stackhero-network.com", +"musician.io", +"novecore.site", +"static.land", +"dev.static.land", +"sites.static.land", +"storebase.store", +"vps-host.net", +"atl.jelastic.vps-host.net", 
+"njs.jelastic.vps-host.net", +"ric.jelastic.vps-host.net", +"playstation-cloud.com", +"apps.lair.io", +"*.stolos.io", +"spacekit.io", +"customer.speedpartner.de", +"myspreadshop.at", +"myspreadshop.com.au", +"myspreadshop.be", +"myspreadshop.ca", +"myspreadshop.ch", +"myspreadshop.com", +"myspreadshop.de", +"myspreadshop.dk", +"myspreadshop.es", +"myspreadshop.fi", +"myspreadshop.fr", +"myspreadshop.ie", +"myspreadshop.it", +"myspreadshop.net", +"myspreadshop.nl", +"myspreadshop.no", +"myspreadshop.pl", +"myspreadshop.se", +"myspreadshop.co.uk", +"api.stdlib.com", +"storj.farm", +"utwente.io", +"soc.srcf.net", +"user.srcf.net", +"temp-dns.com", +"supabase.co", +"supabase.in", +"supabase.net", +"su.paba.se", +"*.s5y.io", +"*.sensiosite.cloud", +"syncloud.it", +"dscloud.biz", +"direct.quickconnect.cn", +"dsmynas.com", +"familyds.com", +"diskstation.me", +"dscloud.me", +"i234.me", +"myds.me", +"synology.me", +"dscloud.mobi", +"dsmynas.net", +"familyds.net", +"dsmynas.org", +"familyds.org", +"vpnplus.to", +"direct.quickconnect.to", +"tabitorder.co.il", +"taifun-dns.de", +"beta.tailscale.net", +"ts.net", +"gda.pl", +"gdansk.pl", +"gdynia.pl", +"med.pl", +"sopot.pl", +"site.tb-hosting.com", +"edugit.io", +"s3.teckids.org", +"telebit.app", +"telebit.io", +"*.telebit.xyz", +"gwiddle.co.uk", +"*.firenet.ch", +"*.svc.firenet.ch", +"reservd.com", +"thingdustdata.com", +"cust.dev.thingdust.io", +"cust.disrec.thingdust.io", +"cust.prod.thingdust.io", +"cust.testing.thingdust.io", +"reservd.dev.thingdust.io", +"reservd.disrec.thingdust.io", +"reservd.testing.thingdust.io", +"tickets.io", +"arvo.network", +"azimuth.network", +"tlon.network", +"torproject.net", +"pages.torproject.net", +"bloxcms.com", +"townnews-staging.com", +"tbits.me", +"12hp.at", +"2ix.at", +"4lima.at", +"lima-city.at", +"12hp.ch", +"2ix.ch", +"4lima.ch", +"lima-city.ch", +"trafficplex.cloud", +"de.cool", +"12hp.de", +"2ix.de", +"4lima.de", +"lima-city.de", +"1337.pictures", +"clan.rip", +"lima-city.rocks", +"webspace.rocks", +"lima.zone", +"*.transurl.be", +"*.transurl.eu", +"*.transurl.nl", +"site.transip.me", +"tuxfamily.org", +"dd-dns.de", +"diskstation.eu", +"diskstation.org", +"dray-dns.de", +"draydns.de", +"dyn-vpn.de", +"dynvpn.de", +"mein-vigor.de", +"my-vigor.de", +"my-wan.de", +"syno-ds.de", +"synology-diskstation.de", +"synology-ds.de", +"typedream.app", +"pro.typeform.com", +"uber.space", +"*.uberspace.de", +"hk.com", +"hk.org", +"ltd.hk", +"inc.hk", +"name.pm", +"sch.tf", +"biz.wf", +"sch.wf", +"org.yt", +"virtualuser.de", +"virtual-user.de", +"upli.io", +"urown.cloud", +"dnsupdate.info", +"lib.de.us", +"2038.io", +"vercel.app", +"vercel.dev", +"now.sh", +"router.management", +"v-info.info", +"voorloper.cloud", +"neko.am", +"nyaa.am", +"be.ax", +"cat.ax", +"es.ax", +"eu.ax", +"gg.ax", +"mc.ax", +"us.ax", +"xy.ax", +"nl.ci", +"xx.gl", +"app.gp", +"blog.gt", +"de.gt", +"to.gt", +"be.gy", +"cc.hn", +"blog.kg", +"io.kg", +"jp.kg", +"tv.kg", +"uk.kg", +"us.kg", +"de.ls", +"at.md", +"de.md", +"jp.md", +"to.md", +"indie.porn", +"vxl.sh", +"ch.tc", +"me.tc", +"we.tc", +"nyan.to", +"at.vg", +"blog.vu", +"dev.vu", +"me.vu", +"v.ua", +"*.vultrobjects.com", +"wafflecell.com", +"*.webhare.dev", +"reserve-online.net", +"reserve-online.com", +"bookonline.app", +"hotelwithflight.com", +"wedeploy.io", +"wedeploy.me", +"wedeploy.sh", +"remotewd.com", +"pages.wiardweb.com", +"wmflabs.org", +"toolforge.org", +"wmcloud.org", +"panel.gg", +"daemon.panel.gg", +"messwithdns.com", +"woltlab-demo.com", +"myforum.community", +"community-pro.de", 
+"diskussionsbereich.de", +"community-pro.net", +"meinforum.net", +"affinitylottery.org.uk", +"raffleentry.org.uk", +"weeklylottery.org.uk", +"wpenginepowered.com", +"js.wpenginepowered.com", +"wixsite.com", +"editorx.io", +"half.host", +"xnbay.com", +"u2.xnbay.com", +"u2-local.xnbay.com", +"cistron.nl", +"demon.nl", +"xs4all.space", +"yandexcloud.net", +"storage.yandexcloud.net", +"website.yandexcloud.net", +"official.academy", +"yolasite.com", +"ybo.faith", +"yombo.me", +"homelink.one", +"ybo.party", +"ybo.review", +"ybo.science", +"ybo.trade", +"ynh.fr", +"nohost.me", +"noho.st", +"za.net", +"za.org", +"bss.design", +"basicserver.io", +"virtualserver.io", +"enterprisecloud.nu" +] \ No newline at end of file diff --git a/node_modules/psl/dist/psl.js b/node_modules/psl/dist/psl.js new file mode 100644 index 0000000000..2e967dfe5b --- /dev/null +++ b/node_modules/psl/dist/psl.js @@ -0,0 +1,10187 @@ +(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.psl = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; + + +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. 
+ var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. + var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. + var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. + parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. +// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; + +},{"./data/rules.json":1,"punycode":3}],3:[function(require,module,exports){ +(function (global){(function (){ +/*! 
https://mths.be/punycode v1.4.1 by @mathias */ +;(function(root) { + + /** Detect free variables */ + var freeExports = typeof exports == 'object' && exports && + !exports.nodeType && exports; + var freeModule = typeof module == 'object' && module && + !module.nodeType && module; + var freeGlobal = typeof global == 'object' && global; + if ( + freeGlobal.global === freeGlobal || + freeGlobal.window === freeGlobal || + freeGlobal.self === freeGlobal + ) { + root = freeGlobal; + } + + /** + * The `punycode` object. + * @name punycode + * @type Object + */ + var punycode, + + /** Highest positive signed 32-bit float value */ + maxInt = 2147483647, // aka. 0x7FFFFFFF or 2^31-1 + + /** Bootstring parameters */ + base = 36, + tMin = 1, + tMax = 26, + skew = 38, + damp = 700, + initialBias = 72, + initialN = 128, // 0x80 + delimiter = '-', // '\x2D' + + /** Regular expressions */ + regexPunycode = /^xn--/, + regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars + regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators + + /** Error messages */ + errors = { + 'overflow': 'Overflow: input needs wider integers to process', + 'not-basic': 'Illegal input >= 0x80 (not a basic code point)', + 'invalid-input': 'Invalid input' + }, + + /** Convenience shortcuts */ + baseMinusTMin = base - tMin, + floor = Math.floor, + stringFromCharCode = String.fromCharCode, + + /** Temporary variable */ + key; + + /*--------------------------------------------------------------------------*/ + + /** + * A generic error utility function. + * @private + * @param {String} type The error type. + * @returns {Error} Throws a `RangeError` with the applicable error message. + */ + function error(type) { + throw new RangeError(errors[type]); + } + + /** + * A generic `Array#map` utility function. + * @private + * @param {Array} array The array to iterate over. + * @param {Function} callback The function that gets called for every array + * item. + * @returns {Array} A new array of values returned by the callback function. + */ + function map(array, fn) { + var length = array.length; + var result = []; + while (length--) { + result[length] = fn(array[length]); + } + return result; + } + + /** + * A simple `Array#map`-like wrapper to work with domain name strings or email + * addresses. + * @private + * @param {String} domain The domain name or email address. + * @param {Function} callback The function that gets called for every + * character. + * @returns {Array} A new string of characters returned by the callback + * function. + */ + function mapDomain(string, fn) { + var parts = string.split('@'); + var result = ''; + if (parts.length > 1) { + // In email addresses, only the domain name should be punycoded. Leave + // the local part (i.e. everything up to `@`) intact. + result = parts[0] + '@'; + string = parts[1]; + } + // Avoid `split(regex)` for IE8 compatibility. See #17. + string = string.replace(regexSeparators, '\x2E'); + var labels = string.split('.'); + var encoded = map(labels, fn).join('.'); + return result + encoded; + } + + /** + * Creates an array containing the numeric code points of each Unicode + * character in the string. While JavaScript uses UCS-2 internally, + * this function will convert a pair of surrogate halves (each of which + * UCS-2 exposes as separate characters) into a single code point, + * matching UTF-16. + * @see `punycode.ucs2.encode` + * @see + * @memberOf punycode.ucs2 + * @name decode + * @param {String} string The Unicode input string (UCS-2). 
+ * @returns {Array} The new array of code points. + */ + function ucs2decode(string) { + var output = [], + counter = 0, + length = string.length, + value, + extra; + while (counter < length) { + value = string.charCodeAt(counter++); + if (value >= 0xD800 && value <= 0xDBFF && counter < length) { + // high surrogate, and there is a next character + extra = string.charCodeAt(counter++); + if ((extra & 0xFC00) == 0xDC00) { // low surrogate + output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000); + } else { + // unmatched surrogate; only append this code unit, in case the next + // code unit is the high surrogate of a surrogate pair + output.push(value); + counter--; + } + } else { + output.push(value); + } + } + return output; + } + + /** + * Creates a string based on an array of numeric code points. + * @see `punycode.ucs2.decode` + * @memberOf punycode.ucs2 + * @name encode + * @param {Array} codePoints The array of numeric code points. + * @returns {String} The new Unicode string (UCS-2). + */ + function ucs2encode(array) { + return map(array, function(value) { + var output = ''; + if (value > 0xFFFF) { + value -= 0x10000; + output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800); + value = 0xDC00 | value & 0x3FF; + } + output += stringFromCharCode(value); + return output; + }).join(''); + } + + /** + * Converts a basic code point into a digit/integer. + * @see `digitToBasic()` + * @private + * @param {Number} codePoint The basic numeric code point value. + * @returns {Number} The numeric value of a basic code point (for use in + * representing integers) in the range `0` to `base - 1`, or `base` if + * the code point does not represent a value. + */ + function basicToDigit(codePoint) { + if (codePoint - 48 < 10) { + return codePoint - 22; + } + if (codePoint - 65 < 26) { + return codePoint - 65; + } + if (codePoint - 97 < 26) { + return codePoint - 97; + } + return base; + } + + /** + * Converts a digit/integer into a basic code point. + * @see `basicToDigit()` + * @private + * @param {Number} digit The numeric value of a basic code point. + * @returns {Number} The basic code point whose value (when used for + * representing integers) is `digit`, which needs to be in the range + * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is + * used; else, the lowercase form is used. The behavior is undefined + * if `flag` is non-zero and `digit` has no uppercase form. + */ + function digitToBasic(digit, flag) { + // 0..25 map to ASCII a..z or A..Z + // 26..35 map to ASCII 0..9 + return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5); + } + + /** + * Bias adaptation function as per section 3.4 of RFC 3492. + * https://tools.ietf.org/html/rfc3492#section-3.4 + * @private + */ + function adapt(delta, numPoints, firstTime) { + var k = 0; + delta = firstTime ? floor(delta / damp) : delta >> 1; + delta += floor(delta / numPoints); + for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) { + delta = floor(delta / baseMinusTMin); + } + return floor(k + (baseMinusTMin + 1) * delta / (delta + skew)); + } + + /** + * Converts a Punycode string of ASCII-only symbols to a string of Unicode + * symbols. + * @memberOf punycode + * @param {String} input The Punycode string of ASCII-only symbols. + * @returns {String} The resulting string of Unicode symbols. 
+ */ + function decode(input) { + // Don't use UCS-2 + var output = [], + inputLength = input.length, + out, + i = 0, + n = initialN, + bias = initialBias, + basic, + j, + index, + oldi, + w, + k, + digit, + t, + /** Cached calculation results */ + baseMinusT; + + // Handle the basic code points: let `basic` be the number of input code + // points before the last delimiter, or `0` if there is none, then copy + // the first basic code points to the output. + + basic = input.lastIndexOf(delimiter); + if (basic < 0) { + basic = 0; + } + + for (j = 0; j < basic; ++j) { + // if it's not a basic code point + if (input.charCodeAt(j) >= 0x80) { + error('not-basic'); + } + output.push(input.charCodeAt(j)); + } + + // Main decoding loop: start just after the last delimiter if any basic code + // points were copied; start at the beginning otherwise. + + for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) { + + // `index` is the index of the next character to be consumed. + // Decode a generalized variable-length integer into `delta`, + // which gets added to `i`. The overflow checking is easier + // if we increase `i` as we go, then subtract off its starting + // value at the end to obtain `delta`. + for (oldi = i, w = 1, k = base; /* no condition */; k += base) { + + if (index >= inputLength) { + error('invalid-input'); + } + + digit = basicToDigit(input.charCodeAt(index++)); + + if (digit >= base || digit > floor((maxInt - i) / w)) { + error('overflow'); + } + + i += digit * w; + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + + if (digit < t) { + break; + } + + baseMinusT = base - t; + if (w > floor(maxInt / baseMinusT)) { + error('overflow'); + } + + w *= baseMinusT; + + } + + out = output.length + 1; + bias = adapt(i - oldi, out, oldi == 0); + + // `i` was supposed to wrap around from `out` to `0`, + // incrementing `n` each time, so we'll fix that now: + if (floor(i / out) > maxInt - n) { + error('overflow'); + } + + n += floor(i / out); + i %= out; + + // Insert `n` at position `i` of the output + output.splice(i++, 0, n); + + } + + return ucs2encode(output); + } + + /** + * Converts a string of Unicode symbols (e.g. a domain name label) to a + * Punycode string of ASCII-only symbols. + * @memberOf punycode + * @param {String} input The string of Unicode symbols. + * @returns {String} The resulting Punycode string of ASCII-only symbols. + */ + function encode(input) { + var n, + delta, + handledCPCount, + basicLength, + bias, + j, + m, + q, + k, + t, + currentValue, + output = [], + /** `inputLength` will hold the number of code points in `input`. */ + inputLength, + /** Cached calculation results */ + handledCPCountPlusOne, + baseMinusT, + qMinusT; + + // Convert the input in UCS-2 to Unicode + input = ucs2decode(input); + + // Cache the length + inputLength = input.length; + + // Initialize the state + n = initialN; + delta = 0; + bias = initialBias; + + // Handle the basic code points + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue < 0x80) { + output.push(stringFromCharCode(currentValue)); + } + } + + handledCPCount = basicLength = output.length; + + // `handledCPCount` is the number of code points that have been handled; + // `basicLength` is the number of basic code points. 
+ + // Finish the basic string - if it is not empty - with a delimiter + if (basicLength) { + output.push(delimiter); + } + + // Main encoding loop: + while (handledCPCount < inputLength) { + + // All non-basic code points < n have been handled already. Find the next + // larger one: + for (m = maxInt, j = 0; j < inputLength; ++j) { + currentValue = input[j]; + if (currentValue >= n && currentValue < m) { + m = currentValue; + } + } + + // Increase `delta` enough to advance the decoder's state to , + // but guard against overflow + handledCPCountPlusOne = handledCPCount + 1; + if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) { + error('overflow'); + } + + delta += (m - n) * handledCPCountPlusOne; + n = m; + + for (j = 0; j < inputLength; ++j) { + currentValue = input[j]; + + if (currentValue < n && ++delta > maxInt) { + error('overflow'); + } + + if (currentValue == n) { + // Represent delta as a generalized variable-length integer + for (q = delta, k = base; /* no condition */; k += base) { + t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias); + if (q < t) { + break; + } + qMinusT = q - t; + baseMinusT = base - t; + output.push( + stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0)) + ); + q = floor(qMinusT / baseMinusT); + } + + output.push(stringFromCharCode(digitToBasic(q, 0))); + bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength); + delta = 0; + ++handledCPCount; + } + } + + ++delta; + ++n; + + } + return output.join(''); + } + + /** + * Converts a Punycode string representing a domain name or an email address + * to Unicode. Only the Punycoded parts of the input will be converted, i.e. + * it doesn't matter if you call it on a string that has already been + * converted to Unicode. + * @memberOf punycode + * @param {String} input The Punycoded domain name or email address to + * convert to Unicode. + * @returns {String} The Unicode representation of the given Punycode + * string. + */ + function toUnicode(input) { + return mapDomain(input, function(string) { + return regexPunycode.test(string) + ? decode(string.slice(4).toLowerCase()) + : string; + }); + } + + /** + * Converts a Unicode string representing a domain name or an email address to + * Punycode. Only the non-ASCII parts of the domain name will be converted, + * i.e. it doesn't matter if you call it with a domain that's already in + * ASCII. + * @memberOf punycode + * @param {String} input The domain name or email address to convert, as a + * Unicode string. + * @returns {String} The Punycode representation of the given domain name or + * email address. + */ + function toASCII(input) { + return mapDomain(input, function(string) { + return regexNonASCII.test(string) + ? 'xn--' + encode(string) + : string; + }); + } + + /*--------------------------------------------------------------------------*/ + + /** Define the public API */ + punycode = { + /** + * A string representing the current Punycode.js version number. + * @memberOf punycode + * @type String + */ + 'version': '1.4.1', + /** + * An object of methods to convert from JavaScript's internal character + * representation (UCS-2) to Unicode code points, and back. 
+ * @see + * @memberOf punycode + * @type Object + */ + 'ucs2': { + 'decode': ucs2decode, + 'encode': ucs2encode + }, + 'decode': decode, + 'encode': encode, + 'toASCII': toASCII, + 'toUnicode': toUnicode + }; + + /** Expose `punycode` */ + // Some AMD build optimizers, like r.js, check for specific condition patterns + // like the following: + if ( + typeof define == 'function' && + typeof define.amd == 'object' && + define.amd + ) { + define('punycode', function() { + return punycode; + }); + } else if (freeExports && freeModule) { + if (module.exports == freeExports) { + // in Node.js, io.js, or RingoJS v0.8.0+ + freeModule.exports = punycode; + } else { + // in Narwhal or RingoJS v0.7.0- + for (key in punycode) { + punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]); + } + } + } else { + // in Rhino or a web browser + root.punycode = punycode; + } + +}(this)); + +}).call(this)}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {}) +},{}]},{},[2])(2) +}); diff --git a/node_modules/psl/dist/psl.min.js b/node_modules/psl/dist/psl.min.js new file mode 100644 index 0000000000..cbcd8eb3e7 --- /dev/null +++ b/node_modules/psl/dist/psl.min.js @@ -0,0 +1 @@ +!function(a){"object"==typeof exports&&"undefined"!=typeof module?module.exports=a():"function"==typeof define&&define.amd?define([],a):("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).psl=a()}(function(){return function e(s,n,t){function m(o,a){if(!n[o]){if(!s[o]){var i="function"==typeof require&&require;if(!a&&i)return i(o,!0);if(u)return u(o,!0);throw(a=new Error("Cannot find module '"+o+"'")).code="MODULE_NOT_FOUND",a}i=n[o]={exports:{}},s[o][0].call(i.exports,function(a){return m(s[o][1][a]||a)},i,i.exports,e,s,n,t)}return n[o].exports}for(var u="function"==typeof require&&require,a=0;a= 0x80 (not a basic code point)","invalid-input":"Invalid input"},l=j-1,y=Math.floor,f=String.fromCharCode;function v(a){throw new RangeError(c[a])}function k(a,o){for(var i=a.length,e=[];i--;)e[i]=o(a[i]);return e}function g(a,o){var i=a.split("@"),e="",i=(1>>10&1023|55296),a=56320|1023&a),o+=f(a)}).join("")}function z(a,o){return a+22+75*(a<26)-((0!=o)<<5)}function x(a,o,i){var e=0;for(a=i?y(a/m):a>>1,a+=y(a/o);l*b>>1y((d-p)/n))&&v("overflow"),p+=m*n,!(m<(m=t<=l?1:l+b<=t?b:t-l));t+=j)n>y(d/(m=j-m))&&v("overflow"),n*=m;l=x(p-s,o=u.length+1,0==s),y(p/o)>d-c&&v("overflow"),c+=y(p/o),p%=o,u.splice(p++,0,c)}return h(u)}function A(a){for(var o,i,e,s,n,t,m,u,r,p,c=[],l=(a=w(a)).length,k=128,g=72,h=o=0;hy((d-o)/(u=i+1))&&v("overflow"),o+=(s-k)*u,k=s,h=0;hd&&v("overflow"),m==k){for(n=o,t=j;!(n<(r=t<=g?1:g+b<=t?b:t-g));t+=j)c.push(f(z(r+(p=n-r)%(r=j-r),0))),n=y(p/r);c.push(f(z(n,0))),g=x(o,u,i==e),o=0,++i}++o,++k}return c.join("")}if(s={version:"1.4.1",ucs2:{decode:w,encode:h},decode:q,encode:A,toASCII:function(a){return g(a,function(a){return r.test(a)?"xn--"+A(a):a})},toUnicode:function(a){return g(a,function(a){return u.test(a)?q(a.slice(4).toLowerCase()):a})}},o&&i)if(_.exports==o)i.exports=s;else for(n in s)s.hasOwnProperty(n)&&(o[n]=s[n]);else a.punycode=s}.call(this)}.call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}]},{},[2])(2)}); diff --git a/node_modules/psl/index.js b/node_modules/psl/index.js new file mode 100644 index 0000000000..da7bc12136 --- /dev/null +++ b/node_modules/psl/index.js @@ -0,0 +1,269 @@ +/*eslint no-var:0, 
prefer-arrow-callback: 0, object-shorthand: 0 */ +'use strict'; + + +var Punycode = require('punycode'); + + +var internals = {}; + + +// +// Read rules from file. +// +internals.rules = require('./data/rules.json').map(function (rule) { + + return { + rule: rule, + suffix: rule.replace(/^(\*\.|\!)/, ''), + punySuffix: -1, + wildcard: rule.charAt(0) === '*', + exception: rule.charAt(0) === '!' + }; +}); + + +// +// Check is given string ends with `suffix`. +// +internals.endsWith = function (str, suffix) { + + return str.indexOf(suffix, str.length - suffix.length) !== -1; +}; + + +// +// Find rule for a given domain. +// +internals.findRule = function (domain) { + + var punyDomain = Punycode.toASCII(domain); + return internals.rules.reduce(function (memo, rule) { + + if (rule.punySuffix === -1){ + rule.punySuffix = Punycode.toASCII(rule.suffix); + } + if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { + return memo; + } + // This has been commented out as it never seems to run. This is because + // sub tlds always appear after their parents and we never find a shorter + // match. + //if (memo) { + // var memoSuffix = Punycode.toASCII(memo.suffix); + // if (memoSuffix.length >= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; + + +// +// Validate domain name and throw if not valid. +// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. + var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. 
+ var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. + var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. + parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. 
+// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; diff --git a/node_modules/psl/package.json b/node_modules/psl/package.json new file mode 100644 index 0000000000..baddd50bfb --- /dev/null +++ b/node_modules/psl/package.json @@ -0,0 +1,43 @@ +{ + "name": "psl", + "version": "1.9.0", + "description": "Domain name parser based on the Public Suffix List", + "repository": { + "type": "git", + "url": "git@github.com:lupomontero/psl.git" + }, + "main": "index.js", + "scripts": { + "lint": "eslint .", + "test": "mocha test/*.spec.js", + "test:browserstack": "node test/browserstack.js", + "watch": "mocha test --watch", + "prebuild": "./scripts/update-rules.js", + "build": "browserify ./index.js --standalone=psl > ./dist/psl.js", + "postbuild": "cat ./dist/psl.js | uglifyjs -c -m > ./dist/psl.min.js", + "commit-and-pr": "commit-and-pr", + "changelog": "git log $(git describe --tags --abbrev=0)..HEAD --oneline --format=\"%h %s (%an <%ae>)\"" + }, + "keywords": [ + "publicsuffix", + "publicsuffixlist" + ], + "author": "Lupo Montero (https://lupomontero.com/)", + "license": "MIT", + "devDependencies": { + "browserify": "^17.0.0", + "browserslist-browserstack": "^3.1.1", + "browserstack-local": "^1.5.1", + "chai": "^4.3.6", + "commit-and-pr": "^1.0.4", + "eslint": "^8.19.0", + "JSONStream": "^1.3.5", + "mocha": "^7.2.0", + "porch": "^2.0.0", + "request": "^2.88.2", + "selenium-webdriver": "^4.3.0", + "serve-handler": "^6.1.3", + "uglify-js": "^3.16.2", + "watchify": "^4.0.0" + } +} diff --git a/node_modules/sax/LICENSE b/node_modules/sax/LICENSE new file mode 100644 index 0000000000..ccffa082c9 --- /dev/null +++ b/node_modules/sax/LICENSE @@ -0,0 +1,41 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +==== + +`String.fromCodePoint` by Mathias Bynens used according to terms of MIT +License, as follows: + + Copyright Mathias Bynens + + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/sax/README.md b/node_modules/sax/README.md new file mode 100644 index 0000000000..afcd3f3dd6 --- /dev/null +++ b/node_modules/sax/README.md @@ -0,0 +1,225 @@ +# sax js + +A sax-style parser for XML and HTML. + +Designed with [node](http://nodejs.org/) in mind, but should work fine in +the browser or other CommonJS implementations. + +## What This Is + +* A very simple tool to parse through an XML string. +* A stepping stone to a streaming HTML parser. +* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML + docs. + +## What This Is (probably) Not + +* An HTML Parser - That's a fine goal, but this isn't it. It's just + XML. +* A DOM Builder - You can use it to build an object model out of XML, + but it doesn't do that out of the box. +* XSLT - No DOM = no querying. +* 100% Compliant with (some other SAX implementation) - Most SAX + implementations are in Java and do a lot more than this does. +* An XML Validator - It does a little validation when in strict mode, but + not much. +* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic + masochism. +* A DTD-aware Thing - Fetching DTDs is a much bigger job. + +## Regarding `Hello, world!').close(); + +// stream usage +// takes the same options as the parser +var saxStream = require("sax").createStream(strict, options) +saxStream.on("error", function (e) { + // unhandled errors will throw, since this is a proper node + // event emitter. + console.error("error!", e) + // clear the error + this._parser.error = null + this._parser.resume() +}) +saxStream.on("opentag", function (node) { + // same object as above +}) +// pipe is supported, and it's readable/writable +// same chunks coming in also go out. +fs.createReadStream("file.xml") + .pipe(saxStream) + .pipe(fs.createWriteStream("file-copy.xml")) +``` + + +## Arguments + +Pass the following arguments to the parser function. All are optional. + +`strict` - Boolean. Whether or not to be a jerk. Default: `false`. + +`opt` - Object bag of settings regarding string formatting. All default to `false`. + +Settings supported: + +* `trim` - Boolean. Whether or not to trim text and comment nodes. +* `normalize` - Boolean. If true, then turn any whitespace into a single + space. +* `lowercase` - Boolean. If true, then lowercase tag names and attribute names + in loose mode, rather than uppercasing them. +* `xmlns` - Boolean. If true, then namespaces are supported. +* `position` - Boolean. If false, then don't track line/col/position. +* `strictEntities` - Boolean. If true, only parse [predefined XML + entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent) + (`&`, `'`, `>`, `<`, and `"`) + +## Methods + +`write` - Write bytes onto the stream. You don't have to do this all at +once. You can keep writing as much as you want. + +`close` - Close the stream. Once closed, no more data may be written until +it is done processing the buffer, which is signaled by the `end` event. + +`resume` - To gracefully handle errors, assign a listener to the `error` +event. Then, when the error is taken care of, you can call `resume` to +continue parsing. Otherwise, the parser will not continue while in an error +state. 
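Taken together, `write`, `close`, and `resume` are typically used alongside the `on*` handlers like this. The sketch below is illustrative only — the XML snippet and handler bodies are invented, and it is not part of the sax package files; it only uses the parser API documented in this README.

```javascript
// Minimal sketch: strict parser with error recovery via resume().
var sax = require("sax");

var parser = sax.parser(true, { trim: true, normalize: true });

parser.onerror = function (e) {
  // The parser stays in an error state until resume() is called.
  console.error("parse error:", e.message);
  parser.resume();
};

parser.ontext = function (t) {
  console.log("text:", t);
};

parser.onopentag = function (node) {
  console.log("open:", node.name, node.attributes);
};

parser.onend = function () {
  console.log("done");
};

// write() accepts arbitrary chunks; close() flushes and emits "end".
parser.write('<greeting lang="en">Hello, ');
parser.write('world!</greeting>');
parser.close();
```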
+ +## Members + +At all times, the parser object will have the following members: + +`line`, `column`, `position` - Indications of the position in the XML +document where the parser currently is looking. + +`startTagPosition` - Indicates the position where the current tag starts. + +`closed` - Boolean indicating whether or not the parser can be written to. +If it's `true`, then wait for the `ready` event to write again. + +`strict` - Boolean indicating whether or not the parser is a jerk. + +`opt` - Any options passed into the constructor. + +`tag` - The current tag being dealt with. + +And a bunch of other stuff that you probably shouldn't touch. + +## Events + +All events emit with a single argument. To listen to an event, assign a +function to `on`. Functions get executed in the this-context of +the parser object. The list of supported events are also in the exported +`EVENTS` array. + +When using the stream interface, assign handlers using the EventEmitter +`on` function in the normal fashion. + +`error` - Indication that something bad happened. The error will be hanging +out on `parser.error`, and must be deleted before parsing can continue. By +listening to this event, you can keep an eye on that kind of stuff. Note: +this happens *much* more in strict mode. Argument: instance of `Error`. + +`text` - Text node. Argument: string of text. + +`doctype` - The ``. Argument: +object with `name` and `body` members. Attributes are not parsed, as +processing instructions have implementation dependent semantics. + +`sgmldeclaration` - Random SGML declarations. Stuff like `` +would trigger this kind of event. This is a weird thing to support, so it +might go away at some point. SAX isn't intended to be used to parse SGML, +after all. + +`opentagstart` - Emitted immediately when the tag name is available, +but before any attributes are encountered. Argument: object with a +`name` field and an empty `attributes` set. Note that this is the +same object that will later be emitted in the `opentag` event. + +`opentag` - An opening tag. Argument: object with `name` and `attributes`. +In non-strict mode, tag names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, then it will contain +namespace binding information on the `ns` member, and will have a +`local`, `prefix`, and `uri` member. + +`closetag` - A closing tag. In loose mode, tags are auto-closed if their +parent closes. In strict mode, well-formedness is enforced. Note that +self-closing tags will have `closeTag` emitted immediately after `openTag`. +Argument: tag name. + +`attribute` - An attribute node. Argument: object with `name` and `value`. +In non-strict mode, attribute names are uppercased, unless the `lowercase` +option is set. If the `xmlns` option is set, it will also contains namespace +information. + +`comment` - A comment node. Argument: the string of the comment. + +`opencdata` - The opening tag of a ``) of a `` tags trigger a `"script"` +event, and their contents are not checked for special xml characters. +If you pass `noscript: true`, then this behavior is suppressed. + +## Reporting Problems + +It's best to write a failing test if you find an issue. I will always +accept pull requests with failing tests if they demonstrate intended +behavior, but it is very hard to figure out what issue you're describing +without a test. Writing a test is also the best way for you yourself +to figure out if you really understand the issue you think you have with +sax-js. 
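The namespace-related members mentioned under the `opentag` and `attribute` events are easiest to see with `xmlns: true`. The following is a minimal illustrative sketch (not part of the sax package files), assuming the `prefix`/`local`/`uri` members described above and using an invented XML snippet.

```javascript
// Namespace-aware parsing sketch using the xmlns option.
var sax = require("sax");

var parser = sax.parser(true, { xmlns: true });

parser.onopentag = function (node) {
  // With xmlns: true, qualified names are split into prefix/local/uri.
  console.log(node.name, node.prefix, node.local, node.uri);
};

parser.onattribute = function (attr) {
  // Attributes also carry namespace information when xmlns is set.
  console.log("attr:", attr.name, "=", attr.value);
};

// write() returns the parser, so calls can be chained with close().
parser.write('<x:root xmlns:x="urn:example" x:id="1"/>').close();
```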
diff --git a/node_modules/sax/lib/sax.js b/node_modules/sax/lib/sax.js new file mode 100644 index 0000000000..795d607ef6 --- /dev/null +++ b/node_modules/sax/lib/sax.js @@ -0,0 +1,1565 @@ +;(function (sax) { // wrapper for non-node envs + sax.parser = function (strict, opt) { return new SAXParser(strict, opt) } + sax.SAXParser = SAXParser + sax.SAXStream = SAXStream + sax.createStream = createStream + + // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns. + // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)), + // since that's the earliest that a buffer overrun could occur. This way, checks are + // as rare as required, but as often as necessary to ensure never crossing this bound. + // Furthermore, buffers are only tested at most once per write(), so passing a very + // large string into write() might have undesirable effects, but this is manageable by + // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme + // edge case, result in creating at most one complete copy of the string passed in. + // Set to Infinity to have unlimited buffers. + sax.MAX_BUFFER_LENGTH = 64 * 1024 + + var buffers = [ + 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype', + 'procInstName', 'procInstBody', 'entity', 'attribName', + 'attribValue', 'cdata', 'script' + ] + + sax.EVENTS = [ + 'text', + 'processinginstruction', + 'sgmldeclaration', + 'doctype', + 'comment', + 'opentagstart', + 'attribute', + 'opentag', + 'closetag', + 'opencdata', + 'cdata', + 'closecdata', + 'error', + 'end', + 'ready', + 'script', + 'opennamespace', + 'closenamespace' + ] + + function SAXParser (strict, opt) { + if (!(this instanceof SAXParser)) { + return new SAXParser(strict, opt) + } + + var parser = this + clearBuffers(parser) + parser.q = parser.c = '' + parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH + parser.opt = opt || {} + parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags + parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase' + parser.tags = [] + parser.closed = parser.closedRoot = parser.sawRoot = false + parser.tag = parser.error = null + parser.strict = !!strict + parser.noscript = !!(strict || parser.opt.noscript) + parser.state = S.BEGIN + parser.strictEntities = parser.opt.strictEntities + parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES) + parser.attribList = [] + + // namespaces form a prototype chain. + // it always points at the current tag, + // which protos to its parent tag. + if (parser.opt.xmlns) { + parser.ns = Object.create(rootNS) + } + + // mostly just for error reporting + parser.trackPosition = parser.opt.position !== false + if (parser.trackPosition) { + parser.position = parser.line = parser.column = 0 + } + emit(parser, 'onready') + } + + if (!Object.create) { + Object.create = function (o) { + function F () {} + F.prototype = o + var newf = new F() + return newf + } + } + + if (!Object.keys) { + Object.keys = function (o) { + var a = [] + for (var i in o) if (o.hasOwnProperty(i)) a.push(i) + return a + } + } + + function checkBufferLength (parser) { + var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10) + var maxActual = 0 + for (var i = 0, l = buffers.length; i < l; i++) { + var len = parser[buffers[i]].length + if (len > maxAllowed) { + // Text/cdata nodes can get big, and since they're buffered, + // we can get here under normal conditions. 
+ // Avoid issues by emitting the text node now, + // so at least it won't get any bigger. + switch (buffers[i]) { + case 'textNode': + closeText(parser) + break + + case 'cdata': + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + break + + case 'script': + emitNode(parser, 'onscript', parser.script) + parser.script = '' + break + + default: + error(parser, 'Max buffer length exceeded: ' + buffers[i]) + } + } + maxActual = Math.max(maxActual, len) + } + // schedule the next check for the earliest possible buffer overrun. + var m = sax.MAX_BUFFER_LENGTH - maxActual + parser.bufferCheckPosition = m + parser.position + } + + function clearBuffers (parser) { + for (var i = 0, l = buffers.length; i < l; i++) { + parser[buffers[i]] = '' + } + } + + function flushBuffers (parser) { + closeText(parser) + if (parser.cdata !== '') { + emitNode(parser, 'oncdata', parser.cdata) + parser.cdata = '' + } + if (parser.script !== '') { + emitNode(parser, 'onscript', parser.script) + parser.script = '' + } + } + + SAXParser.prototype = { + end: function () { end(this) }, + write: write, + resume: function () { this.error = null; return this }, + close: function () { return this.write(null) }, + flush: function () { flushBuffers(this) } + } + + var Stream + try { + Stream = require('stream').Stream + } catch (ex) { + Stream = function () {} + } + + var streamWraps = sax.EVENTS.filter(function (ev) { + return ev !== 'error' && ev !== 'end' + }) + + function createStream (strict, opt) { + return new SAXStream(strict, opt) + } + + function SAXStream (strict, opt) { + if (!(this instanceof SAXStream)) { + return new SAXStream(strict, opt) + } + + Stream.apply(this) + + this._parser = new SAXParser(strict, opt) + this.writable = true + this.readable = true + + var me = this + + this._parser.onend = function () { + me.emit('end') + } + + this._parser.onerror = function (er) { + me.emit('error', er) + + // if didn't throw, then means error was handled. + // go ahead and clear error, so we can write again. + me._parser.error = null + } + + this._decoder = null + + streamWraps.forEach(function (ev) { + Object.defineProperty(me, 'on' + ev, { + get: function () { + return me._parser['on' + ev] + }, + set: function (h) { + if (!h) { + me.removeAllListeners(ev) + me._parser['on' + ev] = h + return h + } + me.on(ev, h) + }, + enumerable: true, + configurable: false + }) + }) + } + + SAXStream.prototype = Object.create(Stream.prototype, { + constructor: { + value: SAXStream + } + }) + + SAXStream.prototype.write = function (data) { + if (typeof Buffer === 'function' && + typeof Buffer.isBuffer === 'function' && + Buffer.isBuffer(data)) { + if (!this._decoder) { + var SD = require('string_decoder').StringDecoder + this._decoder = new SD('utf8') + } + data = this._decoder.write(data) + } + + this._parser.write(data.toString()) + this.emit('data', data) + return true + } + + SAXStream.prototype.end = function (chunk) { + if (chunk && chunk.length) { + this.write(chunk) + } + this._parser.end() + return true + } + + SAXStream.prototype.on = function (ev, handler) { + var me = this + if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) { + me._parser['on' + ev] = function () { + var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments) + args.splice(0, 0, ev) + me.emit.apply(me, args) + } + } + + return Stream.prototype.on.call(me, ev, handler) + } + + // this really needs to be replaced with character classes. + // XML allows all manner of ridiculous numbers and digits. 
+ var CDATA = '[CDATA[' + var DOCTYPE = 'DOCTYPE' + var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace' + var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/' + var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE } + + // http://www.w3.org/TR/REC-xml/#NT-NameStartChar + // This implementation works on strings, a single character at a time + // as such, it cannot ever support astral-plane characters (10000-EFFFF) + // without a significant breaking change to either this parser, or the + // JavaScript language. Implementation of an emoji-capable xml parser + // is left as an exercise for the reader. + var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + + var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/ + var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/ + + function isWhitespace (c) { + return c === ' ' || c === '\n' || c === '\r' || c === '\t' + } + + function isQuote (c) { + return c === '"' || c === '\'' + } + + function isAttribEnd (c) { + return c === '>' || isWhitespace(c) + } + + function isMatch (regex, c) { + return regex.test(c) + } + + function notMatch (regex, c) { + return !isMatch(regex, c) + } + + var S = 0 + sax.STATE = { + BEGIN: S++, // leading byte order mark or whitespace + BEGIN_WHITESPACE: S++, // leading whitespace + TEXT: S++, // general stuff + TEXT_ENTITY: S++, // & and such. + OPEN_WAKA: S++, // < + SGML_DECL: S++, // + SCRIPT: S++, //